From 92186bddfd036323d25eb28763ffbad12e54b201 Mon Sep 17 00:00:00 2001 From: Shogun Date: Wed, 13 Apr 2022 14:32:04 +0200 Subject: [PATCH 01/19] feat: Updated build system and removed legacy browsers compatibility. --- .airtap.yml | 1 - .babelrc | 17 - .eslintrc.js | 11 + .github/workflows/ci.yml | 20 +- .github/workflows/sauce.yml | 2 +- .gitignore | 11 +- .npmignore | 12 - README.md | 45 +- build/.gitignore | 1 - build/build.js | 194 --- build/build.mjs | 185 +++ build/common-replacements.js | 59 - build/files.js | 381 ------ build/files.mjs | 26 + build/footers.mjs | 47 + build/replacements.mjs | 179 +++ build/test-replacements.js | 473 ------- c8.json | 9 + doc/wg-meetings/2015-01-30.md | 60 - errors-browser.js | 127 -- errors.js | 116 -- examples/CAPSLOCKTYPER.JS | 32 - examples/capslock-type.cjs | 29 + examples/typer.js | 17 - examples/typer.mjs | 22 + experimentalWarning.js | 17 - lib/_stream_duplex.js | 139 -- lib/_stream_passthrough.js | 39 - lib/_stream_readable.js | 1124 ----------------- lib/_stream_transform.js | 201 --- lib/_stream_writable.js | 697 ---------- lib/internal/streams/async_iterator.js | 207 --- lib/internal/streams/buffer_list.js | 210 --- lib/internal/streams/destroy.js | 105 -- lib/internal/streams/end-of-stream.js | 104 -- lib/internal/streams/from-browser.js | 3 - lib/internal/streams/from.js | 64 - lib/internal/streams/pipeline.js | 97 -- lib/internal/streams/state.js | 27 - lib/internal/streams/stream-browser.js | 1 - lib/internal/streams/stream.js | 1 - package.json | 109 +- prettier.config.cjs | 7 + readable-browser.js | 9 - readable.js | 16 - src/browser.js | 16 + src/index.js | 31 + src/test/browser/test-stream-big-packet.js | 68 + src/test/browser/test-stream-big-push.js | 70 + src/test/browser/test-stream-duplex.js | 36 + src/test/browser/test-stream-end-paused.js | 30 + src/test/browser/test-stream-finished.js | 65 + src/test/browser/test-stream-ispaused.js | 27 + .../browser/test-stream-pipe-after-end.js | 67 + .../browser/test-stream-pipe-cleanup-pause.js | 46 + src/test/browser/test-stream-pipe-cleanup.js | 115 ++ .../test-stream-pipe-error-handling.js | 99 ++ src/test/browser/test-stream-pipe-event.js | 34 + .../test-stream-pipe-without-listenerCount.js | 20 + src/test/browser/test-stream-pipeline.js | 109 ++ src/test/browser/test-stream-push-order.js | 32 + src/test/browser/test-stream-push-strings.js | 55 + ...stream-readable-constructor-set-methods.js | 23 + .../browser/test-stream-readable-event.js | 105 ++ src/test/browser/test-stream-sync-write.js | 46 + ...tream-transform-constructor-set-methods.js | 35 + ...tream-transform-objectmode-falsey-value.js | 35 + .../test-stream-transform-split-objectmode.js | 57 + .../test-stream-unshift-empty-chunk.js | 62 + .../browser/test-stream-unshift-read-race.js | 122 ++ ...stream-writable-change-default-encoding.js | 69 + ...stream-writable-constructor-set-methods.js | 38 + .../test-stream-writable-decoded-encoding.js | 49 + src/test/browser/test-stream-writev.js | 101 ++ ...est-stream2-base64-single-char-read-end.js | 39 + .../browser/test-stream2-compatibility.js | 34 + .../browser/test-stream2-large-read-stall.js | 60 + src/test/browser/test-stream2-objects.js | 304 +++++ .../test-stream2-pipe-error-handling.js | 89 ++ .../test-stream2-pipe-error-once-listener.js | 39 + src/test/browser/test-stream2-push.js | 117 ++ ...st-stream2-readable-empty-buffer-no-eof.js | 93 ++ .../test-stream2-readable-from-list.js | 65 + .../test-stream2-readable-legacy-drain.js | 52 + 
.../test-stream2-readable-non-empty-end.js | 58 + .../test-stream2-readable-wrap-empty.js | 23 + .../browser/test-stream2-readable-wrap.js | 94 ++ src/test/browser/test-stream2-set-encoding.js | 335 +++++ src/test/browser/test-stream2-transform.js | 485 +++++++ src/test/browser/test-stream2-unpipe-drain.js | 63 + src/test/browser/test-stream2-writable.js | 430 +++++++ .../browser/test-stream3-pause-then-read.js | 147 +++ .../errors.js => src/test/ours/test-errors.js | 104 +- src/test/ours/test-lolex-fake-timers.js | 40 + src/test/ours/test-stream-sync-write.js | 44 + src/test/test-browser.js | 84 ++ src/util.js | 49 + src/uv-browser.js | 93 ++ tap.yml | 5 + test/browser.js | 81 -- test/browser/test-stream-big-packet.js | 62 - test/browser/test-stream-big-push.js | 68 - test/browser/test-stream-duplex.js | 35 - test/browser/test-stream-end-paused.js | 32 - test/browser/test-stream-finished.js | 60 - test/browser/test-stream-ispaused.js | 27 - test/browser/test-stream-pipe-after-end.js | 64 - .../browser/test-stream-pipe-cleanup-pause.js | 42 - test/browser/test-stream-pipe-cleanup.js | 108 -- .../test-stream-pipe-error-handling.js | 102 -- test/browser/test-stream-pipe-event.js | 32 - .../test-stream-pipe-without-listenerCount.js | 27 - test/browser/test-stream-pipeline.js | 112 -- test/browser/test-stream-push-order.js | 33 - test/browser/test-stream-push-strings.js | 49 - ...stream-readable-constructor-set-methods.js | 22 - test/browser/test-stream-readable-event.js | 114 -- test/browser/test-stream-sync-write.js | 39 - ...tream-transform-constructor-set-methods.js | 35 - ...tream-transform-objectmode-falsey-value.js | 36 - .../test-stream-transform-split-objectmode.js | 58 - .../test-stream-unshift-empty-chunk.js | 63 - test/browser/test-stream-unshift-read-race.js | 110 -- ...stream-writable-change-default-encoding.js | 64 - ...stream-writable-constructor-set-methods.js | 40 - .../test-stream-writable-decoded-encoding.js | 45 - test/browser/test-stream-writev.js | 105 -- ...est-stream2-base64-single-char-read-end.js | 41 - test/browser/test-stream2-compatibility.js | 33 - test/browser/test-stream2-large-read-stall.js | 62 - test/browser/test-stream2-objects.js | 306 ----- .../test-stream2-pipe-error-handling.js | 88 -- .../test-stream2-pipe-error-once-listener.js | 41 - test/browser/test-stream2-push.js | 120 -- ...st-stream2-readable-empty-buffer-no-eof.js | 91 -- .../test-stream2-readable-from-list.js | 68 - .../test-stream2-readable-legacy-drain.js | 52 - .../test-stream2-readable-non-empty-end.js | 57 - .../test-stream2-readable-wrap-empty.js | 24 - test/browser/test-stream2-readable-wrap.js | 86 -- test/browser/test-stream2-set-encoding.js | 317 ----- test/browser/test-stream2-transform.js | 473 ------- test/browser/test-stream2-unpipe-drain.js | 65 - test/browser/test-stream2-writable.js | 375 ------ test/browser/test-stream3-pause-then-read.js | 150 --- test/common/README.md | 802 ------------ test/common/arraystream.js | 61 - test/common/benchmark.js | 78 -- test/common/countdown.js | 80 -- test/common/dns.js | 436 ------- test/common/duplexpair.js | 118 -- test/common/fixtures.js | 74 -- test/common/heap.js | 301 ----- test/common/hijackstdio.js | 73 -- test/common/http2.js | 259 ---- test/common/index.js | 950 -------------- test/common/index.mjs | 107 -- test/common/inspector-helper.js | 789 ------------ test/common/internet.js | 107 -- test/common/ongc.js | 66 - test/common/shared-lib-util.js | 80 -- test/common/tick.js | 48 - test/common/tls.js | 244 ---- 
test/common/tmpdir.js | 101 -- test/common/wpt.js | 66 - test/fixtures/x1024.txt | 1 - test/ours/lolex-fake-timers.js | 41 - test/ours/test-stream-sync-write.js | 38 - test/parallel/test-readable-from.js | 398 ------ test/parallel/test-readable-large-hwm.js | 50 - test/parallel/test-readable-single-end.js | 37 - test/parallel/test-stream-auto-destroy.js | 99 -- test/parallel/test-stream-backpressure.js | 59 - test/parallel/test-stream-big-packet.js | 125 -- test/parallel/test-stream-big-push.js | 95 -- test/parallel/test-stream-buffer-list.js | 47 - .../test-stream-decoder-objectmode.js | 42 - .../test-stream-destroy-event-order.js | 45 - test/parallel/test-stream-duplex-destroy.js | 254 ---- test/parallel/test-stream-duplex-end.js | 73 -- test/parallel/test-stream-duplex.js | 82 -- test/parallel/test-stream-end-paused.js | 72 -- test/parallel/test-stream-events-prepend.js | 94 -- test/parallel/test-stream-finished.js | 177 --- test/parallel/test-stream-ispaused.js | 64 - .../test-stream-objectmode-undefined.js | 69 - .../test-stream-once-readable-pipe.js | 88 -- test/parallel/test-stream-pipe-after-end.js | 137 -- ...t-stream-pipe-await-drain-manual-resume.js | 100 -- ...tream-pipe-await-drain-push-while-write.js | 59 - test/parallel/test-stream-pipe-await-drain.js | 69 - .../test-stream-pipe-cleanup-pause.js | 58 - test/parallel/test-stream-pipe-cleanup.js | 162 --- .../test-stream-pipe-error-handling.js | 136 -- test/parallel/test-stream-pipe-event.js | 76 -- .../test-stream-pipe-flow-after-unpipe.js | 54 - test/parallel/test-stream-pipe-flow.js | 95 -- .../test-stream-pipe-manual-resume.js | 62 - .../test-stream-pipe-multiple-pipes.js | 113 -- ...test-stream-pipe-same-destination-twice.js | 102 -- .../test-stream-pipe-unpipe-streams.js | 103 -- .../test-stream-pipe-without-listenerCount.js | 39 - ...t-stream-pipeline-queued-end-in-destroy.js | 61 - test/parallel/test-stream-pipeline.js | 483 ------- test/parallel/test-stream-push-order.js | 76 -- test/parallel/test-stream-push-strings.js | 129 -- .../test-stream-readable-async-iterators.js | 816 ------------ ...stream-readable-constructor-set-methods.js | 36 - test/parallel/test-stream-readable-destroy.js | 226 ---- .../test-stream-readable-emittedReadable.js | 87 -- test/parallel/test-stream-readable-event.js | 152 --- .../test-stream-readable-flow-recursion.js | 94 -- .../test-stream-readable-hwm-0-async.js | 44 - ...test-stream-readable-hwm-0-no-flow-data.js | 106 -- test/parallel/test-stream-readable-hwm-0.js | 52 - .../test-stream-readable-infinite-read.js | 54 - .../test-stream-readable-invalid-chunk.js | 48 - .../test-stream-readable-needReadable.js | 108 -- ...st-stream-readable-no-unneeded-readable.js | 87 -- ...stream-readable-object-multi-push-async.js | 232 ---- .../test-stream-readable-pause-and-resume.js | 61 - ...st-stream-readable-readable-then-resume.js | 53 - ...est-stream-readable-reading-readingMore.js | 178 --- .../test-stream-readable-resume-hwm.js | 47 - .../test-stream-readable-resumeScheduled.js | 91 -- ...m-readable-setEncoding-existing-buffers.js | 101 -- .../test-stream-readable-setEncoding-null.js | 39 - ...tream-readable-with-unimplemented-_read.js | 36 - .../test-stream-readableListening-state.js | 51 - .../test-stream-transform-callback-twice.js | 41 - ...tream-transform-constructor-set-methods.js | 62 - .../parallel/test-stream-transform-destroy.js | 181 --- .../test-stream-transform-final-sync.js | 134 -- test/parallel/test-stream-transform-final.js | 136 -- .../test-stream-transform-flush-data.js | 48 
- ...tream-transform-objectmode-falsey-value.js | 77 -- ...st-stream-transform-split-highwatermark.js | 136 -- .../test-stream-transform-split-objectmode.js | 99 -- test/parallel/test-stream-uint8array.js | 131 -- test/parallel/test-stream-unpipe-event.js | 209 --- .../test-stream-unshift-empty-chunk.js | 86 -- .../parallel/test-stream-unshift-read-race.js | 174 --- ...stream-writable-change-default-encoding.js | 138 -- ...stream-writable-constructor-set-methods.js | 60 - .../test-stream-writable-decoded-encoding.js | 117 -- test/parallel/test-stream-writable-destroy.js | 286 ----- .../test-stream-writable-ended-state.js | 42 - .../test-stream-writable-finished-state.js | 44 - .../test-stream-writable-needdrain-state.js | 45 - test/parallel/test-stream-writable-null.js | 120 -- .../test-stream-writable-write-cb-twice.js | 74 -- ...est-stream-writable-write-writev-finish.js | 209 --- .../test-stream-writableState-ending.js | 57 - ...ableState-uncorked-bufferedRequestCount.js | 72 -- test/parallel/test-stream-write-destroy.js | 101 -- test/parallel/test-stream-write-final.js | 48 - test/parallel/test-stream-writev.js | 149 --- ...est-stream2-base64-single-char-read-end.js | 83 -- test/parallel/test-stream2-basic.js | 442 ------- test/parallel/test-stream2-compatibility.js | 147 --- test/parallel/test-stream2-decode-partial.js | 48 - test/parallel/test-stream2-finish-pipe.js | 66 - .../parallel/test-stream2-large-read-stall.js | 98 -- test/parallel/test-stream2-objects.js | 394 ------ .../test-stream2-pipe-error-handling.js | 136 -- .../test-stream2-pipe-error-once-listener.js | 119 -- test/parallel/test-stream2-push.js | 143 --- test/parallel/test-stream2-read-sync-stack.js | 66 - ...st-stream2-readable-empty-buffer-no-eof.js | 159 --- .../test-stream2-readable-from-list.js | 135 -- .../test-stream2-readable-legacy-drain.js | 86 -- .../test-stream2-readable-non-empty-end.js | 102 -- .../test-stream2-readable-wrap-empty.js | 60 - test/parallel/test-stream2-set-encoding.js | 285 ----- test/parallel/test-stream2-transform.js | 576 --------- test/parallel/test-stream2-unpipe-drain.js | 134 -- test/parallel/test-stream2-unpipe-leak.js | 138 -- test/parallel/test-stream2-writable.js | 465 ------- test/parallel/test-stream3-cork-end.js | 106 -- test/parallel/test-stream3-cork-uncork.js | 102 -- test/parallel/test-stream3-pause-then-read.js | 201 --- test/parallel/test-streams-highwatermark.js | 97 -- 282 files changed, 5154 insertions(+), 28088 deletions(-) delete mode 100644 .babelrc create mode 100644 .eslintrc.js delete mode 100644 .npmignore delete mode 100644 build/.gitignore delete mode 100755 build/build.js create mode 100644 build/build.mjs delete mode 100644 build/common-replacements.js delete mode 100644 build/files.js create mode 100644 build/files.mjs create mode 100644 build/footers.mjs create mode 100644 build/replacements.mjs delete mode 100644 build/test-replacements.js create mode 100644 c8.json delete mode 100644 doc/wg-meetings/2015-01-30.md delete mode 100644 errors-browser.js delete mode 100644 errors.js delete mode 100644 examples/CAPSLOCKTYPER.JS create mode 100644 examples/capslock-type.cjs delete mode 100644 examples/typer.js create mode 100644 examples/typer.mjs delete mode 100644 experimentalWarning.js delete mode 100644 lib/_stream_duplex.js delete mode 100644 lib/_stream_passthrough.js delete mode 100644 lib/_stream_readable.js delete mode 100644 lib/_stream_transform.js delete mode 100644 lib/_stream_writable.js delete mode 100644 lib/internal/streams/async_iterator.js 
delete mode 100644 lib/internal/streams/buffer_list.js delete mode 100644 lib/internal/streams/destroy.js delete mode 100644 lib/internal/streams/end-of-stream.js delete mode 100644 lib/internal/streams/from-browser.js delete mode 100644 lib/internal/streams/from.js delete mode 100644 lib/internal/streams/pipeline.js delete mode 100644 lib/internal/streams/state.js delete mode 100644 lib/internal/streams/stream-browser.js delete mode 100644 lib/internal/streams/stream.js create mode 100644 prettier.config.cjs delete mode 100644 readable-browser.js delete mode 100644 readable.js create mode 100644 src/browser.js create mode 100644 src/index.js create mode 100644 src/test/browser/test-stream-big-packet.js create mode 100644 src/test/browser/test-stream-big-push.js create mode 100644 src/test/browser/test-stream-duplex.js create mode 100644 src/test/browser/test-stream-end-paused.js create mode 100644 src/test/browser/test-stream-finished.js create mode 100644 src/test/browser/test-stream-ispaused.js create mode 100644 src/test/browser/test-stream-pipe-after-end.js create mode 100644 src/test/browser/test-stream-pipe-cleanup-pause.js create mode 100644 src/test/browser/test-stream-pipe-cleanup.js create mode 100644 src/test/browser/test-stream-pipe-error-handling.js create mode 100644 src/test/browser/test-stream-pipe-event.js create mode 100644 src/test/browser/test-stream-pipe-without-listenerCount.js create mode 100644 src/test/browser/test-stream-pipeline.js create mode 100644 src/test/browser/test-stream-push-order.js create mode 100644 src/test/browser/test-stream-push-strings.js create mode 100644 src/test/browser/test-stream-readable-constructor-set-methods.js create mode 100644 src/test/browser/test-stream-readable-event.js create mode 100644 src/test/browser/test-stream-sync-write.js create mode 100644 src/test/browser/test-stream-transform-constructor-set-methods.js create mode 100644 src/test/browser/test-stream-transform-objectmode-falsey-value.js create mode 100644 src/test/browser/test-stream-transform-split-objectmode.js create mode 100644 src/test/browser/test-stream-unshift-empty-chunk.js create mode 100644 src/test/browser/test-stream-unshift-read-race.js create mode 100644 src/test/browser/test-stream-writable-change-default-encoding.js create mode 100644 src/test/browser/test-stream-writable-constructor-set-methods.js create mode 100644 src/test/browser/test-stream-writable-decoded-encoding.js create mode 100644 src/test/browser/test-stream-writev.js create mode 100644 src/test/browser/test-stream2-base64-single-char-read-end.js create mode 100644 src/test/browser/test-stream2-compatibility.js create mode 100644 src/test/browser/test-stream2-large-read-stall.js create mode 100644 src/test/browser/test-stream2-objects.js create mode 100644 src/test/browser/test-stream2-pipe-error-handling.js create mode 100644 src/test/browser/test-stream2-pipe-error-once-listener.js create mode 100644 src/test/browser/test-stream2-push.js create mode 100644 src/test/browser/test-stream2-readable-empty-buffer-no-eof.js create mode 100644 src/test/browser/test-stream2-readable-from-list.js create mode 100644 src/test/browser/test-stream2-readable-legacy-drain.js create mode 100644 src/test/browser/test-stream2-readable-non-empty-end.js create mode 100644 src/test/browser/test-stream2-readable-wrap-empty.js create mode 100644 src/test/browser/test-stream2-readable-wrap.js create mode 100644 src/test/browser/test-stream2-set-encoding.js create mode 100644 
src/test/browser/test-stream2-transform.js create mode 100644 src/test/browser/test-stream2-unpipe-drain.js create mode 100644 src/test/browser/test-stream2-writable.js create mode 100644 src/test/browser/test-stream3-pause-then-read.js rename test/ours/errors.js => src/test/ours/test-errors.js (69%) create mode 100644 src/test/ours/test-lolex-fake-timers.js create mode 100644 src/test/ours/test-stream-sync-write.js create mode 100644 src/test/test-browser.js create mode 100644 src/util.js create mode 100644 src/uv-browser.js create mode 100644 tap.yml delete mode 100644 test/browser.js delete mode 100644 test/browser/test-stream-big-packet.js delete mode 100644 test/browser/test-stream-big-push.js delete mode 100644 test/browser/test-stream-duplex.js delete mode 100644 test/browser/test-stream-end-paused.js delete mode 100644 test/browser/test-stream-finished.js delete mode 100644 test/browser/test-stream-ispaused.js delete mode 100644 test/browser/test-stream-pipe-after-end.js delete mode 100644 test/browser/test-stream-pipe-cleanup-pause.js delete mode 100644 test/browser/test-stream-pipe-cleanup.js delete mode 100644 test/browser/test-stream-pipe-error-handling.js delete mode 100644 test/browser/test-stream-pipe-event.js delete mode 100644 test/browser/test-stream-pipe-without-listenerCount.js delete mode 100644 test/browser/test-stream-pipeline.js delete mode 100644 test/browser/test-stream-push-order.js delete mode 100644 test/browser/test-stream-push-strings.js delete mode 100644 test/browser/test-stream-readable-constructor-set-methods.js delete mode 100644 test/browser/test-stream-readable-event.js delete mode 100644 test/browser/test-stream-sync-write.js delete mode 100644 test/browser/test-stream-transform-constructor-set-methods.js delete mode 100644 test/browser/test-stream-transform-objectmode-falsey-value.js delete mode 100644 test/browser/test-stream-transform-split-objectmode.js delete mode 100644 test/browser/test-stream-unshift-empty-chunk.js delete mode 100644 test/browser/test-stream-unshift-read-race.js delete mode 100644 test/browser/test-stream-writable-change-default-encoding.js delete mode 100644 test/browser/test-stream-writable-constructor-set-methods.js delete mode 100644 test/browser/test-stream-writable-decoded-encoding.js delete mode 100644 test/browser/test-stream-writev.js delete mode 100644 test/browser/test-stream2-base64-single-char-read-end.js delete mode 100644 test/browser/test-stream2-compatibility.js delete mode 100644 test/browser/test-stream2-large-read-stall.js delete mode 100644 test/browser/test-stream2-objects.js delete mode 100644 test/browser/test-stream2-pipe-error-handling.js delete mode 100644 test/browser/test-stream2-pipe-error-once-listener.js delete mode 100644 test/browser/test-stream2-push.js delete mode 100644 test/browser/test-stream2-readable-empty-buffer-no-eof.js delete mode 100644 test/browser/test-stream2-readable-from-list.js delete mode 100644 test/browser/test-stream2-readable-legacy-drain.js delete mode 100644 test/browser/test-stream2-readable-non-empty-end.js delete mode 100644 test/browser/test-stream2-readable-wrap-empty.js delete mode 100644 test/browser/test-stream2-readable-wrap.js delete mode 100644 test/browser/test-stream2-set-encoding.js delete mode 100644 test/browser/test-stream2-transform.js delete mode 100644 test/browser/test-stream2-unpipe-drain.js delete mode 100644 test/browser/test-stream2-writable.js delete mode 100644 test/browser/test-stream3-pause-then-read.js delete mode 100644 
test/common/README.md delete mode 100644 test/common/arraystream.js delete mode 100644 test/common/benchmark.js delete mode 100644 test/common/countdown.js delete mode 100644 test/common/dns.js delete mode 100644 test/common/duplexpair.js delete mode 100644 test/common/fixtures.js delete mode 100644 test/common/heap.js delete mode 100644 test/common/hijackstdio.js delete mode 100644 test/common/http2.js delete mode 100644 test/common/index.js delete mode 100644 test/common/index.mjs delete mode 100644 test/common/inspector-helper.js delete mode 100644 test/common/internet.js delete mode 100644 test/common/ongc.js delete mode 100644 test/common/shared-lib-util.js delete mode 100644 test/common/tick.js delete mode 100644 test/common/tls.js delete mode 100644 test/common/tmpdir.js delete mode 100644 test/common/wpt.js delete mode 100644 test/fixtures/x1024.txt delete mode 100644 test/ours/lolex-fake-timers.js delete mode 100644 test/ours/test-stream-sync-write.js delete mode 100644 test/parallel/test-readable-from.js delete mode 100644 test/parallel/test-readable-large-hwm.js delete mode 100644 test/parallel/test-readable-single-end.js delete mode 100644 test/parallel/test-stream-auto-destroy.js delete mode 100644 test/parallel/test-stream-backpressure.js delete mode 100644 test/parallel/test-stream-big-packet.js delete mode 100644 test/parallel/test-stream-big-push.js delete mode 100644 test/parallel/test-stream-buffer-list.js delete mode 100644 test/parallel/test-stream-decoder-objectmode.js delete mode 100644 test/parallel/test-stream-destroy-event-order.js delete mode 100644 test/parallel/test-stream-duplex-destroy.js delete mode 100644 test/parallel/test-stream-duplex-end.js delete mode 100644 test/parallel/test-stream-duplex.js delete mode 100644 test/parallel/test-stream-end-paused.js delete mode 100644 test/parallel/test-stream-events-prepend.js delete mode 100644 test/parallel/test-stream-finished.js delete mode 100644 test/parallel/test-stream-ispaused.js delete mode 100644 test/parallel/test-stream-objectmode-undefined.js delete mode 100644 test/parallel/test-stream-once-readable-pipe.js delete mode 100644 test/parallel/test-stream-pipe-after-end.js delete mode 100644 test/parallel/test-stream-pipe-await-drain-manual-resume.js delete mode 100644 test/parallel/test-stream-pipe-await-drain-push-while-write.js delete mode 100644 test/parallel/test-stream-pipe-await-drain.js delete mode 100644 test/parallel/test-stream-pipe-cleanup-pause.js delete mode 100644 test/parallel/test-stream-pipe-cleanup.js delete mode 100644 test/parallel/test-stream-pipe-error-handling.js delete mode 100644 test/parallel/test-stream-pipe-event.js delete mode 100644 test/parallel/test-stream-pipe-flow-after-unpipe.js delete mode 100644 test/parallel/test-stream-pipe-flow.js delete mode 100644 test/parallel/test-stream-pipe-manual-resume.js delete mode 100644 test/parallel/test-stream-pipe-multiple-pipes.js delete mode 100644 test/parallel/test-stream-pipe-same-destination-twice.js delete mode 100644 test/parallel/test-stream-pipe-unpipe-streams.js delete mode 100644 test/parallel/test-stream-pipe-without-listenerCount.js delete mode 100644 test/parallel/test-stream-pipeline-queued-end-in-destroy.js delete mode 100644 test/parallel/test-stream-pipeline.js delete mode 100644 test/parallel/test-stream-push-order.js delete mode 100644 test/parallel/test-stream-push-strings.js delete mode 100644 test/parallel/test-stream-readable-async-iterators.js delete mode 100644 
test/parallel/test-stream-readable-constructor-set-methods.js delete mode 100644 test/parallel/test-stream-readable-destroy.js delete mode 100644 test/parallel/test-stream-readable-emittedReadable.js delete mode 100644 test/parallel/test-stream-readable-event.js delete mode 100644 test/parallel/test-stream-readable-flow-recursion.js delete mode 100644 test/parallel/test-stream-readable-hwm-0-async.js delete mode 100644 test/parallel/test-stream-readable-hwm-0-no-flow-data.js delete mode 100644 test/parallel/test-stream-readable-hwm-0.js delete mode 100644 test/parallel/test-stream-readable-infinite-read.js delete mode 100644 test/parallel/test-stream-readable-invalid-chunk.js delete mode 100644 test/parallel/test-stream-readable-needReadable.js delete mode 100644 test/parallel/test-stream-readable-no-unneeded-readable.js delete mode 100644 test/parallel/test-stream-readable-object-multi-push-async.js delete mode 100644 test/parallel/test-stream-readable-pause-and-resume.js delete mode 100644 test/parallel/test-stream-readable-readable-then-resume.js delete mode 100644 test/parallel/test-stream-readable-reading-readingMore.js delete mode 100644 test/parallel/test-stream-readable-resume-hwm.js delete mode 100644 test/parallel/test-stream-readable-resumeScheduled.js delete mode 100644 test/parallel/test-stream-readable-setEncoding-existing-buffers.js delete mode 100644 test/parallel/test-stream-readable-setEncoding-null.js delete mode 100644 test/parallel/test-stream-readable-with-unimplemented-_read.js delete mode 100644 test/parallel/test-stream-readableListening-state.js delete mode 100644 test/parallel/test-stream-transform-callback-twice.js delete mode 100644 test/parallel/test-stream-transform-constructor-set-methods.js delete mode 100644 test/parallel/test-stream-transform-destroy.js delete mode 100644 test/parallel/test-stream-transform-final-sync.js delete mode 100644 test/parallel/test-stream-transform-final.js delete mode 100644 test/parallel/test-stream-transform-flush-data.js delete mode 100644 test/parallel/test-stream-transform-objectmode-falsey-value.js delete mode 100644 test/parallel/test-stream-transform-split-highwatermark.js delete mode 100644 test/parallel/test-stream-transform-split-objectmode.js delete mode 100644 test/parallel/test-stream-uint8array.js delete mode 100644 test/parallel/test-stream-unpipe-event.js delete mode 100644 test/parallel/test-stream-unshift-empty-chunk.js delete mode 100644 test/parallel/test-stream-unshift-read-race.js delete mode 100644 test/parallel/test-stream-writable-change-default-encoding.js delete mode 100644 test/parallel/test-stream-writable-constructor-set-methods.js delete mode 100644 test/parallel/test-stream-writable-decoded-encoding.js delete mode 100644 test/parallel/test-stream-writable-destroy.js delete mode 100644 test/parallel/test-stream-writable-ended-state.js delete mode 100644 test/parallel/test-stream-writable-finished-state.js delete mode 100644 test/parallel/test-stream-writable-needdrain-state.js delete mode 100644 test/parallel/test-stream-writable-null.js delete mode 100644 test/parallel/test-stream-writable-write-cb-twice.js delete mode 100644 test/parallel/test-stream-writable-write-writev-finish.js delete mode 100644 test/parallel/test-stream-writableState-ending.js delete mode 100644 test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js delete mode 100644 test/parallel/test-stream-write-destroy.js delete mode 100644 test/parallel/test-stream-write-final.js delete mode 100644 
test/parallel/test-stream-writev.js delete mode 100644 test/parallel/test-stream2-base64-single-char-read-end.js delete mode 100644 test/parallel/test-stream2-basic.js delete mode 100644 test/parallel/test-stream2-compatibility.js delete mode 100644 test/parallel/test-stream2-decode-partial.js delete mode 100644 test/parallel/test-stream2-finish-pipe.js delete mode 100644 test/parallel/test-stream2-large-read-stall.js delete mode 100644 test/parallel/test-stream2-objects.js delete mode 100644 test/parallel/test-stream2-pipe-error-handling.js delete mode 100644 test/parallel/test-stream2-pipe-error-once-listener.js delete mode 100644 test/parallel/test-stream2-push.js delete mode 100644 test/parallel/test-stream2-read-sync-stack.js delete mode 100644 test/parallel/test-stream2-readable-empty-buffer-no-eof.js delete mode 100644 test/parallel/test-stream2-readable-from-list.js delete mode 100644 test/parallel/test-stream2-readable-legacy-drain.js delete mode 100644 test/parallel/test-stream2-readable-non-empty-end.js delete mode 100644 test/parallel/test-stream2-readable-wrap-empty.js delete mode 100644 test/parallel/test-stream2-set-encoding.js delete mode 100644 test/parallel/test-stream2-transform.js delete mode 100644 test/parallel/test-stream2-unpipe-drain.js delete mode 100644 test/parallel/test-stream2-unpipe-leak.js delete mode 100644 test/parallel/test-stream2-writable.js delete mode 100644 test/parallel/test-stream3-cork-end.js delete mode 100644 test/parallel/test-stream3-cork-uncork.js delete mode 100644 test/parallel/test-stream3-pause-then-read.js delete mode 100644 test/parallel/test-streams-highwatermark.js diff --git a/.airtap.yml b/.airtap.yml index fe0435ff1e..56fcbc7a04 100644 --- a/.airtap.yml +++ b/.airtap.yml @@ -3,7 +3,6 @@ providers: browsers: - name: chrome - - name: ie - name: firefox - name: safari - name: edge diff --git a/.babelrc b/.babelrc deleted file mode 100644 index 465861f3d1..0000000000 --- a/.babelrc +++ /dev/null @@ -1,17 +0,0 @@ -{ - "presets": [ - ["@babel/preset-env", { - "targets": [ - "last 2 versions", - "not dead", - "node 6.0" - ], - "modules": "commonjs", - "exclude": [ - "transform-regenerator", - "transform-typeof-symbol" - ], - "debug": true - }] - ] -} diff --git a/.eslintrc.js b/.eslintrc.js new file mode 100644 index 0000000000..7b93eee375 --- /dev/null +++ b/.eslintrc.js @@ -0,0 +1,11 @@ +module.exports = { + extends: ['standard'], + rules: { + /* + This is inserted to make this compatible with prettier. + Once https://github.com/prettier/prettier/issues/3845 and https://github.com/prettier/prettier/issues/3847 are solved this might be not needed any more. 
+ */ + 'space-before-function-paren': 0, + curly: [2, 'all'] + } +} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f49edcaecf..f643098724 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,4 +1,4 @@ -name: Node.js +name: Node.js Build on: [push, pull_request] @@ -11,12 +11,12 @@ jobs: os: [ubuntu-latest, windows-latest, macOS-latest] node-version: [6.x, 8.x, 10.x, 12.x, 14.x] steps: - - uses: actions/checkout@v1 - - name: Use Node.js ${{ matrix.node-version }} on ${{ matrix.os }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - name: npm install - run: npm install - - name: npm run test - run: npm run test + - uses: actions/checkout@v1 + - name: Use Node.js ${{ matrix.node-version }} on ${{ matrix.os }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + - name: npm install + run: npm install + - name: npm run test + run: npm run test diff --git a/.github/workflows/sauce.yml b/.github/workflows/sauce.yml index a13fc719a6..07d7d65586 100644 --- a/.github/workflows/sauce.yml +++ b/.github/workflows/sauce.yml @@ -1,4 +1,4 @@ -name: Sauce Labs +name: Sauce Labs Build on: push jobs: test: diff --git a/.gitignore b/.gitignore index 098013154a..2cb19adc14 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,5 @@ +./lib/ node_modules/ -.zuul.yml -.nyc_output -coverage -package-lock.json -*.tap -.airtaprc -yarn.lock +node-*.tar.gz +./test/ +package-lock.json \ No newline at end of file diff --git a/.npmignore b/.npmignore deleted file mode 100644 index 3ba4f832ce..0000000000 --- a/.npmignore +++ /dev/null @@ -1,12 +0,0 @@ -build/ -test/ -examples/ -fs.js -zlib.js -.airtap.yml -.airtaprc -.babelrc -.nyc_output -.github -coverage -doc/ diff --git a/README.md b/README.md index d03b064b9f..6f132562cd 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,11 @@ # readable-stream -***Node.js core streams for userland*** +**_Node.js core streams for userland_** -[![npm status](http://img.shields.io/npm/v/readable-stream.svg)](https://www.npmjs.org/package/readable-stream) +[![npm status](https://img.shields.io/npm/v/readable-stream.svg)](https://npm.im/readable-stream) [![node](https://img.shields.io/node/v/readable-stream.svg)](https://www.npmjs.org/package/readable-stream) -![Node.js](https://github.com/nodejs/readable-stream/workflows/Node.js/badge.svg?branch=main) -![Sauce Labs](https://github.com/nodejs/readable-stream/workflows/Sauce%20Labs/badge.svg?branch=main) +[![Node.js Build](https://github.com/nodejs/readable-stream/workflows/Node.js%20Build/badge.svg)](https://github.com/nodejs/readable-stream/actions?query=workflow%3ANode.js%20Build) +[![Sauce Labs Build](https://github.com/nodejs/readable-stream/workflows/Sauce%20Labs%20Build/badge.svg)](https://github.com/nodejs/readable-stream/actions?query=workflow%3ASauce%20Labs%20Build) [![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) @@ -18,7 +18,7 @@ This package is a mirror of the streams implementations in Node.js. Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.19.0/docs/api/stream.html). If you want to guarantee a stable streams base, regardless of what version of -Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). 
+Node you, or the users of your libraries are using, use **readable-stream** _only_ and avoid the _"stream"_ module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). As of version 2.0.0 **readable-stream** uses semantic versioning. @@ -48,6 +48,7 @@ v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, https://github.com/nodejs/node/pull/17979 ## Version 2.x.x + v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. ### Big Thanks @@ -61,15 +62,8 @@ without any changes, if you are just using one of the main classes and functions. ```js -const { - Readable, - Writable, - Transform, - Duplex, - pipeline, - finished -} = require('readable-stream') -```` +const { Readable, Writable, Transform, Duplex, pipeline, finished } = require('readable-stream') +``` Note that `require('stream')` will return `Stream`, while `require('readable-stream')` will return `Readable`. We discourage using @@ -106,23 +100,24 @@ module.exports = { oversees the development and maintenance of the Streams API within Node.js. The responsibilities of the Streams Working Group include: -* Addressing stream issues on the Node.js issue tracker. -* Authoring and editing stream documentation within the Node.js project. -* Reviewing changes to stream subclasses within the Node.js project. -* Redirecting changes to streams from the Node.js project to this +- Addressing stream issues on the Node.js issue tracker. +- Authoring and editing stream documentation within the Node.js project. +- Reviewing changes to stream subclasses within the Node.js project. +- Redirecting changes to streams from the Node.js project to this project. -* Assisting in the implementation of stream providers within Node.js. -* Recommending versions of `readable-stream` to be included in Node.js. -* Messaging about the future of streams to give the community advance +- Assisting in the implementation of stream providers within Node.js. +- Recommending versions of `readable-stream` to be included in Node.js. +- Messaging about the future of streams to give the community advance notice of changes. 
+ ## Team Members -* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> -* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> +- **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +- **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E -* **Robert Nagy** ([@ronag](https://github.com/ronag)) <ronagy@icloud.com> -* **Vincent Weevers** ([@vweevers](https://github.com/vweevers)) <mail@vincentweevers.nl> +- **Robert Nagy** ([@ronag](https://github.com/ronag)) <ronagy@icloud.com> +- **Vincent Weevers** ([@vweevers](https://github.com/vweevers)) <mail@vincentweevers.nl> [sauce]: https://saucelabs.com diff --git a/build/.gitignore b/build/.gitignore deleted file mode 100644 index 3c3629e647..0000000000 --- a/build/.gitignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/build/build.js b/build/build.js deleted file mode 100755 index 3d80f2b5b4..0000000000 --- a/build/build.js +++ /dev/null @@ -1,194 +0,0 @@ -#!/usr/bin/env node - -const hyperquest = require('hyperquest') - , bl = require('bl') - , fs = require('fs') - , path = require('path') - , tar = require('tar-fs') - , gunzip = require('gunzip-maybe') - , babel = require('@babel/core') - , glob = require('glob') - , pump = require('pump') - , rimraf = require('rimraf') - , encoding = 'utf8' - , urlRegex = /^https?:\/\// - , nodeVersion = process.argv[2] - , nodeVersionRegexString = '\\d+\\.\\d+\\.\\d+' - , usageVersionRegex = RegExp('^' + nodeVersionRegexString + '$') - , readmeVersionRegex = - RegExp('((?:(?:Node-core )|(?:https\:\/\/nodejs\.org\/dist\/))v)' + nodeVersionRegexString, 'g') - - , readmePath = path.join(__dirname, '..', 'README.md') - , files = require('./files') - , testReplace = require('./test-replacements') - - , downloadurl = `https://nodejs.org/dist/v${nodeVersion}/node-v${nodeVersion}.tar.gz` - , src = path.join(__dirname, `node-v${nodeVersion}`) - , libsrcurl = path.join(src, 'lib/') - , testsrcurl = path.join(src, 'test/parallel/') - , libourroot = path.join(__dirname, '../lib/') - , testourroot = path.join(__dirname, '../test/parallel/') - - -if (!usageVersionRegex.test(nodeVersion)) { - console.error('Usage: build.js xx.yy.zz') - return process.exit(1); -} - -// `inputLoc`: URL or local path. 
-function processFile (inputLoc, out, replacements, addAtEnd) { - var file = fs.createReadStream(inputLoc, encoding) - - file.pipe(bl(function (err, data) { - if (err) throw err - - console.log('Processing', inputLoc) - data = data.toString() - replacements.forEach(function (replacement) { - const regexp = replacement[0] - var arg2 = replacement[1] - if (typeof arg2 === 'function') - arg2 = arg2.bind(data) - if (arg2 === undefined) { - console.error('missing second arg for file', inputLoc, replacement) - throw new Error('missing second arg in replacement') - } - data = data.replace(regexp, arg2) - }) - - if (addAtEnd) { - data += addAtEnd - } - if (inputLoc.slice(-3) === '.js') { - try { - const transformed = babel.transform(data, { - // Required for babel to pick up .babelrc - filename: inputLoc - }) - data = transformed.code - } catch (err) { - fs.writeFile(out + '.errored.js', data, encoding, function () { - console.log('Wrote errored', out) - - throw err - }) - return - } - } - fs.writeFile(out, data, encoding, function (err) { - if (err) throw err - - console.log('Wrote', out) - }) - })) -} -function deleteOldTests(){ - const files = fs.readdirSync(path.join(__dirname, '..', 'test', 'parallel')); - for (let file of files) { - let name = path.join(__dirname, '..', 'test', 'parallel', file); - console.log('Removing', name); - fs.unlinkSync(name); - } -} -function processLibFile (file) { - var replacements = files[file] - , url = libsrcurl + file - , out = path.join(libourroot, file) - - processFile(url, out, replacements) -} - - -function processTestFile (file) { - var replacements = testReplace.all - , url = testsrcurl + file - , out = path.join(testourroot, file) - - if (testReplace[file]) - replacements = replacements.concat(testReplace[file]) - - processFile(url, out, replacements, ';(function () { var t = require(\'tap\'); t.pass(\'sync run\'); })();var _list = process.listeners(\'uncaughtException\'); process.removeAllListeners(\'uncaughtException\'); _list.pop(); _list.forEach((e) => process.on(\'uncaughtException\', e));') -} - -//-------------------------------------------------------------------- -// Download the release from nodejs.org -console.log(`Downloading ${downloadurl}`) -pump( - hyperquest(downloadurl), - gunzip(), - tar.extract(__dirname), - function (err) { - if (err) { - throw err - } - - //-------------------------------------------------------------------- - // Grab & process files in ../lib/ - - Object.keys(files).forEach(processLibFile) - - - //-------------------------------------------------------------------- - // Discover, grab and process all test-stream* files on the given release - - glob(path.join(testsrcurl, 'test-@(stream|readable)*.js'), function (err, list) { - if (err) { - throw err - } - - list.forEach(function (file) { - file = path.basename(file) - if (!/-wrap(?:-encoding)?\.js$/.test(file) && - file !== 'test-stream2-httpclient-response-end.js' && - file !== 'test-stream-base-no-abort.js' && - file !== 'test-stream-preprocess.js' && - file !== 'test-stream-inheritance.js' && - file !== 'test-stream-base-prototype-accessors.js' && - file !== 'test-stream-base-prototype-accessors-enumerability.js' && - file !== 'test-stream-wrap-drain.js' && - file !== 'test-stream-pipeline-http2.js' && - file !== 'test-stream-base-typechecking.js') { - processTestFile(file) - } - }) - }) - - //-------------------------------------------------------------------- - // Grab the nodejs/node test/common.js - - glob(path.join(src, 'test/common/*'), function (err, 
list) { - if (err) { - throw err - } - - list.forEach(function (file) { - file = path.basename(file) - processFile( - path.join(testsrcurl.replace(/parallel[/\\]$/, 'common/'), file) - , path.join(testourroot.replace('parallel', 'common'), file) - , testReplace['common.js'] - ) - }) - }) - - //-------------------------------------------------------------------- - // Update Node version in README - processFile(readmePath, readmePath, [ - [readmeVersionRegex, "$1" + nodeVersion] - ]) - } -) - -// delete the current contents of test/parallel so if node removes any tests -// they are removed here -deleteOldTests(); - -process.once('beforeExit', function () { - rimraf(src, function (err) { - if (err) { - throw err - } - - console.log('Removed', src) - }) -}) diff --git a/build/build.mjs b/build/build.mjs new file mode 100644 index 0000000000..b4b3245075 --- /dev/null +++ b/build/build.mjs @@ -0,0 +1,185 @@ +import { createReadStream } from 'node:fs' +import { copyFile, cp, mkdir, rm, writeFile } from 'node:fs/promises' +import { dirname, resolve } from 'node:path' +import { finished } from 'node:stream/promises' +import { Parse } from 'tar' +import { request } from 'undici' +import { aliases, skippedSources, sources } from './files.mjs' +import { footers } from './footers.mjs' +import { replacements } from './replacements.mjs' + +const baseMatcher = /^lib|test/ + +function highlightFile(file, color) { + return `\x1b[${color}m${file.replace(process.cwd() + '/', '')}\x1b[0m` +} + +async function extract(nodeVersion, tarFile) { + const sourcesMatcher = sources.map((s) => new RegExp(s)) + + console.log(`Extracting Node.js ${nodeVersion} tar file ...`) + const contents = [] + const tarPrefix = `node-v${nodeVersion}/` + const parser = new Parse() + + parser.on('entry', (entry) => { + const dst = entry.path.replace(tarPrefix, '') + + if ( + entry.type === 'Directory' || + skippedSources.includes(dst) || + !baseMatcher.test(dst) || + !sourcesMatcher.some((s) => s.test(dst)) + ) { + return entry.resume() + } + + let buffer = Buffer.alloc(0) + + entry.on('data', (chunk) => { + buffer = Buffer.concat([buffer, chunk]) + }) + + entry.on('end', () => { + const content = buffer.toString('utf-8') + + // Enqueue file + contents.push([dst, content]) + + // Some special cases when file aliasing is needed + if (aliases[dst]) { + for (const alias of aliases[dst]) { + contents.push([alias, content]) + } + } + }) + + entry.resume() + }) + + await finished(tarFile.pipe(parser)) + return contents +} + +async function processFiles(contents) { + const replacementsKeys = Object.keys(replacements) + const footersKeys = Object.keys(footers) + + for (let [path, content] of contents) { + const modifications = [] + const matchingReplacements = replacementsKeys.filter((k) => new RegExp(k).test(path)) + const matchingFooters = footersKeys.filter((k) => new RegExp(k).test(path)) + + // Perform replacements + if (matchingReplacements.length) { + modifications.push(highlightFile('replacements', 33)) + + for (const matching of matchingReplacements) { + for (const [from, to] of replacements[matching]) { + content = content.replaceAll(new RegExp(from, 'gm'), to) + } + } + } + + // Append trailers + if (matchingReplacements.length) { + modifications.push(highlightFile('footers', 33)) + + for (const footerKey of matchingFooters) { + for (const footer of footers[footerKey]) { + content += footer + } + } + } + + if (!modifications.length) { + modifications.push('no modifications') + } + + // Write the file + console.log(`Creating file 
${highlightFile(path, 32)} (${modifications.join(', ')}) ...`) + await writeFile(path, content, 'utf-8') + } +} + +async function downloadNode(nodeVersion) { + // Download node + const downloadUrl = `https://nodejs.org/v${nodeVersion}/node-v${nodeVersion}.tar.gz` + console.log(`Downloading ${downloadUrl} ...`) + const { statusCode, body } = await request(downloadUrl, { pipelining: 0 }) + + if (statusCode !== 200) { + console.log(`Downloading failed with HTTP code ${statusCode}.`) + process.exit(1) + } + + return body +} + +async function main() { + const rootDir = resolve(dirname(new URL(import.meta.url).pathname), '..') + + if (process.cwd() !== rootDir) { + console.error('Please run this from the root directory of readable-stream repository.') + return process.exit(1) + } + + const nodeVersion = process.argv[2] + + if (!nodeVersion?.match(/^\d+\.\d+\.\d+/)) { + console.error('Usage: build.js xx.yy.zz [node.tar.gz]') + return process.exit(1) + } + + // Cleanup existing folder + await rm('lib', { recursive: true, force: true }) + await rm('test', { recursive: true, force: true }) + + // Download or open the tar file + let tarFile + + if (process.argv[3]) { + tarFile = createReadStream(process.argv[3]) + } else { + tarFile = await downloadNode(nodeVersion) + } + + // Extract and process contents + const contents = await extract(nodeVersion, tarFile) + + await mkdir('lib/internal/streams', { recursive: true, force: true }) + await mkdir('test/common', { recursive: true, force: true }) + await mkdir('test/parallel', { recursive: true, force: true }) + + await processFiles(contents) + + // Copy template files + console.log(`Copying template to file ${highlightFile('lib/browser.js', 32)} ...`) + await copyFile('src/browser.js', 'lib/browser.js') + + console.log(`Copying template to file ${highlightFile('lib/index.js', 32)} ...`) + await copyFile('src/index.js', 'lib/index.js') + + console.log(`Copying template to file ${highlightFile('lib/util.js', 32)} ...`) + await copyFile('src/util.js', 'lib/util.js') + + console.log(`Copying template to file ${highlightFile('lib/internal/uv-browser.js', 32)} ...`) + await copyFile('src/uv-browser.js', 'lib/internal/uv-browser.js') + + console.log(`Copying template to file ${highlightFile('test/test-browser.js', 32)} ...`) + await copyFile('src/test/test-browser.js', 'test/test-browser.js') + + console.log(`Copying template to file ${highlightFile('test/browser', 32)} ...`) + await cp('src/test/browser', 'test/browser', { recursive: true }) + + console.log(`Copying template to file ${highlightFile('test/ours', 32)} ...`) + await cp('src/test/ours', 'test/ours', { recursive: true }) + + // TODO@PI + // // Update Node version in README + // // processFile(readmePath, readmePath, [ + // // [readmeVersionRegex, "$1" + nodeVersion] + // // ]) +} + +await main() diff --git a/build/common-replacements.js b/build/common-replacements.js deleted file mode 100644 index e17f5d7910..0000000000 --- a/build/common-replacements.js +++ /dev/null @@ -1,59 +0,0 @@ -module.exports.altForEachImplReplacement = [ - /$/ - , '\nfunction forEach (xs, f) {\n' - + ' for (var i = 0, l = xs.length; i < l; i++) {\n' - + ' f(xs[i], i);\n' - + ' }\n' - + '}\n' -] - -module.exports.altForEachUseReplacement = [ - /(\W)([\w\.\(\),\[\] ']+)(\.forEach\()/gm - , '$1forEach($2, ' -] - -module.exports.specialForEachReplacment = [ - /(\W)(\[(?:\d\,\s)+\d\])(\.forEach\()/gm - , '$1forEach($2, ' -] - -module.exports.altIndexOfImplReplacement = [ - /$/ - , '\nfunction indexOf (xs, x) {\n' - + ' for 
(var i = 0, l = xs.length; i < l; i++) {\n' - + ' if (xs[i] === x) return i;\n' - + ' }\n' - + ' return -1;\n' - + '}\n' -] - -module.exports.altIndexOfUseReplacement = [ - /(\W)([\w\.\(\),\[\]]+)(\.indexOf\()/gm - , '$1indexOf($2, ' -] -module.exports.objectKeysDefine = [ - /^('use strict';)$/m - , '$1\n\n/**/\nvar objectKeys = Object.keys || function (obj) {\n' - + ' var keys = [];\n' - + ' for (var key in obj) keys.push(key);\n' - + ' return keys;\n' - + '}\n/**/\n' -] - -module.exports.objectKeysReplacement = [ - /Object\.keys/g - , 'objectKeys' - ] - - -module.exports.bufferShimFix = [ - /^('use strict';)$/m, - `/**/ - const bufferShim = require('safe-buffer').Buffer; - /**/` -] - -module.exports.bufferStaticMethods = [ - /Buffer\.((?:alloc)|(?:allocUnsafe)|(?:from))/g, - `bufferShim.$1` -] diff --git a/build/files.js b/build/files.js deleted file mode 100644 index 96d5210947..0000000000 --- a/build/files.js +++ /dev/null @@ -1,381 +0,0 @@ -/* This file lists the files to be fetched from the node repo - * in the /lib/ directory which will be placed in the ../lib/ - * directory after having each of the "replacements" in the - * array for that file applied to it. The replacements are - * simply the arguments to String#replace, so they can be - * strings, regexes, functions. - */ - -const headRegexp = /(^module.exports = \w+;?)/m - - , requireReplacement = [ - /(require\(['"])(_stream_)/g - , '$1./$2' - ] - , instanceofReplacement = [ - /instanceof Stream\.(\w+)/g - , function (match, streamType) { - return 'instanceof ' + streamType - } - ] - - // use the string_decoder in node_modules rather than core - , stringDecoderReplacement = [ - /(require\(['"])(string_decoder)(['"]\))/g - , '$1$2/$3' - ] - - // The browser build ends up with a circular dependency, so the require is - // done lazily, but cached. 
- , addDuplexDec = [ - headRegexp - , '$1\n\n/**/\nvar Duplex;\n/**/\n' - ] - , addDuplexRequire = [ - /^(function (?:Writable|Readable)(?:State)?.*{)/gm - , '\n$1\n Duplex = Duplex || require(\'./_stream_duplex\');\n' - ] - - , altIndexOfImplReplacement = require('./common-replacements').altIndexOfImplReplacement - , altIndexOfUseReplacement = require('./common-replacements').altIndexOfUseReplacement - - , utilReplacement = [ - /^const util = require\('util'\);/m - , '' - ] - - , inherits = [ - /^util.inherits/m - , 'require(\'inherits\')' - ] - - , debugLogReplacement = [ - /const debug = util.debuglog\('stream'\);/ - , '\n\n/**/\nconst debugUtil = require(\'util\');\n' - + 'let debug;\n' - + 'if (debugUtil && debugUtil.debuglog) {\n' - + ' debug = debugUtil.debuglog(\'stream\');\n' - + '} else {\n' - + ' debug = function () {};\n' - + '}\n/**/\n' - ] - - , deprecateReplacement = [ - /util.deprecate/ - , 'require(\'util-deprecate\')' - ] - - , objectDefinePropertyReplacement = [ - /(Object\.defineProperties)/ - , 'if (Object.defineProperties) $1' - ] - , objectDefinePropertySingReplacement = [ - /Object\.defineProperty\(([\w\W]+?)\}\);/ - , '(function (){try {\n' - + 'Object.defineProperty\($1});\n' - + '}catch(_){}}());\n' - ] - - , objectKeysDefine = require('./common-replacements').objectKeysDefine - - , objectKeysReplacement = require('./common-replacements').objectKeysReplacement - - , eventEmittterReplacement = [ - /^(const EE = require\('events'\));$/m - , '/**/\n$1.EventEmitter;\n\n' - + 'var EElistenerCount = function(emitter, type) {\n' - + ' return emitter.listeners(type).length;\n' - + '};\n/**/\n' - ] - - , eventEmittterListenerCountReplacement = [ - /(EE\.listenerCount)/g - , 'EElistenerCount' - ] - - , bufferIsEncodingReplacement = [ - /Buffer.isEncoding\((\w+)\)/ - , '([\'hex\', \'utf8\', \'utf-8\', \'ascii\', \'binary\', \'base64\',\n' - + '\'ucs2\', \'ucs-2\',\'utf16le\', \'utf-16le\', \'raw\']\n' - + '.indexOf(($1 + \'\').toLowerCase()) > -1)' - ] - - , requireStreamReplacement = [ - /const Stream = require\('stream'\);/ - , '\n\n/**/\n' - + 'var Stream = require(\'./internal/streams/stream\')' - + '\n/**/\n' - ] - - , isBufferReplacement = [ - /(\w+) instanceof Buffer/g - , 'Buffer.isBuffer($1)' - ] - - , internalUtilReplacement = [ - /^const internalUtil = require\('internal\/util'\);/m - , '\n/**/\nconst internalUtil = {\n deprecate: require(\'util-deprecate\')\n};\n' - + '/**/\n' - ] - , internalDirectory = [ - /require\('internal\/streams\/([a-zA-z]+)'\)/g, - 'require(\'./internal/streams/$1\')' - ] - , fixInstanceCheck = [ - /if \(typeof Symbol === 'function' && Symbol\.hasInstance\) \{/, - `if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {` - ] - , removeOnWriteBind = [ - /onwrite\.bind\([^)]+?\)/ - , `function(er) { onwrite(stream, er); }` - ] - , addUintStuff = [ - /(?:var|const) (?:{ )Buffer(?: }) = require\('buffer'\)(?:\.Buffer)?;/g - , ` - const Buffer = require('buffer').Buffer - const OurUint8Array = global.Uint8Array || function () {} -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} - ` - ] - , addConstructors = [ - headRegexp - , `$1 - -/* */ -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} - -// It seems a linked list but it is not -// there will be only 2 of these for each 
stream -function CorkedRequest(state) { - this.next = null; - this.entry = null; - this.finish = () => { onCorkedFinish(this, state) }; -} -/* */ -` - ] - , useWriteReq = [ - /state\.lastBufferedRequest = \{.+?\}/g - , `state.lastBufferedRequest = new WriteReq(chunk, encoding, cb)` - ] - , useCorkedRequest = [ - /var corkReq = [\s\S]+?(.+?)\.corkedRequestsFree = corkReq/g - , `$1.corkedRequestsFree = new CorkedRequest($1)` - ] - , fixUintStuff = [ - /Stream\.(_isUint8Array|_uint8ArrayToBuffer)\(/g - , `$1(` - ] - , fixBufferCheck = [ - /Object\.getPrototypeOf\((chunk)\) !== Buffer\.prototype/g - , '!Buffer.isBuffer($1)' - ] - , errorsOneLevel = [ - /internal\/errors/ - , '../errors' - ] - , errorsTwoLevel = [ - /internal\/errors/ - , '../../../errors' - ] - , warnings = [ - /^const { emitExperimentalWarning } = require\('internal\/util'\);/m, - 'const { emitExperimentalWarning } = require(\'../experimentalWarning\');' - ] - , numberIE11 = [ - /Number\.isNaN\(n\)/g - , 'n !== n' - ] - , integerIE11 = [ - /Number\.isInteger\(hwm\)/g - , '(isFinite(hwm) && Math.floor(hwm) === hwm)' - ] - , noAsyncIterators1 = [ - /Readable\.prototype\[Symbol\.asyncIterator\] = function\(\) \{/g - , 'if (typeof Symbol === \'function\' ) {\nReadable.prototype[Symbol.asyncIterator] = function () {' - ] - , noAsyncIterators2 = [ - /return createReadableStreamAsyncIterator\(this\);\n};/m - , 'return createReadableStreamAsyncIterator(this);\n};\n}' - ] - , noAsyncIteratorsFrom1 = [ - /Readable\.from = function *\(iterable, opts\) \{/g - , 'if (typeof Symbol === \'function\' ) {\nReadable.from = function (iterable, opts) {' - ] - , noAsyncIteratorsFrom2 = [ - /return from\(Readable, iterable, opts\);\n};/m - , 'return from(Readable, iterable, opts);\n};\n}' - ] - , once = [ - /const \{ once \} = require\('internal\/util'\);/ - , 'function once(callback) { let called = false; return function(...args) { if (called) return; called = true; callback(...args); }; }' - ] - -module.exports['_stream_duplex.js'] = [ - requireReplacement - , instanceofReplacement - , utilReplacement - , inherits - , stringDecoderReplacement - , objectKeysReplacement - , objectKeysDefine - , errorsOneLevel -] - -module.exports['_stream_passthrough.js'] = [ - requireReplacement - , instanceofReplacement - , utilReplacement - , inherits - , stringDecoderReplacement - , errorsOneLevel -] - -module.exports['_stream_readable.js'] = [ - addDuplexRequire - , addDuplexDec - , requireReplacement - , instanceofReplacement - , altIndexOfImplReplacement - , altIndexOfUseReplacement - , stringDecoderReplacement - , debugLogReplacement - , utilReplacement - , inherits - , stringDecoderReplacement - , eventEmittterReplacement - , requireStreamReplacement - , isBufferReplacement - , eventEmittterListenerCountReplacement - , internalDirectory - , fixUintStuff - , addUintStuff - , errorsOneLevel - , warnings - , numberIE11 - , noAsyncIterators1 - , noAsyncIterators2 - , noAsyncIteratorsFrom1 - , noAsyncIteratorsFrom2 -] - -module.exports['_stream_transform.js'] = [ - requireReplacement - , instanceofReplacement - , utilReplacement - , inherits - , stringDecoderReplacement - , errorsOneLevel -] - -module.exports['_stream_writable.js'] = [ - addDuplexRequire - , addDuplexDec - , requireReplacement - , instanceofReplacement - , utilReplacement - , inherits - , stringDecoderReplacement - , debugLogReplacement - , deprecateReplacement - , objectDefinePropertyReplacement - , objectDefinePropertySingReplacement - , bufferIsEncodingReplacement - , [ /^var assert = 
require\('assert'\);$/m, '' ] - , requireStreamReplacement - , isBufferReplacement - , internalUtilReplacement - , fixInstanceCheck - , removeOnWriteBind - , internalDirectory - , fixUintStuff - , addUintStuff - , fixBufferCheck - , useWriteReq - , useCorkedRequest - , addConstructors - , errorsOneLevel -] - -module.exports['internal/streams/buffer_list.js'] = [ - [ - /inspect.custom/g, - 'custom' - ], - [ - /const \{ inspect \} = require\('util'\);/, - ` -const { inspect } = require('util') -const custom = inspect && inspect.custom || 'inspect' - ` - ] -] -module.exports['internal/streams/destroy.js'] = [ - errorsTwoLevel -] - -module.exports['internal/streams/state.js'] = [ - , errorsTwoLevel - , integerIE11 -] - -module.exports['internal/streams/async_iterator.js'] = [ - , errorsTwoLevel - , [ - /internal\/streams\/end-of-stream/, - './end-of-stream' - ] - , [ - /const AsyncIteratorPrototype = Object\.getPrototypeOf\(\n.*Object\.getPrototypeOf\(async function\* \(\) \{\}\).prototype\);/m, - 'const AsyncIteratorPrototype = Object\.getPrototypeOf(function () {})' - ] - , [ - / return\(\)/, - '[Symbol.asyncIterator]() { return this },\n return\(\)' - ] -] - -module.exports['internal/streams/end-of-stream.js'] = [ - , errorsTwoLevel - , [ - /const \{ once \} = require\('internal\/util'\);/, - `function once(callback) { - let called = false; - return function(...args) { - if (called) return; - called = true; - callback.apply(this, args); - }; -}` - ] -] - -module.exports['internal/streams/pipeline.js'] = [ - once - , errorsTwoLevel - , [ - /require\('internal\/streams\/end-of-stream'\)/, - 'require(\'.\/end-of-stream\')' - ] -] - -module.exports['internal/streams/from.js'] = [ - errorsTwoLevel - , [ - /if \(iterable && iterable\[Symbol.asyncIterator\]\)/ - , `if (iterable && typeof iterable.next === 'function') { - iterator = iterable - } -else if (iterable && iterable[Symbol.asyncIterator])` - ] -] diff --git a/build/files.mjs b/build/files.mjs new file mode 100644 index 0000000000..8dbc03b392 --- /dev/null +++ b/build/files.mjs @@ -0,0 +1,26 @@ +export const sources = [ + 'lib/_stream_.+', + 'lib/internal/errors.js', + 'lib/internal/streams/.+', + 'lib/internal/wrap_js_stream.js', + 'test/parallel/test-stream.+', + 'test/parallel/test-readable.+', + 'test/common/index.js', + 'test/common/tmpdir.js' +] + +export const skippedSources = [ + 'test/parallel/test-stream2-httpclient-response-end.js', + 'test/parallel/test-stream-base-no-abort.js', + 'test/parallel/test-stream-preprocess.js', + 'test/parallel/test-stream-inheritance.js', + 'test/parallel/test-stream-base-prototype-accessors.js', + 'test/parallel/test-stream-base-prototype-accessors-enumerability.js', + 'test/parallel/test-stream-wrap-drain.js', + 'test/parallel/test-stream-pipeline-http2.js', + 'test/parallel/test-stream-base-typechecking.js' +] + +export const aliases = { + 'lib/internal/errors.js': ['lib/internal/errors-browser.js'] +} diff --git a/build/footers.mjs b/build/footers.mjs new file mode 100644 index 0000000000..ed717848ab --- /dev/null +++ b/build/footers.mjs @@ -0,0 +1,47 @@ +const streamWritable = ` +/* replacement start */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + this.next = null; + this.entry = null; + this.finish = () => { onCorkedFinish(this, state) }; +} +/* replacement end */ +` + +const 
streamLegacy = ` +/* replacement start */ +Stream._uint8ArrayToBuffer = function(chunk) { + return Buffer.from(chunk); +} +Stream._isUint8Array = function(obj) { + return Buffer.isBuffer(obj) || obj instanceof Uint8Array; +} +/* replacement end */ +` + +const testParallel = ` +/* replacement start */ +process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(\`test failed - exited code \${code}\`); + } +}); +/* replacement end */ +` + +export const footers = { + 'lib/_stream_writable.js': streamWritable, + 'lib/internal/streams/legacy.js': streamLegacy, + 'test/parallel/.+': testParallel +} diff --git a/build/replacements.mjs b/build/replacements.mjs new file mode 100644 index 0000000000..1fd8c4ed13 --- /dev/null +++ b/build/replacements.mjs @@ -0,0 +1,179 @@ +const streamsInternalsRequireRelativeUtil = ["require\\('util'\\)", "require('../../util')"] + +const streamsInternalsRequireRelativeInternalUtil = ["require\\('internal/util'\\)", "require('../../util')"] + +const streamsInternalsRequireInternal = ["require\\('internal/([^']+)'\\)", "require('../$1')"] + +const inspectCustom = ['inspect.custom', '"custom"'] + +const internalRequireRelativeUtil = ["require\\('util'\\)", "require('../util')"] + +const errorsGetSystemErrorName = [ + "require\\('internal/util'\\)", + ` +{ + getSystemErrorName(err) { + const entry = errmap.get(err); + return entry ? entry[0] : \`Unknown system error \${err}\`; + } +} + ` +] + +const errorsRequireInternal = ["require\\('internal/([^']+)'\\)", "require('../$1')"] + +const errorsRequireInternalUtil = ["const (.+) = require\\('\\.\\./util'\\);", "const $1 = require('../../util');"] + +const errorsBinding = ["process.binding\\('uv'\\)", "require('./uv-browser')"] + +const errorsBufferMaxLength = [ + "const \\{ kMaxLength \\} = process\\.binding\\('buffer'\\);", + 'const kMaxLength = 4294967296;' +] + +const wrapBinding = [/internalBinding/, 'process.binding'] + +const streamsRequireRelative = ["require\\('(_stream.+)'\\);", "require('./$1');"] + +const streamsRequireDeprecate = ["require\\('internal/util'\\);", "{ deprecate: require('util-deprecate') };"] + +const streamsRequireRelativeInternal = ["require\\('(internal/[^']+)'\\)", "require('./$1')"] + +const streamsRequireLegacy = [ + "const Stream = require\\('stream'\\);", + "const Stream = require('./internal/streams/legacy');" +] + +const streamsRequireRelativeUtil = ["const util = require\\('util'\\);", "const util = require('./util');"] + +const streamsRequireRelativeDuplex = ['instanceof Stream.Duplex', "instanceof require('./_stream_duplex')"] + +const streamsWritableIsBuffer = ['Object\\.getPrototypeOf\\((chunk)\\) !== Buffer\\.prototype', '!Buffer.isBuffer($1)'] + +const streamsWritableWriteRequest = [ + 'state\\.lastBufferedRequest = \\{[^}]+\\}', + 'state.lastBufferedRequest = new WriteReq(chunk, encoding, cb)' +] + +const streamsWritableCorkedRequest = [ + 'var corkReq = [\\s\\S]+?(\\S+?)\\.corkedRequestsFree = corkReq', + '$1.corkedRequestsFree = new CorkedRequest($1)' +] + +const testCommonAsyncHooksDisableStart = ["(require\\('async_hooks'\\))", '/* $1'] + +const testCommonAsyncHooksDisableEnd = ['(\\}\\).enable\\(\\);)', '$1 */'] + +const testCommonTimer = ["process\\.binding\\('timer_wrap'\\)\\.Timer;", '{ now: function (){} };'] + +const testCommonLeakedGlobals = [ + '(function leakedGlobals\\(\\) \\{)', + ` +/* replacement start */ +if (typeof constructor == 'function') { + knownGlobals.push(constructor); +} + +if (typeof 
DTRACE_NET_SOCKET_READ == 'function') { + knownGlobals.push(DTRACE_NET_SOCKET_READ); +} + +if (typeof DTRACE_NET_SOCKET_WRITE == 'function') { + knownGlobals.push(DTRACE_NET_SOCKET_WRITE); +} + +if (global.__coverage__ == 'function') { + knownGlobals.push(global.__coverage__); +} + +for (const item of ['queueMicrotask', 'performance']) { + if (typeof global[item] !== undefined) { + knownGlobals.push(global[item]); + } +} +/* replacement end */ + +$1 +` +] + +// Following replacements on this file are for browser tests +// const testCommonHasCrypto = ['const hasCrypto = Boolean\\(process.versions.openssl\\);', 'const hasCrypto = true;'] + +// const testCommonWorkerThreads = ["require\\('module'\\)\\.builtinModules\\.includes\\('worker_threads'\\)", 'false'] + +// const testCommonArgv = ['process.argv.length === 2', 'false'] + +// const testCommonCpus = ['os.cpus()', 'os.cpus().length === 0 ? [{ speed: 1000 }] : os.cpus()'] + +// const testCommonBuildType = [ +// 'const buildType = process.config.target_defaults.default_configuration;', +// "const buildType = 'readable-stream';" +// ] + +const testParallelIncludeTap = [ + "('use strict')", + + `$1 + +const tap = require('tap'); +const silentConsole = { log() {}, error() {} }; +` +] + +const testParallelRequireStream = ["require\\('stream'\\)", "require('../../lib')"] + +const testParallelRequireStreamInternals = ["require\\('(_stream_\\w+)'\\)", "require('../../lib/$1')"] + +const testParallelRequireStreamClasses = [ + 'Stream.(Readable|Writable|Duplex|Transform|PassThrough)', + "require('../../lib').$1" +] + +const testParallelPromisify = [ + "const \\{ promisify \\} = require\\('util'\\);", + "const promisify = require('util-promisify');" +] + +const testParallelSilentConsole = ['console.(log|error)', 'silentConsole.$1'] + +export const replacements = { + 'lib/internal/streams/.+': [ + streamsInternalsRequireRelativeUtil, + streamsInternalsRequireRelativeInternalUtil, + streamsInternalsRequireInternal, + inspectCustom + ], + 'lib/internal/[^/]+': [internalRequireRelativeUtil], + 'lib/internal/errors-browser.js': [ + errorsGetSystemErrorName, + errorsRequireInternal, + errorsRequireInternalUtil, + errorsBinding, + errorsBufferMaxLength + ], + 'lib/internal/wrap_js_stream.js': [wrapBinding], + 'lib/_stream_.+': [ + streamsRequireRelative, + streamsRequireDeprecate, + streamsRequireRelativeInternal, + streamsRequireLegacy, + streamsRequireRelativeUtil + ], + 'lib/_stream_(readable|writable).js': [streamsRequireRelativeDuplex], + 'lib/_stream_writable.js': [streamsWritableIsBuffer, streamsWritableWriteRequest, streamsWritableCorkedRequest], + 'test/common/index.js': [ + testCommonAsyncHooksDisableStart, + testCommonAsyncHooksDisableEnd, + testCommonTimer, + testCommonLeakedGlobals + ], + 'test/parallel/.+': [ + testParallelIncludeTap, + testParallelRequireStream, + testParallelRequireStreamInternals, + testParallelRequireStreamClasses, + testParallelPromisify, + testParallelSilentConsole + ] +} diff --git a/build/test-replacements.js b/build/test-replacements.js deleted file mode 100644 index eebdec5d67..0000000000 --- a/build/test-replacements.js +++ /dev/null @@ -1,473 +0,0 @@ -const altForEachImplReplacement = require('./common-replacements').altForEachImplReplacement - , altForEachUseReplacement = require('./common-replacements').altForEachUseReplacement - , altIndexOfImplReplacement = require('./common-replacements').altIndexOfImplReplacement - , altIndexOfUseReplacement = require('./common-replacements').altIndexOfUseReplacement - , 
objectKeysDefine = - require('./common-replacements').objectKeysDefine - , objectKeysReplacement = - require('./common-replacements').objectKeysReplacement - , bufferShimFix = - require('./common-replacements').bufferShimFix - , bufferStaticMethods = - require('./common-replacements').bufferStaticMethods - , specialForEachReplacment = - require('./common-replacements').specialForEachReplacment - , deepStrictEqual = [ - /util\.isDeepStrictEqual/, - 'require(\'deep-strict-equal\')' - ] - , tapOk = [ - /console\.log\('ok'\);/g, - 'require(\'tap\').pass();' - ] - , catchES7 = [ - /} catch {/, - '} catch(_e) {' - ] - , catchES7OpenClose = [ - /} catch {}/, - '} catch(_e) {}' - ] - - -module.exports.all = [ - [ - /require\(['"]stream['"]\)/g - , 'require(\'../../\')' - ] - - // some tests need stream.Stream but readable.js doesn't offer that - // and we've undone it with the previous replacement - - , [ - /stream\.Stream|require\('\.\.\/\.\.\/'\)\.Stream/g - , 'require(\'stream\').Stream' - ] - - , [ - /require\(['"](_stream_\w+)['"]\)/g - , 'require(\'../../lib/$1\')' - ] - - , [ - /Stream.(Readable|Writable|Duplex|Transform|PassThrough)/g - , 'require(\'../../\').$1' - ] - , bufferShimFix - , bufferStaticMethods - , [ - /require\(['"]assert['"]\)/g - , 'require(\'assert/\')' - ] - , [ - /\/\/ Flags: .*/ - , '' - ] -] - -module.exports['test-stream2-basic.js'] = [ - altForEachImplReplacement - , specialForEachReplacment -] - -module.exports['test-stream2-objects.js'] = [ - altForEachImplReplacement - , altForEachUseReplacement -] - -module.exports['test-stream2-transform.js'] = [ - altForEachImplReplacement - , altForEachUseReplacement -] - -module.exports['test-stream2-writable.js'] = [ - altForEachImplReplacement - , altForEachUseReplacement - , [ - /'latin1',/g, - '\'binary\',' - ] -] - -module.exports['test-stream-big-packet.js'] = [ - altIndexOfImplReplacement - , altIndexOfUseReplacement -] - -module.exports['test-stream-end-paused.js'] = [ - [ - /console.log\('ok'\);/, - '' - ] -] - -module.exports['common.js'] = [ - objectKeysDefine - , objectKeysReplacement - , altForEachImplReplacement - , altForEachUseReplacement - , deepStrictEqual - , catchES7 - , catchES7OpenClose - , [ - /require\('module'\)\.builtinModules\.includes\('worker_threads'\)/, - 'false' - ] - , [ - /process.argv.length === 2/, - 'false' - ] - , [ - /^( for \(var x in global\) \{|function leakedGlobals\(\) \{)$/m - , ' /**/\n' - + ' if (typeof constructor == \'function\')\n' - + ' knownGlobals.push(constructor);\n' - + ' if (typeof DTRACE_NET_SOCKET_READ == \'function\')\n' - + ' knownGlobals.push(DTRACE_NET_SOCKET_READ);\n' - + ' if (typeof DTRACE_NET_SOCKET_WRITE == \'function\')\n' - + ' knownGlobals.push(DTRACE_NET_SOCKET_WRITE);\n' - + ' if (global.__coverage__)\n' - + ' knownGlobals.push(__coverage__);\n' - + '\'console,clearImmediate,setImmediate,core,__core-js_shared__,Promise,Map,Set,WeakMap,WeakSet,Reflect,System,queueMicrotask,asap,Observable,regeneratorRuntime,_babelPolyfill\'.split(\',\').filter(function (item) { return typeof global[item] !== undefined}).forEach(function (item) {knownGlobals.push(global[item])})' - + ' /**/\n\n$1' - ] - - , [ - /(exports.mustCall[\s\S]*)/m - , '$1\n' - + 'if (!util._errnoException) {\n' - + ' var uv;\n' - + ' util._errnoException = function(err, syscall) {\n' - + ' if (util.isUndefined(uv)) try { uv = process.binding(\'uv\'); } catch (e) {}\n' - + ' var errname = uv ? 
uv.errname(err) : \'\';\n' - + ' var e = new Error(syscall + \' \' + errname);\n' - + ' e.code = errname;\n' - + ' e.errno = errname;\n' - + ' e.syscall = syscall;\n' - + ' return e;\n' - + ' };\n' - + '}\n' - ] - - , [ - /^if \(global\.ArrayBuffer\) \{([^\}]+)\}$/m - , '/**/if (!process.browser) {' - + '\nif \(global\.ArrayBuffer\) {$1}\n' - + '}/**/\n' - ] - , [ - /^Object\.defineProperty\(([\w\W]+?)\}\)\;/mg - , '/**/if (!process.browser) {' - + '\nObject\.defineProperty($1});\n' - + '}/**/\n' - ] - , [ - /if \(!process\.send\)/ - , 'if (!process.send && !process.browser)' - ] - , [ - /^/, - `/**/ - require('@babel/polyfill'); - var util = require('util'); - for (var i in util) exports[i] = util[i]; - /**/` - ], - [ - /var regexp = `\^\(\\\\w\+\)\\\\s\+\\\\s\$\{port\}\/\$\{protocol\}\\\\s`;/, - `var regexp = '^(\\w+)\\s+\\s' + port + '/' + protocol + '\\s';` - ], - [ - /require\(['"]stream['"]\)/g - , 'require(\'../../\')' - ], - [ - /^var util = require\('util'\);/m - , '\n/**/\nvar util = require(\'core-util-is\');\n' - + 'util.inherits = require(\'inherits\');\n/**/\n' - ], - [ - /^const util = require\('util'\);/m -, '\n/**/\nvar util = require(\'core-util-is\');\n' - + 'util.inherits = require(\'inherits\');\n/**/\n' -] -, [ - /process\.binding\('timer_wrap'\)\.Timer;/, - '{now: function (){}}' -], -[ - /(exports\.enoughTestCpu[^;]+;)/, - '/*$1*/' -], -[ - /exports\.buildType/, - '//exports.buildType' -], -[ - /require\('async_hooks'\)/, - '/*require(\'async_hooks\')' -], -[ - /\}\).enable\(\);/, - '}).enable();*/' -], -[ - /const async_hooks = require\('async_hooks'\)/, - 'var async_hooks = require(\'async_\' + \'hooks\')' -], -[ - /(?:var|const) async_wrap = process\.binding\('async_wrap'\);\n.*(?:var|const) (?:{ )?kCheck(?: })? = async_wrap\.constants(?:\.kCheck)?;/gm, - '// const async_wrap = process.binding(\'async_wrap\');\n' + - ' // const kCheck = async_wrap.constants.kCheck;' -], -[ - /async_wrap\.async_hook_fields\[kCheck\] \+= 1;/, - '// async_wrap.async_hook_fields[kCheck] += 1;' -], -[ - /os\.cpus\(\)/, - 'os.cpus().length === 0 ? [{ speed: 1000 }] : os.cpus()' -], -[ - /const buildType = process.config.target_defaults.default_configuration;/, - 'const buildType = \'readable-stream\';' -], -[ - /const hasCrypto = Boolean\(process.versions.openssl\);/, - 'const hasCrypto = true;' -] -] - -// this test has some trouble with the nextTick depth when run -// to stdout, it's also very noisy so we'll quiet it -module.exports['test-stream-pipe-multi.js'] = [ - altForEachImplReplacement - , altForEachUseReplacement - , [ - /console\.error/g - , '//console.error' - ] - - , [ - /process\.nextTick/g - , 'setImmediate' - ] -] - -// just noisy -module.exports['test-stream2-large-read-stall.js'] = [ - [ - /console\.error/g - , ';false && console.error' - ] -] - -module.exports['test-stream-pipe-cleanup.js'] = [ - [ - /(function Writable\(\) \{)/ - , '(function (){\nif (/^v0\\.8\\./.test(process.version))\n return\n\n$1' - ] - , - [ - /$/ - ,'}())' - ] -] - -module.exports['test-stream2-stderr-sync.js'] = [ - altForEachImplReplacement - , altForEachUseReplacement - , [ - // 'tty_wrap' is too different across node versions. 
- // this bypasses it and replicates a console.error() test - /(function child0\(\) \{)/ - , '$1\n' - + ' return console.error(\'child 0\\nfoo\\nbar\\nbaz\');\n' - ] -] - -module.exports['test-stream-unshift-read-race.js'] = [ - [ - /data\.slice\(pos, pos \+ n\)/g, - 'data.slice(pos, Math.min(pos + n, data.length))' - ] -] - -module.exports['test-stream-pipe-without-listenerCount.js'] = [ - [ - /require\(\'stream\'\)/g, - 'stream' - ] -] - -module.exports['test-stream2-unpipe-drain.js'] = [ - [ - /^/, - `(function () {\n` - ], - [ - /$/ - ,'}())' - ] -] - -module.exports['test-stream2-decode-partial.js'] = [ - [ - /readable\.push\(source\.slice\(4, 6\)\)/ - ,`readable.push(source.slice(4, source.length));` - ] -] - - -module.exports['test-stream3-cork-uncork.js'] = module.exports['test-stream3-cork-end.js'] = [ - [ - /assert\.ok\(seen\.equals\(expected\)\);/, - 'assert.deepEqual(seen, expected);' - ] -] -module.exports['test-stream2-readable-from-list.js'] = [ - [ - /require\('internal\/streams\/buffer_list'\)/, - 'require(\'../../lib/internal/streams/buffer_list\')' - ], - [ - /assert\.strictEqual\(\n *util.inspect\(\[ list \], \{ compact: false \}\),\n *`\[\n *BufferList \{\n *head: \[Object\],\n *tail: \[Object\],\n *length: 4\n *\}\n *\]`\);/m, - 'assert.strictEqual(util.inspect([ list ], { compact: false }).indexOf(\'BufferList\') > 0, true)' - ] -] -module.exports['test-stream-writev.js'] = [ - tapOk, - [ - /console.log\(`# decode=/, - 'require(\'tap\').test(`# decode=' - ] -] - -module.exports['test-stream3-pause-then-read.js'] = [ - tapOk -] - -module.exports['test-stream-unshift-read-race.js'] = [ - tapOk -] - -module.exports['test-stream2-unpipe-leak.js'] = [ - tapOk -] - -module.exports['test-stream2-compatibility.js'] = [ - tapOk -] - -module.exports['test-stream-push-strings.js'] = [ - tapOk -] - -module.exports['test-stream-unshift-empty-chunk.js'] = [ - tapOk -] - -module.exports['test-stream2-pipe-error-once-listener.js'] = [ - tapOk -] - -module.exports['test-stream-push-order.js'] = [ - tapOk -] - -module.exports['test-stream2-push.js'] = [ - tapOk -] - -module.exports['test-stream2-readable-empty-buffer-no-eof.js'] = [ - tapOk, - [ - /case 3:\n(\s+)setImmediate\(r\.read\.bind\(r, 0\)\);/, - 'case 3:\n$1setTimeout(r.read.bind(r, 0), 50);' - ] -] -module.exports['test-stream-buffer-list.js'] = [ - [ - /require\('internal\/streams\/buffer_list'\);/, - 'require(\'../../lib/internal/streams/buffer_list\');' - ] -] - -module.exports['test-stream-transform-constructor-set-methods.js'] = [ - [ - /Error: _transform\\\(\\\) is n/, - 'Error: .*[Nn]' - ] -] - -module.exports['test-stream-unpipe-event.js'] = [ - [ - /^/, - 'if (process.version.indexOf(\'v0.8\') === 0) { process.exit(0) }\n' - ] -] - -module.exports['test-stream-readable-flow-recursion.js'] = [ - tapOk, - deepStrictEqual -] - -module.exports['test-stream-readable-with-unimplemented-_read.js'] = [ - deepStrictEqual -] - -module.exports['test-stream-writable-needdrain-state.js'] = [ - deepStrictEqual -] - -module.exports['test-stream-readable-setEncoding-null.js'] = [ - deepStrictEqual -] - -module.exports['test-stream-pipeline.js'] = [ - [ - /require\('http2'\)/g, - '{ createServer() { return { listen() {} } } }' - ], - [ - /assert\.deepStrictEqual\(err, new Error\('kaboom'\)\);/g, - 'assert.strictEqual(err.message, \'kaboom\');' - ], - [ - /cb\(new Error\('kaboom'\)\)/g, - 'process.nextTick(cb, new Error(\'kaboom\'))' - ], - [ - /const \{ promisify \} = require\('util'\);/g, - 'const promisify = 
require(\'util-promisify\');' - ] -] - -module.exports['test-stream-finished.js'] = [ - [ - /const \{ promisify \} = require\('util'\);/g, - 'const promisify = require(\'util-promisify\');' - ] -] - -module.exports['test-stream-readable-async-iterators.js'] = [ - [ - /assert.rejects\(/g, - '(function(f, e) { let success = false; f().then(function() { success = true; throw new Error(\'should not succeed\') }).catch(function(e2) { if (success) { throw e2; } assert.strictEqual(e.message, e2.message); })})(' - ], - [ - /tests\(\).then\(common\.mustCall\(\)\)/, - 'tests().then(common.mustCall(), common.mustNotCall(console.log))' - ], - [ - /const AsyncIteratorPrototype = Object\.getPrototypeOf\(\n.*Object\.getPrototypeOf\(async function\* \(\) \{\}\).prototype\);/m, - 'const AsyncIteratorPrototype = Object\.getPrototypeOf(function () {})' - ] -] - -module.exports['test-readable-from.js'] = [ - [ - /const \{ once \} = require\('events'\);/ - , 'const once = require(\'events.once\');' - ] -] diff --git a/c8.json b/c8.json new file mode 100644 index 0000000000..553b624ef3 --- /dev/null +++ b/c8.json @@ -0,0 +1,9 @@ +{ + "include": ["dist/lib"], + "reporter": ["text", "html"], + "check-coverage": true, + "branches": 75, + "functions": 75, + "lines": 75, + "statements": 75 +} \ No newline at end of file diff --git a/doc/wg-meetings/2015-01-30.md b/doc/wg-meetings/2015-01-30.md deleted file mode 100644 index 83275f192e..0000000000 --- a/doc/wg-meetings/2015-01-30.md +++ /dev/null @@ -1,60 +0,0 @@ -# streams WG Meeting 2015-01-30 - -## Links - -* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg -* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 -* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ - -## Agenda - -Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. - -* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) -* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) -* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) -* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) - -## Minutes - -### adopt a charter - -* group: +1's all around - -### What versioning scheme should be adopted? -* group: +1’s 3.0.0 -* domenic+group: pulling in patches from other sources where appropriate -* mikeal: version independently, suggesting versions for io.js -* mikeal+domenic: work with TC to notify in advance of changes -simpler stream creation - -### streamline creation of streams -* sam: streamline creation of streams -* domenic: nice simple solution posted - but, we lose the opportunity to change the model - may not be backwards incompatible (double check keys) - - **action item:** domenic will check - -### remove implicit flowing of streams on(‘data’) -* add isFlowing / isPaused -* mikeal: worrying that we’re documenting polyfill methods – confuses users -* domenic: more reflective API is probably good, with warning labels for users -* new section for mad scientists (reflective stream access) -* calvin: name the “third state” -* mikeal: maybe borrow the name from whatwg? 
-* domenic: we’re missing the “third state” -* consensus: kind of difficult to name the third state -* mikeal: figure out differences in states / compat -* mathias: always flow on data – eliminates third state - * explore what it breaks - -**action items:** -* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream) -* ask rod/build for infrastructure -* **chris**: explore the “flow on data” approach -* add isPaused/isFlowing -* add new docs section -* move isPaused to that section - - diff --git a/errors-browser.js b/errors-browser.js deleted file mode 100644 index fb8e73e189..0000000000 --- a/errors-browser.js +++ /dev/null @@ -1,127 +0,0 @@ -'use strict'; - -function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } - -var codes = {}; - -function createErrorType(code, message, Base) { - if (!Base) { - Base = Error; - } - - function getMessage(arg1, arg2, arg3) { - if (typeof message === 'string') { - return message; - } else { - return message(arg1, arg2, arg3); - } - } - - var NodeError = - /*#__PURE__*/ - function (_Base) { - _inheritsLoose(NodeError, _Base); - - function NodeError(arg1, arg2, arg3) { - return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; - } - - return NodeError; - }(Base); - - NodeError.prototype.name = Base.name; - NodeError.prototype.code = code; - codes[code] = NodeError; -} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js - - -function oneOf(expected, thing) { - if (Array.isArray(expected)) { - var len = expected.length; - expected = expected.map(function (i) { - return String(i); - }); - - if (len > 2) { - return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; - } else if (len === 2) { - return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); - } else { - return "of ".concat(thing, " ").concat(expected[0]); - } - } else { - return "of ".concat(thing, " ").concat(String(expected)); - } -} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith - - -function startsWith(str, search, pos) { - return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; -} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith - - -function endsWith(str, search, this_len) { - if (this_len === undefined || this_len > str.length) { - this_len = str.length; - } - - return str.substring(this_len - search.length, this_len) === search; -} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes - - -function includes(str, search, start) { - if (typeof start !== 'number') { - start = 0; - } - - if (start + search.length > str.length) { - return false; - } else { - return str.indexOf(search, start) !== -1; - } -} - -createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { - return 'The value "' + value + '" is invalid for option "' + name + '"'; -}, TypeError); -createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { - // determiner: 'must be' or 'must not be' - var determiner; - - if (typeof expected === 'string' && startsWith(expected, 'not ')) { - determiner = 'must not be'; - expected = expected.replace(/^not /, ''); - } else { - determiner = 'must be'; - } - - var msg; - - if (endsWith(name, ' argument')) { - // For cases like 'first argument' - msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); - } else { - var type = includes(name, '.') ? 'property' : 'argument'; - msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); - } - - msg += ". Received type ".concat(typeof actual); - return msg; -}, TypeError); -createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); -createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { - return 'The ' + name + ' method is not implemented'; -}); -createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); -createErrorType('ERR_STREAM_DESTROYED', function (name) { - return 'Cannot call ' + name + ' after a stream was destroyed'; -}); -createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); -createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); -createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); -createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); -createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { - return 'Unknown encoding: ' + arg; -}, TypeError); -createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); -module.exports.codes = codes; diff --git a/errors.js b/errors.js deleted file mode 100644 index 8471526d6e..0000000000 --- a/errors.js +++ /dev/null @@ -1,116 +0,0 @@ -'use strict'; - -const codes = {}; - -function createErrorType(code, message, Base) { - if (!Base) { - Base = Error - } - - function getMessage (arg1, arg2, arg3) { - if (typeof message === 'string') { - return message - } else { - return message(arg1, arg2, arg3) - } - } - - class NodeError extends Base { - constructor (arg1, arg2, arg3) { - super(getMessage(arg1, arg2, arg3)); - } - } - - NodeError.prototype.name = Base.name; - NodeError.prototype.code = code; - - codes[code] = NodeError; -} - -// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js -function oneOf(expected, thing) { - if (Array.isArray(expected)) { - const len = expected.length; - expected = expected.map((i) => String(i)); - if (len > 2) { - return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + - expected[len - 1]; - } else if (len === 2) { - return `one 
of ${thing} ${expected[0]} or ${expected[1]}`; - } else { - return `of ${thing} ${expected[0]}`; - } - } else { - return `of ${thing} ${String(expected)}`; - } -} - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith -function startsWith(str, search, pos) { - return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; -} - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith -function endsWith(str, search, this_len) { - if (this_len === undefined || this_len > str.length) { - this_len = str.length; - } - return str.substring(this_len - search.length, this_len) === search; -} - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes -function includes(str, search, start) { - if (typeof start !== 'number') { - start = 0; - } - - if (start + search.length > str.length) { - return false; - } else { - return str.indexOf(search, start) !== -1; - } -} - -createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { - return 'The value "' + value + '" is invalid for option "' + name + '"' -}, TypeError); -createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { - // determiner: 'must be' or 'must not be' - let determiner; - if (typeof expected === 'string' && startsWith(expected, 'not ')) { - determiner = 'must not be'; - expected = expected.replace(/^not /, ''); - } else { - determiner = 'must be'; - } - - let msg; - if (endsWith(name, ' argument')) { - // For cases like 'first argument' - msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; - } else { - const type = includes(name, '.') ? 'property' : 'argument'; - msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; - } - - msg += `. Received type ${typeof actual}`; - return msg; -}, TypeError); -createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); -createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { - return 'The ' + name + ' method is not implemented' -}); -createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); -createErrorType('ERR_STREAM_DESTROYED', function (name) { - return 'Cannot call ' + name + ' after a stream was destroyed'; -}); -createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); -createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); -createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); -createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); -createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { - return 'Unknown encoding: ' + arg -}, TypeError); -createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); - -module.exports.codes = codes; diff --git a/examples/CAPSLOCKTYPER.JS b/examples/CAPSLOCKTYPER.JS deleted file mode 100644 index a35b975f4e..0000000000 --- a/examples/CAPSLOCKTYPER.JS +++ /dev/null @@ -1,32 +0,0 @@ -var Transform = require('../transform'); -var inherits = require('util').inherits; - -// subclass -function MyStream () { - Transform.call(this, { - lowWaterMark: 0, - encoding: 'utf8' - }); -} -inherits(MyStream, Transform); - -MyStream.prototype._transform = function (chunk, outputFn, callback) { - outputFn(Buffer.from(String(chunk).toUpperCase())); - callback(); -}; - -// use it! 
-var s = new MyStream(); -process.stdin.resume(); -process.stdin.pipe(s).pipe(process.stdout); -if (process.stdin.setRawMode) - process.stdin.setRawMode(true); -process.stdin.on('data', function (c) { - c = c.toString(); - if (c === '\u0003' || c === '\u0004') { - process.stdin.pause(); - s.end(); - } - if (c === '\r') - process.stdout.write('\n'); -}); diff --git a/examples/capslock-type.cjs b/examples/capslock-type.cjs new file mode 100644 index 0000000000..1125e699c1 --- /dev/null +++ b/examples/capslock-type.cjs @@ -0,0 +1,29 @@ +const { Transform } = require('../lib') + +class MyStream extends Transform { + _transform(chunk, encoding, callback) { + callback(null, Buffer.from(chunk, encoding).toString('utf-8').toUpperCase()) + } +} + +const s = new MyStream() + +process.stdin.resume() +process.stdin.pipe(s).pipe(process.stdout) + +if (process.stdin.setRawMode) { + process.stdin.setRawMode(true) +} + +process.stdin.on('data', function (c) { + c = c.toString() + + if (c === '\u0003' || c === '\u0004') { + process.stdin.pause() + s.end() + } + + if (c === '\r') { + process.stdout.write('\n') + } +}) diff --git a/examples/typer.js b/examples/typer.js deleted file mode 100644 index c16eb6fb00..0000000000 --- a/examples/typer.js +++ /dev/null @@ -1,17 +0,0 @@ -var fs = require('fs'); -var fst = fs.createReadStream(__filename); -var Readable = require('../readable.js'); -var rst = new Readable(); -rst.wrap(fst); - -rst.on('end', function() { - process.stdin.pause(); -}); - -process.stdin.setRawMode(true); -process.stdin.on('data', function() { - var c = rst.read(3); - if (!c) return setTimeout(process.exit, 500) - process.stdout.write(c); -}); -process.stdin.resume(); diff --git a/examples/typer.mjs b/examples/typer.mjs new file mode 100644 index 0000000000..4d8f4bc0f0 --- /dev/null +++ b/examples/typer.mjs @@ -0,0 +1,22 @@ +import { createReadStream } from 'node:fs' +import { Readable } from '../lib/index.js' + +const fst = createReadStream(new URL(import.meta.url).pathname) +const rst = new Readable() + +rst.wrap(fst) + +rst.on('end', function () { + process.stdin.pause() +}) + +console.log("Every time you press a key, you will see more contents of the source file. Let's begin!\n\n") +process.stdin.setRawMode(true) +process.stdin.on('data', function () { + const c = rst.read(25) + if (!c) { + return setTimeout(process.exit, 500) + } + process.stdout.write(c) +}) +process.stdin.resume() diff --git a/experimentalWarning.js b/experimentalWarning.js deleted file mode 100644 index 78e841495b..0000000000 --- a/experimentalWarning.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' - -var experimentalWarnings = new Set(); - -function emitExperimentalWarning(feature) { - if (experimentalWarnings.has(feature)) return; - var msg = feature + ' is an experimental feature. This feature could ' + - 'change at any time'; - experimentalWarnings.add(feature); - process.emitWarning(msg, 'ExperimentalWarning'); -} - -function noop() {} - -module.exports.emitExperimentalWarning = process.emitWarning - ? emitExperimentalWarning - : noop; diff --git a/lib/_stream_duplex.js b/lib/_stream_duplex.js deleted file mode 100644 index 6752519225..0000000000 --- a/lib/_stream_duplex.js +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. -'use strict'; -/**/ - -var objectKeys = Object.keys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -module.exports = Duplex; - -var Readable = require('./_stream_readable'); - -var Writable = require('./_stream_writable'); - -require('inherits')(Duplex, Readable); - -{ - // Allow the keys array to be GC'ed. - var keys = objectKeys(Writable.prototype); - - for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; - } -} - -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - Readable.call(this, options); - Writable.call(this, options); - this.allowHalfOpen = true; - - if (options) { - if (options.readable === false) this.readable = false; - if (options.writable === false) this.writable = false; - - if (options.allowHalfOpen === false) { - this.allowHalfOpen = false; - this.once('end', onend); - } - } -} - -Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.highWaterMark; - } -}); -Object.defineProperty(Duplex.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); - } -}); -Object.defineProperty(Duplex.prototype, 'writableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.length; - } -}); // the no-half-open enforcer - -function onend() { - // If the writable side ended, then we're ok. - if (this._writableState.ended) return; // no more data can be written. - // But allow more writes to happen in this tick. 
- - process.nextTick(onEndNT, this); -} - -function onEndNT(self) { - self.end(); -} - -Object.defineProperty(Duplex.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._readableState === undefined || this._writableState === undefined) { - return false; - } - - return this._readableState.destroyed && this._writableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (this._readableState === undefined || this._writableState === undefined) { - return; - } // backward compatibility, the user is explicitly - // managing destroyed - - - this._readableState.destroyed = value; - this._writableState.destroyed = value; - } -}); \ No newline at end of file diff --git a/lib/_stream_passthrough.js b/lib/_stream_passthrough.js deleted file mode 100644 index 32e7414c5a..0000000000 --- a/lib/_stream_passthrough.js +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. -'use strict'; - -module.exports = PassThrough; - -var Transform = require('./_stream_transform'); - -require('inherits')(PassThrough, Transform); - -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - Transform.call(this, options); -} - -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; \ No newline at end of file diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js deleted file mode 100644 index 192d451488..0000000000 --- a/lib/_stream_readable.js +++ /dev/null @@ -1,1124 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -'use strict'; - -module.exports = Readable; -/**/ - -var Duplex; -/**/ - -Readable.ReadableState = ReadableState; -/**/ - -var EE = require('events').EventEmitter; - -var EElistenerCount = function EElistenerCount(emitter, type) { - return emitter.listeners(type).length; -}; -/**/ - -/**/ - - -var Stream = require('./internal/streams/stream'); -/**/ - - -var Buffer = require('buffer').Buffer; - -var OurUint8Array = global.Uint8Array || function () {}; - -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} - -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} -/**/ - - -var debugUtil = require('util'); - -var debug; - -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function debug() {}; -} -/**/ - - -var BufferList = require('./internal/streams/buffer_list'); - -var destroyImpl = require('./internal/streams/destroy'); - -var _require = require('./internal/streams/state'), - getHighWaterMark = _require.getHighWaterMark; - -var _require$codes = require('../errors').codes, - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. - - -var StringDecoder; -var createReadableStreamAsyncIterator; -var from; - -require('inherits')(Readable, Stream); - -var errorOrDestroy = destroyImpl.errorOrDestroy; -var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; - -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. 
- - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; -} - -function ReadableState(options, stream, isDuplex) { - Duplex = Duplex || require('./_stream_duplex'); - options = options || {}; // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - - this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift() - - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - - this.sync = true; // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - this.paused = true; // Should close be emitted on destroy. Defaults to true. - - this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') - - this.autoDestroy = !!options.autoDestroy; // has it been destroyed - - this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. 
- - this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s - - this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled - - this.readingMore = false; - this.decoder = null; - this.encoding = null; - - if (options.encoding) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } -} - -function Readable(options) { - Duplex = Duplex || require('./_stream_duplex'); - if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside - // the ReadableState constructor, at least with V8 6.5 - - var isDuplex = this instanceof Duplex; - this._readableState = new ReadableState(options, this, isDuplex); // legacy - - this.readable = true; - - if (options) { - if (typeof options.read === 'function') this._read = options.read; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - } - - Stream.call(this); -} - -Object.defineProperty(Readable.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._readableState === undefined) { - return false; - } - - return this._readableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._readableState) { - return; - } // backward compatibility, the user is explicitly - // managing destroyed - - - this._readableState.destroyed = value; - } -}); -Readable.prototype.destroy = destroyImpl.destroy; -Readable.prototype._undestroy = destroyImpl.undestroy; - -Readable.prototype._destroy = function (err, cb) { - cb(err); -}; // Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. 
- - -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - var skipChunkCheck; - - if (!state.objectMode) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - - if (encoding !== state.encoding) { - chunk = Buffer.from(chunk, encoding); - encoding = ''; - } - - skipChunkCheck = true; - } - } else { - skipChunkCheck = true; - } - - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); -}; // Unshift should *always* be something directly out of read() - - -Readable.prototype.unshift = function (chunk) { - return readableAddChunk(this, chunk, null, true, false); -}; - -function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { - debug('readableAddChunk', chunk); - var state = stream._readableState; - - if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else { - var er; - if (!skipChunkCheck) er = chunkInvalid(state, chunk); - - if (er) { - errorOrDestroy(stream, er); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { - chunk = _uint8ArrayToBuffer(chunk); - } - - if (addToFront) { - if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); - } else if (state.ended) { - errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); - } else if (state.destroyed) { - return false; - } else { - state.reading = false; - - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); - } else { - addChunk(stream, state, chunk, false); - } - } - } else if (!addToFront) { - state.reading = false; - maybeReadMore(stream, state); - } - } // We can push more data if we are below the highWaterMark. - // Also, if we have no data yet, we can stand some more bytes. - // This is to work around cases where hwm=0, such as the repl. - - - return !state.ended && (state.length < state.highWaterMark || state.length === 0); -} - -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync) { - state.awaitDrain = 0; - stream.emit('data', chunk); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - if (state.needReadable) emitReadable(stream); - } - - maybeReadMore(stream, state); -} - -function chunkInvalid(state, chunk) { - var er; - - if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); - } - - return er; -} - -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; // backwards compatibility. 
- - -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - var decoder = new StringDecoder(enc); - this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 - - this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: - - var p = this._readableState.buffer.head; - var content = ''; - - while (p !== null) { - content += decoder.write(p.data); - p = p.next; - } - - this._readableState.buffer.clear(); - - if (content !== '') this._readableState.buffer.push(content); - this._readableState.length = content.length; - return this; -}; // Don't raise the hwm > 1GB - - -var MAX_HWM = 0x40000000; - -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. - n = MAX_HWM; - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - - return n; -} // This function is designed to be inlinable, so please take care when making -// changes to the function body. - - -function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; - - if (n !== n) { - // Only flow one buffer at a time - if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; - } // If we're asking for more than the current hwm, then raise the hwm. - - - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; // Don't have enough - - if (!state.ended) { - state.needReadable = true; - return 0; - } - - return state.length; -} // you can override either this method, or the async _read(n) below. - - -Readable.prototype.read = function (n) { - debug('read', n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; - if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - - if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } - - n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. - - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. 
Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. - // if we need a readable event, then we need to do some reading. - - - var doRead = state.needReadable; - debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some - - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - - - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; // if the length is currently zero, then we *need* a readable event. - - if (state.length === 0) state.needReadable = true; // call internal read method - - this._read(state.highWaterMark); - - state.sync = false; // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - - if (!state.reading) n = howMuchToRead(nOrig, state); - } - - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - - if (ret === null) { - state.needReadable = state.length <= state.highWaterMark; - n = 0; - } else { - state.length -= n; - state.awaitDrain = 0; - } - - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. - - if (nOrig !== n && state.ended) endReadable(this); - } - - if (ret !== null) this.emit('data', ret); - return ret; -}; - -function onEofChunk(stream, state) { - debug('onEofChunk'); - if (state.ended) return; - - if (state.decoder) { - var chunk = state.decoder.end(); - - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } - } - - state.ended = true; - - if (state.sync) { - // if we are sync, wait until next tick to emit the data. - // Otherwise we risk emitting data in the flow() - // the readable code triggers during a read() call - emitReadable(stream); - } else { - // emit 'readable' now to make sure it gets picked up. - state.needReadable = false; - - if (!state.emittedReadable) { - state.emittedReadable = true; - emitReadable_(stream); - } - } -} // Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. 
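// Illustrative sketch (editor's note, not part of the patch): the power-of-two
// round-up that read(n) relies on (via computeNewHighWaterMark() above) when a
// caller asks for more than the current highWaterMark. Shown standalone with a
// few sample inputs; the function name here is made up.
'use strict';

function nextPowerOfTwo(n) {
  n--;
  n |= n >>> 1;
  n |= n >>> 2;
  n |= n >>> 4;
  n |= n >>> 8;
  n |= n >>> 16;
  return n + 1;
}

console.log(nextPowerOfTwo(3)); // 4
console.log(nextPowerOfTwo(4)); // 4
console.log(nextPowerOfTwo(16385)); // 32768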
- - -function emitReadable(stream) { - var state = stream._readableState; - debug('emitReadable', state.needReadable, state.emittedReadable); - state.needReadable = false; - - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - process.nextTick(emitReadable_, stream); - } -} - -function emitReadable_(stream) { - var state = stream._readableState; - debug('emitReadable_', state.destroyed, state.length, state.ended); - - if (!state.destroyed && (state.length || state.ended)) { - stream.emit('readable'); - state.emittedReadable = false; - } // The stream needs another readable event if - // 1. It is not flowing, as the flow mechanism will take - // care of it. - // 2. It is not ended. - // 3. It is below the highWaterMark, so we can schedule - // another readable later. - - - state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; - flow(stream); -} // at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. - - -function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - process.nextTick(maybeReadMore_, stream, state); - } -} - -function maybeReadMore_(stream, state) { - // Attempt to read more data if we should. - // - // The conditions for reading more data are (one of): - // - Not enough data buffered (state.length < state.highWaterMark). The loop - // is responsible for filling the buffer with enough data if such data - // is available. If highWaterMark is 0 and we are not in the flowing mode - // we should _not_ attempt to buffer any extra data. We'll get more data - // when the stream consumer calls read() instead. - // - No data in the buffer, and the stream is in flowing mode. In this mode - // the loop below is responsible for ensuring read() is called. Failing to - // call read here would abort the flow and there's no other mechanism for - // continuing the flow if the stream consumer has just subscribed to the - // 'data' event. - // - // In addition to the above conditions to keep reading data, the following - // conditions prevent the data from being read: - // - The stream has ended (state.ended). - // - There is already a pending 'read' operation (state.reading). This is a - // case where the the stream has called the implementation defined _read() - // method, but they are processing the call asynchronously and have _not_ - // called push() with new data. In this case we skip performing more - // read()s. The execution ends in this method again after the _read() ends - // up calling push() with more data. - while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { - var len = state.length; - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) // didn't get any data, stop spinning. - break; - } - - state.readingMore = false; -} // abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. 
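// Illustrative sketch (editor's note, not part of the patch): the paused-mode
// consumption pattern that the emitReadable()/maybeReadMore() scheduling above
// exists to support. process.stdin is used only as a convenient source.
'use strict';

process.stdin.on('readable', () => {
  let chunk;
  // read() returns null once the internal buffer is drained; another
  // 'readable' event is scheduled when more data (or EOF) arrives.
  while ((chunk = process.stdin.read()) !== null) {
    process.stdout.write(chunk);
  }
});

process.stdin.on('end', () => {
  process.stdout.write('done\n');
});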
- - -Readable.prototype._read = function (n) { - errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); -}; - -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - - case 1: - state.pipes = [state.pipes, dest]; - break; - - default: - state.pipes.push(dest); - break; - } - - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - var endFn = doEnd ? onend : unpipe; - if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); - dest.on('unpipe', onunpipe); - - function onunpipe(readable, unpipeInfo) { - debug('onunpipe'); - - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } - } - } - - function onend() { - debug('onend'); - dest.end(); - } // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - - - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - var cleanedUp = false; - - function cleanup() { - debug('cleanup'); // cleanup event handlers once the pipe is broken - - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', unpipe); - src.removeListener('data', ondata); - cleanedUp = true; // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } - - src.on('data', ondata); - - function ondata(chunk) { - debug('ondata'); - var ret = dest.write(chunk); - debug('dest.write', ret); - - if (ret === false) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', state.awaitDrain); - state.awaitDrain++; - } - - src.pause(); - } - } // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. - - - function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); - } // Make sure our error handler is attached before userland ones. - - - prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once. 
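// Illustrative sketch (editor's note, not part of the patch): the manual
// write()/'drain' handshake that pipe() automates through ondata() and
// pipeOnDrain() above. The file names are placeholders.
'use strict';

const fs = require('fs');

const src = fs.createReadStream('input.txt');
const dest = fs.createWriteStream('output.txt');

src.on('data', (chunk) => {
  // write() returning false means the destination is at or above its
  // highWaterMark: stop reading and wait for 'drain', much as pipe() does by
  // incrementing awaitDrain and pausing the source.
  if (!dest.write(chunk)) {
    src.pause();
    dest.once('drain', () => src.resume());
  }
});

src.on('end', () => dest.end());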
- - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - - dest.once('close', onclose); - - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - - dest.once('finish', onfinish); - - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } // tell the dest that it's being piped to - - - dest.emit('pipe', src); // start the flow if it hasn't been started already. - - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } - - return dest; -}; - -function pipeOnDrain(src) { - return function pipeOnDrainFunctionResult() { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; -} - -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - var unpipeInfo = { - hasUnpiped: false - }; // if we're not piping anywhere, then do nothing. - - if (state.pipesCount === 0) return this; // just one destination. most common case. - - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - if (!dest) dest = state.pipes; // got a match. - - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this, unpipeInfo); - return this; - } // slow case. multiple pipe destinations. - - - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - - for (var i = 0; i < len; i++) { - dests[i].emit('unpipe', this, { - hasUnpiped: false - }); - } - - return this; - } // try to find the right one. - - - var index = indexOf(state.pipes, dest); - if (index === -1) return this; - state.pipes.splice(index, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - dest.emit('unpipe', this, unpipeInfo); - return this; -}; // set up data events if they are asked for -// Ensure readable listeners eventually get something - - -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - var state = this._readableState; - - if (ev === 'data') { - // update readableListening so that resume() may be a no-op - // a few lines down. This is needed to support once('readable'). - state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused - - if (state.flowing !== false) this.resume(); - } else if (ev === 'readable') { - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.flowing = false; - state.emittedReadable = false; - debug('on readable', state.length, state.reading); - - if (state.length) { - emitReadable(this); - } else if (!state.reading) { - process.nextTick(nReadingNextTick, this); - } - } - } - - return res; -}; - -Readable.prototype.addListener = Readable.prototype.on; - -Readable.prototype.removeListener = function (ev, fn) { - var res = Stream.prototype.removeListener.call(this, ev, fn); - - if (ev === 'readable') { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. 
This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this); - } - - return res; -}; - -Readable.prototype.removeAllListeners = function (ev) { - var res = Stream.prototype.removeAllListeners.apply(this, arguments); - - if (ev === 'readable' || ev === undefined) { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this); - } - - return res; -}; - -function updateReadableListening(self) { - var state = self._readableState; - state.readableListening = self.listenerCount('readable') > 0; - - if (state.resumeScheduled && !state.paused) { - // flowing needs to be set to true now, otherwise - // the upcoming resume will not flow. - state.flowing = true; // crude way to check if we should resume - } else if (self.listenerCount('data') > 0) { - self.resume(); - } -} - -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} // pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. - - -Readable.prototype.resume = function () { - var state = this._readableState; - - if (!state.flowing) { - debug('resume'); // we flow only if there is no one listening - // for readable, but we still have to call - // resume() - - state.flowing = !state.readableListening; - resume(this, state); - } - - state.paused = false; - return this; -}; - -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - process.nextTick(resume_, stream, state); - } -} - -function resume_(stream, state) { - debug('resume', state.reading); - - if (!state.reading) { - stream.read(0); - } - - state.resumeScheduled = false; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} - -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - - if (this._readableState.flowing !== false) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - - this._readableState.paused = true; - return this; -}; - -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - - while (state.flowing && stream.read() !== null) { - ; - } -} // wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. - - -Readable.prototype.wrap = function (stream) { - var _this = this; - - var state = this._readableState; - var paused = false; - stream.on('end', function () { - debug('wrapped end'); - - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) _this.push(chunk); - } - - _this.push(null); - }); - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode - - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - - var ret = _this.push(chunk); - - if (!ret) { - paused = true; - stream.pause(); - } - }); // proxy all the other methods. 
- // important when wrapping filters and duplexes. - - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function methodWrap(method) { - return function methodWrapReturnFunction() { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } // proxy certain important events. - - - for (var n = 0; n < kProxyEvents.length; n++) { - stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); - } // when we try to consume some more bytes, simply unpause the - // underlying stream. - - - this._read = function (n) { - debug('wrapped _read', n); - - if (paused) { - paused = false; - stream.resume(); - } - }; - - return this; -}; - -if (typeof Symbol === 'function') { - Readable.prototype[Symbol.asyncIterator] = function () { - if (createReadableStreamAsyncIterator === undefined) { - createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); - } - - return createReadableStreamAsyncIterator(this); - }; -} - -Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.highWaterMark; - } -}); -Object.defineProperty(Readable.prototype, 'readableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState && this._readableState.buffer; - } -}); -Object.defineProperty(Readable.prototype, 'readableFlowing', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.flowing; - }, - set: function set(state) { - if (this._readableState) { - this._readableState.flowing = state; - } - } -}); // exposed for testing purposes only. - -Readable._fromList = fromList; -Object.defineProperty(Readable.prototype, 'readableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.length; - } -}); // Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. - -function fromList(n, state) { - // nothing buffered - if (state.length === 0) return null; - var ret; - if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { - // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); - state.buffer.clear(); - } else { - // read part of list - ret = state.buffer.consume(n, state.decoder); - } - return ret; -} - -function endReadable(stream) { - var state = stream._readableState; - debug('endReadable', state.endEmitted); - - if (!state.endEmitted) { - state.ended = true; - process.nextTick(endReadableNT, state, stream); - } -} - -function endReadableNT(state, stream) { - debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift. 
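// Illustrative sketch (editor's note, not part of the patch): consuming a
// stream with for await...of, which the Symbol.asyncIterator wiring above
// (createReadableStreamAsyncIterator) makes possible. Readable.from() is used
// only to get a small demo source.
'use strict';

const { Readable } = require('stream');

async function main() {
  const stream = Readable.from(['a', 'b', 'c']);
  for await (const chunk of stream) {
    console.log(chunk); // prints 'a', 'b', 'c'
  }
}

main().catch(console.error);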
- - if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the writable side is ready for autoDestroy as well - var wState = stream._writableState; - - if (!wState || wState.autoDestroy && wState.finished) { - stream.destroy(); - } - } - } -} - -if (typeof Symbol === 'function') { - Readable.from = function (iterable, opts) { - if (from === undefined) { - from = require('./internal/streams/from'); - } - - return from(Readable, iterable, opts); - }; -} - -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - - return -1; -} \ No newline at end of file diff --git a/lib/_stream_transform.js b/lib/_stream_transform.js deleted file mode 100644 index 41a738c4e9..0000000000 --- a/lib/_stream_transform.js +++ /dev/null @@ -1,201 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. 
-// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. -'use strict'; - -module.exports = Transform; - -var _require$codes = require('../errors').codes, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, - ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; - -var Duplex = require('./_stream_duplex'); - -require('inherits')(Transform, Duplex); - -function afterTransform(er, data) { - var ts = this._transformState; - ts.transforming = false; - var cb = ts.writecb; - - if (cb === null) { - return this.emit('error', new ERR_MULTIPLE_CALLBACK()); - } - - ts.writechunk = null; - ts.writecb = null; - if (data != null) // single equals check for both `null` and `undefined` - this.push(data); - cb(er); - var rs = this._readableState; - rs.reading = false; - - if (rs.needReadable || rs.length < rs.highWaterMark) { - this._read(rs.highWaterMark); - } -} - -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - Duplex.call(this, options); - this._transformState = { - afterTransform: afterTransform.bind(this), - needTransform: false, - transforming: false, - writecb: null, - writechunk: null, - writeencoding: null - }; // start out asking for a readable event once data is transformed. - - this._readableState.needReadable = true; // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. - - this._readableState.sync = false; - - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - if (typeof options.flush === 'function') this._flush = options.flush; - } // When the writable side finishes, then flush out anything remaining. - - - this.on('prefinish', prefinish); -} - -function prefinish() { - var _this = this; - - if (typeof this._flush === 'function' && !this._readableState.destroyed) { - this._flush(function (er, data) { - done(_this, er, data); - }); - } else { - done(this, null, null); - } -} - -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; // This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. 
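// Illustrative sketch (editor's note, not part of the patch): a minimal
// Transform subclass implementing the _transform()/_flush() contract described
// in the surrounding comment. The uppercasing is just a placeholder
// transformation.
'use strict';

const { Transform } = require('stream');

class Upper extends Transform {
  _transform(chunk, encoding, callback) {
    // push() hands transformed output to the readable side; callback() tells
    // the writable side this chunk has been consumed.
    this.push(chunk.toString().toUpperCase());
    callback();
  }

  _flush(callback) {
    // Runs after the writable side ends, before 'end' is emitted.
    this.push('\n-- done --\n');
    callback();
  }
}

process.stdin.pipe(new Upper()).pipe(process.stdout);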
-// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. - - -Transform.prototype._transform = function (chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); -}; - -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; // Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. - - -Transform.prototype._read = function (n) { - var ts = this._transformState; - - if (ts.writechunk !== null && !ts.transforming) { - ts.transforming = true; - - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. - ts.needTransform = true; - } -}; - -Transform.prototype._destroy = function (err, cb) { - Duplex.prototype._destroy.call(this, err, function (err2) { - cb(err2); - }); -}; - -function done(stream, er, data) { - if (er) return stream.emit('error', er); - if (data != null) // single equals check for both `null` and `undefined` - stream.push(data); // TODO(BridgeAR): Write a test for these two error cases - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - - if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); - if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); - return stream.push(null); -} \ No newline at end of file diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js deleted file mode 100644 index a2634d7c24..0000000000 --- a/lib/_stream_writable.js +++ /dev/null @@ -1,697 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. 
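// Illustrative sketch (editor's note, not part of the patch): the "implement
// an async _write(chunk, encoding, cb)" contract the header comment above
// describes; buffering and 'drain' emission are handled by Writable itself.
// The setTimeout stands in for a real asynchronous sink.
'use strict';

const { Writable } = require('stream');

const sink = new Writable({
  highWaterMark: 16,
  write(chunk, encoding, callback) {
    // Simulate an asynchronous destination; calling callback() releases the
    // next buffered chunk (and lets 'drain' fire if a writer is waiting).
    setTimeout(() => {
      console.log('wrote %d bytes', chunk.length);
      callback();
    }, 10);
  }
});

sink.write('hello');
sink.end('world');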
-'use strict'; - -module.exports = Writable; -/* */ - -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} // It seems a linked list but it is not -// there will be only 2 of these for each stream - - -function CorkedRequest(state) { - var _this = this; - - this.next = null; - this.entry = null; - - this.finish = function () { - onCorkedFinish(_this, state); - }; -} -/* */ - -/**/ - - -var Duplex; -/**/ - -Writable.WritableState = WritableState; -/**/ - -var internalUtil = { - deprecate: require('util-deprecate') -}; -/**/ - -/**/ - -var Stream = require('./internal/streams/stream'); -/**/ - - -var Buffer = require('buffer').Buffer; - -var OurUint8Array = global.Uint8Array || function () {}; - -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} - -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} - -var destroyImpl = require('./internal/streams/destroy'); - -var _require = require('./internal/streams/state'), - getHighWaterMark = _require.getHighWaterMark; - -var _require$codes = require('../errors').codes, - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, - ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, - ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, - ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; - -var errorOrDestroy = destroyImpl.errorOrDestroy; - -require('inherits')(Writable, Stream); - -function nop() {} - -function WritableState(options, stream, isDuplex) { - Duplex = Duplex || require('./_stream_duplex'); - options = options || {}; // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream, - // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. - - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream - // contains buffers or objects. - - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - - this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called - - this.finalCalled = false; // drain event flag. - - this.needDrain = false; // at the start of calling end() - - this.ending = false; // when end() has been called, and returned - - this.ended = false; // when 'finish' is emitted - - this.finished = false; // has it been destroyed - - this.destroyed = false; // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. 
- // Everything else in the universe uses 'utf8', though. - - this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - - this.length = 0; // a flag to see when we're in the middle of a write. - - this.writing = false; // when true all writes will be buffered until .uncork() call - - this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - - this.sync = true; // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - - this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) - - this.onwrite = function (er) { - onwrite(stream, er); - }; // the callback that the user supplies to write(chunk,encoding,cb) - - - this.writecb = null; // the amount that is being written when _write is called. - - this.writelen = 0; - this.bufferedRequest = null; - this.lastBufferedRequest = null; // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - - this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - - this.prefinished = false; // True if the error was already emitted and should not be thrown again - - this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true. - - this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end') - - this.autoDestroy = !!options.autoDestroy; // count buffered requests - - this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always - // one allocated and free to use, and we maintain at most two - - this.corkedRequestsFree = new CorkedRequest(this); -} - -WritableState.prototype.getBuffer = function getBuffer() { - var current = this.bufferedRequest; - var out = []; - - while (current) { - out.push(current); - current = current.next; - } - - return out; -}; - -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function writableStateBufferGetter() { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') - }); - } catch (_) {} -})(); // Test _writableState for inheritance to account for Duplex streams, -// whose prototype chain only points to Readable. - - -var realHasInstance; - -if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { - realHasInstance = Function.prototype[Symbol.hasInstance]; - Object.defineProperty(Writable, Symbol.hasInstance, { - value: function value(object) { - if (realHasInstance.call(this, object)) return true; - if (this !== Writable) return false; - return object && object._writableState instanceof WritableState; - } - }); -} else { - realHasInstance = function realHasInstance(object) { - return object instanceof this; - }; -} - -function Writable(options) { - Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too. 
- // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. - // Checking for a Stream.Duplex instance is faster here instead of inside - // the WritableState constructor, at least with V8 6.5 - - var isDuplex = this instanceof Duplex; - if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); - this._writableState = new WritableState(options, this, isDuplex); // legacy. - - this.writable = true; - - if (options) { - if (typeof options.write === 'function') this._write = options.write; - if (typeof options.writev === 'function') this._writev = options.writev; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - if (typeof options.final === 'function') this._final = options.final; - } - - Stream.call(this); -} // Otherwise people can pipe Writable streams, which is just wrong. - - -Writable.prototype.pipe = function () { - errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); -}; - -function writeAfterEnd(stream, cb) { - var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb - - errorOrDestroy(stream, er); - process.nextTick(cb, er); -} // Checks that a user-supplied chunk is valid, especially for the particular -// mode the stream is in. Currently this means that `null` is never accepted -// and undefined/non-string values are only allowed in object mode. - - -function validChunk(stream, state, chunk, cb) { - var er; - - if (chunk === null) { - er = new ERR_STREAM_NULL_VALUES(); - } else if (typeof chunk !== 'string' && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); - } - - if (er) { - errorOrDestroy(stream, er); - process.nextTick(cb, er); - return false; - } - - return true; -} - -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - - var isBuf = !state.objectMode && _isUint8Array(chunk); - - if (isBuf && !Buffer.isBuffer(chunk)) { - chunk = _uint8ArrayToBuffer(chunk); - } - - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - if (typeof cb !== 'function') cb = nop; - if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); - } - return ret; -}; - -Writable.prototype.cork = function () { - this._writableState.corked++; -}; - -Writable.prototype.uncork = function () { - var state = this._writableState; - - if (state.corked) { - state.corked--; - if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); - } -}; - -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. 
- if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; - -Object.defineProperty(Writable.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); - } -}); - -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = Buffer.from(chunk, encoding); - } - - return chunk; -} - -Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.highWaterMark; - } -}); // if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. - -function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { - if (!isBuf) { - var newChunk = decodeChunk(state, chunk, encoding); - - if (chunk !== newChunk) { - isBuf = true; - encoding = 'buffer'; - chunk = newChunk; - } - } - - var len = state.objectMode ? 1 : chunk.length; - state.length += len; - var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false. 
- - if (!ret) state.needDrain = true; - - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = { - chunk: chunk, - encoding: encoding, - isBuf: isBuf, - callback: cb, - next: null - }; - - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); - } - - return ret; -} - -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; -} - -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; - - if (sync) { - // defer the callback if we are being called synchronously - // to avoid piling up things on the stack - process.nextTick(cb, er); // this can emit finish, and it will always happen - // after error - - process.nextTick(finishMaybe, stream, state); - stream._writableState.errorEmitted = true; - errorOrDestroy(stream, er); - } else { - // the caller expect this to happen before if - // it is async - cb(er); - stream._writableState.errorEmitted = true; - errorOrDestroy(stream, er); // this can emit finish, but finish must - // always follow error - - finishMaybe(stream, state); - } -} - -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; -} - -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); - onwriteStateUpdate(state); - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state) || stream.destroyed; - - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } - - if (sync) { - process.nextTick(afterWrite, stream, state, finished, cb); - } else { - afterWrite(stream, state, finished, cb); - } - } -} - -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} // Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. 
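// Illustrative sketch (editor's note, not part of the patch): cork()/uncork()
// routing writes through the buffered-request path in writeOrBuffer() above so
// they can be flushed as a single _writev() batch. The console output is only
// for the demo.
'use strict';

const { Writable } = require('stream');

const sink = new Writable({
  write(chunk, encoding, callback) {
    callback();
  },
  writev(chunks, callback) {
    // With cork() in effect, the three writes below arrive here as one batch.
    console.log('flushing %d chunks in one _writev() call', chunks.length);
    callback();
  }
});

sink.cork();
sink.write('a');
sink.write('b');
sink.write('c');
process.nextTick(() => sink.uncork());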
- - -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); - } -} // if there's something in the buffer waiting, then process it - - -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - var count = 0; - var allBuffers = true; - - while (entry) { - buffer[count] = entry; - if (!entry.isBuf) allBuffers = false; - entry = entry.next; - count += 1; - } - - buffer.allBuffers = allBuffers; - doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time - // as the hot path ends with doWrite - - state.pendingcb++; - state.lastBufferedRequest = null; - - if (holder.next) { - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - state.corkedRequestsFree = new CorkedRequest(state); - } - - state.bufferedRequestCount = 0; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. - - if (state.writing) { - break; - } - } - - if (entry === null) state.lastBufferedRequest = null; - } - - state.bufferedRequest = entry; - state.bufferProcessing = false; -} - -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); -}; - -Writable.prototype._writev = null; - -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; - - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks - - if (state.corked) { - state.corked = 1; - this.uncork(); - } // ignore unnecessary end() calls. 
- - - if (!state.ending) endWritable(this, state, cb); - return this; -}; - -Object.defineProperty(Writable.prototype, 'writableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.length; - } -}); - -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} - -function callFinal(stream, state) { - stream._final(function (err) { - state.pendingcb--; - - if (err) { - errorOrDestroy(stream, err); - } - - state.prefinished = true; - stream.emit('prefinish'); - finishMaybe(stream, state); - }); -} - -function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function' && !state.destroyed) { - state.pendingcb++; - state.finalCalled = true; - process.nextTick(callFinal, stream, state); - } else { - state.prefinished = true; - stream.emit('prefinish'); - } - } -} - -function finishMaybe(stream, state) { - var need = needFinish(state); - - if (need) { - prefinish(stream, state); - - if (state.pendingcb === 0) { - state.finished = true; - stream.emit('finish'); - - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the readable side is ready for autoDestroy as well - var rState = stream._readableState; - - if (!rState || rState.autoDestroy && rState.endEmitted) { - stream.destroy(); - } - } - } - } - - return need; -} - -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - - if (cb) { - if (state.finished) process.nextTick(cb);else stream.once('finish', cb); - } - - state.ended = true; - stream.writable = false; -} - -function onCorkedFinish(corkReq, state, err) { - var entry = corkReq.entry; - corkReq.entry = null; - - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } // reuse the free corkReq. 
- - - state.corkedRequestsFree.next = corkReq; -} - -Object.defineProperty(Writable.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._writableState === undefined) { - return false; - } - - return this._writableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._writableState) { - return; - } // backward compatibility, the user is explicitly - // managing destroyed - - - this._writableState.destroyed = value; - } -}); -Writable.prototype.destroy = destroyImpl.destroy; -Writable.prototype._undestroy = destroyImpl.undestroy; - -Writable.prototype._destroy = function (err, cb) { - cb(err); -}; \ No newline at end of file diff --git a/lib/internal/streams/async_iterator.js b/lib/internal/streams/async_iterator.js deleted file mode 100644 index 9fb615a2f3..0000000000 --- a/lib/internal/streams/async_iterator.js +++ /dev/null @@ -1,207 +0,0 @@ -'use strict'; - -var _Object$setPrototypeO; - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -var finished = require('./end-of-stream'); - -var kLastResolve = Symbol('lastResolve'); -var kLastReject = Symbol('lastReject'); -var kError = Symbol('error'); -var kEnded = Symbol('ended'); -var kLastPromise = Symbol('lastPromise'); -var kHandlePromise = Symbol('handlePromise'); -var kStream = Symbol('stream'); - -function createIterResult(value, done) { - return { - value: value, - done: done - }; -} - -function readAndResolve(iter) { - var resolve = iter[kLastResolve]; - - if (resolve !== null) { - var data = iter[kStream].read(); // we defer if data is null - // we can be expecting either 'end' or - // 'error' - - if (data !== null) { - iter[kLastPromise] = null; - iter[kLastResolve] = null; - iter[kLastReject] = null; - resolve(createIterResult(data, false)); - } - } -} - -function onReadable(iter) { - // we wait for the next tick, because it might - // emit an error with process.nextTick - process.nextTick(readAndResolve, iter); -} - -function wrapForNext(lastPromise, iter) { - return function (resolve, reject) { - lastPromise.then(function () { - if (iter[kEnded]) { - resolve(createIterResult(undefined, true)); - return; - } - - iter[kHandlePromise](resolve, reject); - }, reject); - }; -} - -var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); -var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { - get stream() { - return this[kStream]; - }, - - next: function next() { - var _this = this; - - // if we have detected an error in the meanwhile - // reject straight away - var error = this[kError]; - - if (error !== null) { - return Promise.reject(error); - } - - if (this[kEnded]) { - return Promise.resolve(createIterResult(undefined, true)); - } - - if (this[kStream].destroyed) { - // We need to defer via nextTick because if .destroy(err) is - // called, the error will be emitted via nextTick, and - // we cannot guarantee that there is no error lingering around - // waiting to be emitted. 
- return new Promise(function (resolve, reject) { - process.nextTick(function () { - if (_this[kError]) { - reject(_this[kError]); - } else { - resolve(createIterResult(undefined, true)); - } - }); - }); - } // if we have multiple next() calls - // we will wait for the previous Promise to finish - // this logic is optimized to support for await loops, - // where next() is only called once at a time - - - var lastPromise = this[kLastPromise]; - var promise; - - if (lastPromise) { - promise = new Promise(wrapForNext(lastPromise, this)); - } else { - // fast path needed to support multiple this.push() - // without triggering the next() queue - var data = this[kStream].read(); - - if (data !== null) { - return Promise.resolve(createIterResult(data, false)); - } - - promise = new Promise(this[kHandlePromise]); - } - - this[kLastPromise] = promise; - return promise; - } -}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { - return this; -}), _defineProperty(_Object$setPrototypeO, "return", function _return() { - var _this2 = this; - - // destroy(err, cb) is a private API - // we can guarantee we have that here, because we control the - // Readable class this is attached to - return new Promise(function (resolve, reject) { - _this2[kStream].destroy(null, function (err) { - if (err) { - reject(err); - return; - } - - resolve(createIterResult(undefined, true)); - }); - }); -}), _Object$setPrototypeO), AsyncIteratorPrototype); - -var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { - var _Object$create; - - var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { - value: stream, - writable: true - }), _defineProperty(_Object$create, kLastResolve, { - value: null, - writable: true - }), _defineProperty(_Object$create, kLastReject, { - value: null, - writable: true - }), _defineProperty(_Object$create, kError, { - value: null, - writable: true - }), _defineProperty(_Object$create, kEnded, { - value: stream._readableState.endEmitted, - writable: true - }), _defineProperty(_Object$create, kHandlePromise, { - value: function value(resolve, reject) { - var data = iterator[kStream].read(); - - if (data) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(data, false)); - } else { - iterator[kLastResolve] = resolve; - iterator[kLastReject] = reject; - } - }, - writable: true - }), _Object$create)); - iterator[kLastPromise] = null; - finished(stream, function (err) { - if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { - var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise - // returned by next() and store the error - - if (reject !== null) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - reject(err); - } - - iterator[kError] = err; - return; - } - - var resolve = iterator[kLastResolve]; - - if (resolve !== null) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(undefined, true)); - } - - iterator[kEnded] = true; - }); - stream.on('readable', onReadable.bind(null, iterator)); - return iterator; -}; - -module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/lib/internal/streams/buffer_list.js b/lib/internal/streams/buffer_list.js deleted file mode 100644 index cdea425f19..0000000000 --- 
a/lib/internal/streams/buffer_list.js +++ /dev/null @@ -1,210 +0,0 @@ -'use strict'; - -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } - -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -var _require = require('buffer'), - Buffer = _require.Buffer; - -var _require2 = require('util'), - inspect = _require2.inspect; - -var custom = inspect && inspect.custom || 'inspect'; - -function copyBuffer(src, target, offset) { - Buffer.prototype.copy.call(src, target, offset); -} - -module.exports = -/*#__PURE__*/ -function () { - function BufferList() { - _classCallCheck(this, BufferList); - - this.head = null; - this.tail = null; - this.length = 0; - } - - _createClass(BufferList, [{ - key: "push", - value: function push(v) { - var entry = { - data: v, - next: null - }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - } - }, { - key: "unshift", - value: function unshift(v) { - var entry = { - data: v, - next: this.head - }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - } - }, { - key: "shift", - value: function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; - } - }, { - key: "clear", - value: function clear() { - this.head = this.tail = null; - this.length = 0; - } - }, { - key: "join", - value: function join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - - while (p = p.next) { - ret += s + p.data; - } - - return ret; - } - }, { - key: "concat", - value: function concat(n) { - if (this.length === 0) return Buffer.alloc(0); - var ret = Buffer.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - - while (p) { - copyBuffer(p.data, ret, i); - i += 
p.data.length; - p = p.next; - } - - return ret; - } // Consumes a specified amount of bytes or characters from the buffered data. - - }, { - key: "consume", - value: function consume(n, hasStrings) { - var ret; - - if (n < this.head.data.length) { - // `slice` is the same for buffers and strings. - ret = this.head.data.slice(0, n); - this.head.data = this.head.data.slice(n); - } else if (n === this.head.data.length) { - // First chunk is a perfect match. - ret = this.shift(); - } else { - // Result spans more than one buffer. - ret = hasStrings ? this._getString(n) : this._getBuffer(n); - } - - return ret; - } - }, { - key: "first", - value: function first() { - return this.head.data; - } // Consumes a specified amount of characters from the buffered data. - - }, { - key: "_getString", - value: function _getString(n) { - var p = this.head; - var c = 1; - var ret = p.data; - n -= ret.length; - - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = str.slice(nb); - } - - break; - } - - ++c; - } - - this.length -= c; - return ret; - } // Consumes a specified amount of bytes from the buffered data. - - }, { - key: "_getBuffer", - value: function _getBuffer(n) { - var ret = Buffer.allocUnsafe(n); - var p = this.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = buf.slice(nb); - } - - break; - } - - ++c; - } - - this.length -= c; - return ret; - } // Make sure the linked list only shows the minimal necessary information. - - }, { - key: custom, - value: function value(_, options) { - return inspect(this, _objectSpread({}, options, { - // Only inspect one level. - depth: 0, - // It should not recurse. 
- customInspect: false - })); - } - }]); - - return BufferList; -}(); \ No newline at end of file diff --git a/lib/internal/streams/destroy.js b/lib/internal/streams/destroy.js deleted file mode 100644 index 3268a16f3b..0000000000 --- a/lib/internal/streams/destroy.js +++ /dev/null @@ -1,105 +0,0 @@ -'use strict'; // undocumented cb() API, needed for core, not for public API - -function destroy(err, cb) { - var _this = this; - - var readableDestroyed = this._readableState && this._readableState.destroyed; - var writableDestroyed = this._writableState && this._writableState.destroyed; - - if (readableDestroyed || writableDestroyed) { - if (cb) { - cb(err); - } else if (err) { - if (!this._writableState) { - process.nextTick(emitErrorNT, this, err); - } else if (!this._writableState.errorEmitted) { - this._writableState.errorEmitted = true; - process.nextTick(emitErrorNT, this, err); - } - } - - return this; - } // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks - - - if (this._readableState) { - this._readableState.destroyed = true; - } // if this is a duplex stream mark the writable part as destroyed as well - - - if (this._writableState) { - this._writableState.destroyed = true; - } - - this._destroy(err || null, function (err) { - if (!cb && err) { - if (!_this._writableState) { - process.nextTick(emitErrorAndCloseNT, _this, err); - } else if (!_this._writableState.errorEmitted) { - _this._writableState.errorEmitted = true; - process.nextTick(emitErrorAndCloseNT, _this, err); - } else { - process.nextTick(emitCloseNT, _this); - } - } else if (cb) { - process.nextTick(emitCloseNT, _this); - cb(err); - } else { - process.nextTick(emitCloseNT, _this); - } - }); - - return this; -} - -function emitErrorAndCloseNT(self, err) { - emitErrorNT(self, err); - emitCloseNT(self); -} - -function emitCloseNT(self) { - if (self._writableState && !self._writableState.emitClose) return; - if (self._readableState && !self._readableState.emitClose) return; - self.emit('close'); -} - -function undestroy() { - if (this._readableState) { - this._readableState.destroyed = false; - this._readableState.reading = false; - this._readableState.ended = false; - this._readableState.endEmitted = false; - } - - if (this._writableState) { - this._writableState.destroyed = false; - this._writableState.ended = false; - this._writableState.ending = false; - this._writableState.finalCalled = false; - this._writableState.prefinished = false; - this._writableState.finished = false; - this._writableState.errorEmitted = false; - } -} - -function emitErrorNT(self, err) { - self.emit('error', err); -} - -function errorOrDestroy(stream, err) { - // We have tests that rely on errors being emitted - // in the same tick, so changing this is semver major. - // For now when you opt-in to autoDestroy we allow - // the error to be emitted nextTick. In a future - // semver major update we should change the default to this. 
- var rState = stream._readableState; - var wState = stream._writableState; - if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); -} - -module.exports = { - destroy: destroy, - undestroy: undestroy, - errorOrDestroy: errorOrDestroy -}; \ No newline at end of file diff --git a/lib/internal/streams/end-of-stream.js b/lib/internal/streams/end-of-stream.js deleted file mode 100644 index 831f286d98..0000000000 --- a/lib/internal/streams/end-of-stream.js +++ /dev/null @@ -1,104 +0,0 @@ -// Ported from https://github.com/mafintosh/end-of-stream with -// permission from the author, Mathias Buus (@mafintosh). -'use strict'; - -var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; - -function once(callback) { - var called = false; - return function () { - if (called) return; - called = true; - - for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { - args[_key] = arguments[_key]; - } - - callback.apply(this, args); - }; -} - -function noop() {} - -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -} - -function eos(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - callback = once(callback || noop); - var readable = opts.readable || opts.readable !== false && stream.readable; - var writable = opts.writable || opts.writable !== false && stream.writable; - - var onlegacyfinish = function onlegacyfinish() { - if (!stream.writable) onfinish(); - }; - - var writableEnded = stream._writableState && stream._writableState.finished; - - var onfinish = function onfinish() { - writable = false; - writableEnded = true; - if (!readable) callback.call(stream); - }; - - var readableEnded = stream._readableState && stream._readableState.endEmitted; - - var onend = function onend() { - readable = false; - readableEnded = true; - if (!writable) callback.call(stream); - }; - - var onerror = function onerror(err) { - callback.call(stream, err); - }; - - var onclose = function onclose() { - var err; - - if (readable && !readableEnded) { - if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } - - if (writable && !writableEnded) { - if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } - }; - - var onrequest = function onrequest() { - stream.req.on('finish', onfinish); - }; - - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest();else stream.on('request', onrequest); - } else if (writable && !stream._writableState) { - // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } - - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - return function () { - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', 
onclose); - }; -} - -module.exports = eos; \ No newline at end of file diff --git a/lib/internal/streams/from-browser.js b/lib/internal/streams/from-browser.js deleted file mode 100644 index a4ce56f3c9..0000000000 --- a/lib/internal/streams/from-browser.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = function () { - throw new Error('Readable.from is not available in the browser') -}; diff --git a/lib/internal/streams/from.js b/lib/internal/streams/from.js deleted file mode 100644 index 6c41284416..0000000000 --- a/lib/internal/streams/from.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict'; - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } - -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; - -function from(Readable, iterable, opts) { - var iterator; - - if (iterable && typeof iterable.next === 'function') { - iterator = iterable; - } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); - - var readable = new Readable(_objectSpread({ - objectMode: true - }, opts)); // Reading boolean to protect against _read - // being called before last iteration completion. 
- - var reading = false; - - readable._read = function () { - if (!reading) { - reading = true; - next(); - } - }; - - function next() { - return _next2.apply(this, arguments); - } - - function _next2() { - _next2 = _asyncToGenerator(function* () { - try { - var _ref = yield iterator.next(), - value = _ref.value, - done = _ref.done; - - if (done) { - readable.push(null); - } else if (readable.push((yield value))) { - next(); - } else { - reading = false; - } - } catch (err) { - readable.destroy(err); - } - }); - return _next2.apply(this, arguments); - } - - return readable; -} - -module.exports = from; \ No newline at end of file diff --git a/lib/internal/streams/pipeline.js b/lib/internal/streams/pipeline.js deleted file mode 100644 index 6589909889..0000000000 --- a/lib/internal/streams/pipeline.js +++ /dev/null @@ -1,97 +0,0 @@ -// Ported from https://github.com/mafintosh/pump with -// permission from the author, Mathias Buus (@mafintosh). -'use strict'; - -var eos; - -function once(callback) { - var called = false; - return function () { - if (called) return; - called = true; - callback.apply(void 0, arguments); - }; -} - -var _require$codes = require('../../../errors').codes, - ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; - -function noop(err) { - // Rethrow the error if it exists to avoid swallowing it - if (err) throw err; -} - -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -} - -function destroyer(stream, reading, writing, callback) { - callback = once(callback); - var closed = false; - stream.on('close', function () { - closed = true; - }); - if (eos === undefined) eos = require('./end-of-stream'); - eos(stream, { - readable: reading, - writable: writing - }, function (err) { - if (err) return callback(err); - closed = true; - callback(); - }); - var destroyed = false; - return function (err) { - if (closed) return; - if (destroyed) return; - destroyed = true; // request.destroy just do .end - .abort is what we want - - if (isRequest(stream)) return stream.abort(); - if (typeof stream.destroy === 'function') return stream.destroy(); - callback(err || new ERR_STREAM_DESTROYED('pipe')); - }; -} - -function call(fn) { - fn(); -} - -function pipe(from, to) { - return from.pipe(to); -} - -function popCallback(streams) { - if (!streams.length) return noop; - if (typeof streams[streams.length - 1] !== 'function') return noop; - return streams.pop(); -} - -function pipeline() { - for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { - streams[_key] = arguments[_key]; - } - - var callback = popCallback(streams); - if (Array.isArray(streams[0])) streams = streams[0]; - - if (streams.length < 2) { - throw new ERR_MISSING_ARGS('streams'); - } - - var error; - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1; - var writing = i > 0; - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err; - if (err) destroys.forEach(call); - if (reading) return; - destroys.forEach(call); - callback(error); - }); - }); - return streams.reduce(pipe); -} - -module.exports = pipeline; \ No newline at end of file diff --git a/lib/internal/streams/state.js b/lib/internal/streams/state.js deleted file mode 100644 index 19887eb8a9..0000000000 --- a/lib/internal/streams/state.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict'; - -var ERR_INVALID_OPT_VALUE = 
require('../../../errors').codes.ERR_INVALID_OPT_VALUE; - -function highWaterMarkFrom(options, isDuplex, duplexKey) { - return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; -} - -function getHighWaterMark(state, options, duplexKey, isDuplex) { - var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); - - if (hwm != null) { - if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { - var name = isDuplex ? duplexKey : 'highWaterMark'; - throw new ERR_INVALID_OPT_VALUE(name, hwm); - } - - return Math.floor(hwm); - } // Default value - - - return state.objectMode ? 16 : 16 * 1024; -} - -module.exports = { - getHighWaterMark: getHighWaterMark -}; \ No newline at end of file diff --git a/lib/internal/streams/stream-browser.js b/lib/internal/streams/stream-browser.js deleted file mode 100644 index 9332a3fdae..0000000000 --- a/lib/internal/streams/stream-browser.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('events').EventEmitter; diff --git a/lib/internal/streams/stream.js b/lib/internal/streams/stream.js deleted file mode 100644 index ce2ad5b6ee..0000000000 --- a/lib/internal/streams/stream.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('stream'); diff --git a/package.json b/package.json index 48fa4741e3..182f7fd189 100644 --- a/package.json +++ b/package.json @@ -2,69 +2,72 @@ "name": "readable-stream", "version": "3.6.0", "description": "Streams3, a user-land copy of the stream library from Node.js", - "main": "readable.js", - "engines": { - "node": ">= 6" - }, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "devDependencies": { - "@babel/cli": "^7.2.0", - "@babel/core": "^7.2.0", - "@babel/polyfill": "^7.0.0", - "@babel/preset-env": "^7.2.0", - "airtap": "^4.0.1", - "airtap-playwright": "^1.0.1", - "airtap-sauce": "^1.1.0", - "assert": "^1.4.0", - "bl": "^2.0.0", - "deep-strict-equal": "^0.2.0", - "events.once": "^2.0.2", - "glob": "^7.1.2", - "gunzip-maybe": "^1.4.1", - "hyperquest": "^2.1.3", - "lolex": "^2.6.0", - "nyc": "^11.0.0", - "pump": "^3.0.0", - "rimraf": "^2.6.2", - "tap": "^12.0.0", - "tape": "^4.9.0", - "tar-fs": "^1.16.2", - "util-promisify": "^2.1.0" - }, - "scripts": { - "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", - "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", - "test-browsers": "airtap test/browser.js", - "test-browsers-local": "airtap -p local test/browser.js", - "cover": "nyc npm test", - "report": "nyc report --reporter=lcov", - "update-browser-errors": "babel -o errors-browser.js errors.js" - }, - "repository": { - "type": "git", - "url": "git://github.com/nodejs/readable-stream" - }, + "author": "NearForm Ltd", + "homepage": "https://github.com/nodejs/readable-stream", + "license": "MIT", + "licenses": [ + { + "type": "MIT", + "url": "https://choosealicense.com/licenses/mit/" + } + ], "keywords": [ "readable", "stream", "pipe" ], + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "bugs": { + "url": "https://github.com/nodejs/readable-stream/issues" + }, + "main": "lib/index.js", + "files": [ + "lib", + "LICENSE", + "README.md" + ], "browser": { "util": false, "worker_threads": false, - "./errors": "./errors-browser.js", - "./readable.js": "./readable-browser.js", - "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", - "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + "./lib/index.js": 
"./lib/browser.js", + "./lib/internal/errors.js": "./lib/internal/errors-browser.js" + }, + "scripts": { + "build": "node build/build.mjs", + "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", + "test-browsers": "airtap test/browser/test-*.js", + "test-browsers-local": "airtap -p local test/browser/test-*.js", + "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", + "format": "prettier -w src", + "lint": "eslint src" + }, + "dependencies": { + "inherits": "^2.0.4", + "util-deprecate": "^1.0.2" + }, + "devDependencies": { + "airtap": "^4.0.4", + "airtap-playwright": "^1.0.1", + "c8": "^7.11.0", + "eslint": "^7.32.0", + "eslint-config-standard": "^16.0.3", + "lolex": "^6.0.0", + "prettier": "^2.6.2", + "tap": "^16.0.1", + "tape": "^5.5.2", + "tar": "^6.1.11", + "undici": "^5.0.0", + "util-promisify": "^3.0.0" + }, + "engines": { + "node": ">= 14.15.0" }, "nyc": { "include": [ - "lib/**.js" + "lib/**/*.js" ] - }, - "license": "MIT" + } } diff --git a/prettier.config.cjs b/prettier.config.cjs new file mode 100644 index 0000000000..3f8a79d266 --- /dev/null +++ b/prettier.config.cjs @@ -0,0 +1,7 @@ +module.exports = { + printWidth: 120, + semi: false, + singleQuote: true, + bracketSpacing: true, + trailingComma: 'none' +} diff --git a/readable-browser.js b/readable-browser.js deleted file mode 100644 index adbf60de83..0000000000 --- a/readable-browser.js +++ /dev/null @@ -1,9 +0,0 @@ -exports = module.exports = require('./lib/_stream_readable.js'); -exports.Stream = exports; -exports.Readable = exports; -exports.Writable = require('./lib/_stream_writable.js'); -exports.Duplex = require('./lib/_stream_duplex.js'); -exports.Transform = require('./lib/_stream_transform.js'); -exports.PassThrough = require('./lib/_stream_passthrough.js'); -exports.finished = require('./lib/internal/streams/end-of-stream.js'); -exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/readable.js b/readable.js deleted file mode 100644 index 9e0ca120de..0000000000 --- a/readable.js +++ /dev/null @@ -1,16 +0,0 @@ -var Stream = require('stream'); -if (process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream.Readable; - Object.assign(module.exports, Stream); - module.exports.Stream = Stream; -} else { - exports = module.exports = require('./lib/_stream_readable.js'); - exports.Stream = Stream || exports; - exports.Readable = exports; - exports.Writable = require('./lib/_stream_writable.js'); - exports.Duplex = require('./lib/_stream_duplex.js'); - exports.Transform = require('./lib/_stream_transform.js'); - exports.PassThrough = require('./lib/_stream_passthrough.js'); - exports.finished = require('./lib/internal/streams/end-of-stream.js'); - exports.pipeline = require('./lib/internal/streams/pipeline.js'); -} diff --git a/src/browser.js b/src/browser.js new file mode 100644 index 0000000000..eb58b61bfe --- /dev/null +++ b/src/browser.js @@ -0,0 +1,16 @@ +'use strict' + +const Readable = require('./_stream_readable') + +module.exports = Readable +module.exports.Stream = require('./internal/streams/legacy') +module.exports.Readable = Readable +module.exports.Writable = require('./_stream_writable') +module.exports.Duplex = require('./_stream_duplex') +module.exports.Transform = require('./_stream_transform') +module.exports.PassThrough = require('./_stream_passthrough') +module.exports.finished = require('./internal/streams/end-of-stream') +module.exports.pipeline = require('./internal/streams/pipeline') + 
+// Allow default importing +module.exports.default = module.exports diff --git a/src/index.js b/src/index.js new file mode 100644 index 0000000000..7cdb468fad --- /dev/null +++ b/src/index.js @@ -0,0 +1,31 @@ +'use strict' + +const Stream = require('stream') + +if (Stream && process.env.READABLE_STREAM === 'disable') { + module.exports = Stream.Readable + + module.exports.Stream = require('./internal/streams/legacy') + module.exports.Readable = Stream.Readable + module.exports.Writable = Stream.Writable + module.exports.Duplex = Stream.Duplex + module.exports.Transform = Stream.Transform + module.exports.PassThrough = Stream.PassThrough + module.exports.finished = Stream.finished + module.exports.pipeline = Stream.pipeline +} else { + const Readable = require('./_stream_readable') + + module.exports = Readable + module.exports.Stream = require('./internal/streams/legacy') + module.exports.Readable = Readable + module.exports.Writable = require('./_stream_writable') + module.exports.Duplex = require('./_stream_duplex') + module.exports.Transform = require('./_stream_transform') + module.exports.PassThrough = require('./_stream_passthrough') + module.exports.finished = require('./internal/streams/end-of-stream') + module.exports.pipeline = require('./internal/streams/pipeline') +} + +// Allow default importing +module.exports.default = module.exports diff --git a/src/test/browser/test-stream-big-packet.js b/src/test/browser/test-stream-big-packet.js new file mode 100644 index 0000000000..38e4b2e2ac --- /dev/null +++ b/src/test/browser/test-stream-big-packet.js @@ -0,0 +1,68 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Transform } = require('../../lib') + +test('big packet', function (t) { + t.plan(3) + + let passed = false + + function PassThrough() { + Transform.call(this) + } + inherits(PassThrough, Transform) + + PassThrough.prototype._transform = function (chunk, encoding, done) { + this.push(chunk) + done() + } + + function TestStream() { + Transform.call(this) + } + inherits(TestStream, Transform) + + TestStream.prototype._transform = function (chunk, encoding, done) { + if (!passed) { + // Char 'a' only exists in the last write + passed = indexOf(chunk.toString(), 'a') >= 0 + } + if (passed) { + t.ok(passed) + } + done() + } + + const s1 = new PassThrough() + const s2 = new PassThrough() + const s3 = new TestStream() + + s1.pipe(s3) + // Don't let s2 auto close which may close s3 + s2.pipe(s3, { end: false }) + + // We must write a buffer larger than highWaterMark + const big = Buffer.alloc(s1._writableState.highWaterMark + 1) + big.fill('x') + + // Since big is larger than highWaterMark, it will be buffered internally. + t.notOk(s1.write(big)) + + // 'tiny' is small enough to pass through internal buffer. 
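+  // The two write() assertions around this point lean on the streams3
+  // backpressure contract: write() returns false once the buffered data reaches
+  // highWaterMark, and the producer is expected to wait for 'drain' before
+  // writing more. A minimal sketch of that contract (illustrative only, not part
+  // of this test; the 4-byte highWaterMark is an arbitrary choice):
+  //
+  //   const w = new Writable({ highWaterMark: 4, write: (c, e, cb) => setImmediate(cb) })
+  //   if (!w.write(Buffer.alloc(8))) {        // larger than highWaterMark -> false
+  //     w.once('drain', () => w.write('ok'))  // resume writing once drained
+  //   }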
+ t.ok(s2.write('tiny')) + + // Write some small data in next IO loop, which will never be written to s3 + // Because 'drain' event is not emitted from s1 and s1 is still paused + setImmediate(s1.write.bind(s1), 'later') + + function indexOf(xs, x) { + for (let i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) { + return i + } + } + return -1 + } +}) diff --git a/src/test/browser/test-stream-big-push.js b/src/test/browser/test-stream-big-push.js new file mode 100644 index 0000000000..46b2524df6 --- /dev/null +++ b/src/test/browser/test-stream-big-push.js @@ -0,0 +1,70 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('big push', function (t) { + t.plan(10) + + const str = 'asdfasdfasdfasdfasdf' + + const r = new Readable({ + highWaterMark: 5, + encoding: 'utf8' + }) + + let reads = 0 + let eofed = false + let ended = false + + r._read = function (n) { + if (reads === 0) { + setTimeout(function () { + r.push(str) + }) + reads++ + } else if (reads === 1) { + const ret = r.push(str) + t.equal(ret, false) + reads++ + } else { + t.notOk(eofed) + eofed = true + r.push(null) + } + } + + r.on('end', function () { + ended = true + }) + + // push some data in to start. + // we've never gotten any read event at this point. + const ret = r.push(str) + + // should be false. > hwm + t.notOk(ret) + let chunk = r.read() + t.equal(chunk, str) + + chunk = r.read() + t.equal(chunk, null) + + r.once('readable', function () { + // this time, we'll get *all* the remaining data, because + // it's been added synchronously, as the read WOULD take + // us below the hwm, and so it triggered a _read() again, + // which synchronously added more, which we then return. + chunk = r.read() + t.equal(chunk, str + str) + + chunk = r.read() + t.equal(chunk, null) + }) + + r.on('end', function () { + t.ok(eofed) + t.ok(ended) + t.equal(reads, 2) + }) +}) diff --git a/src/test/browser/test-stream-duplex.js b/src/test/browser/test-stream-duplex.js new file mode 100644 index 0000000000..ec17de6c4e --- /dev/null +++ b/src/test/browser/test-stream-duplex.js @@ -0,0 +1,36 @@ +'use strict' + +const test = require('tape') +const { Duplex } = require('../../lib') + +test('duplex', function (t) { + t.plan(4) + + const stream = new Duplex({ objectMode: true }) + + t.ok(stream._readableState.objectMode) + t.ok(stream._writableState.objectMode) + + let written + let read + + stream._write = function (obj, _, cb) { + written = obj + cb() + } + + stream._read = function () {} + + stream.on('data', function (obj) { + read = obj + }) + + stream.on('end', function () { + t.equal(read.val, 1) + t.equal(written.val, 2) + }) + + stream.push({ val: 1 }) + stream.end({ val: 2 }) + stream.push(null) +}) diff --git a/src/test/browser/test-stream-end-paused.js b/src/test/browser/test-stream-end-paused.js new file mode 100644 index 0000000000..927fa24943 --- /dev/null +++ b/src/test/browser/test-stream-end-paused.js @@ -0,0 +1,30 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('end pause', function (t) { + t.plan(2) + + const stream = new Readable() + let calledRead = false + + stream._read = function () { + t.notOk(calledRead) + calledRead = true + this.push(null) + } + + stream.on('data', function () { + throw new Error('should not ever get data') + }) + + stream.pause() + + setTimeout(function () { + stream.on('end', function () { + t.ok(calledRead) + }) + stream.resume() + }) +}) diff --git a/src/test/browser/test-stream-finished.js 
b/src/test/browser/test-stream-finished.js new file mode 100644 index 0000000000..8b9190d8c9 --- /dev/null +++ b/src/test/browser/test-stream-finished.js @@ -0,0 +1,65 @@ +'use strict' + +const test = require('tape') +const { Writable, Readable, Transform, finished } = require('../../lib') + +test('readable finished', function (t) { + t.plan(1) + + const rs = new Readable({ + read: function read() {} + }) + + finished(rs, (err) => { + t.ifErr(err) + }) + + rs.push(null) + rs.resume() +}) + +test('writable finished', function (t) { + t.plan(1) + + const ws = new Writable({ + write: function write(data, enc, cb) { + cb() + } + }) + + finished(ws, (err) => { + t.ifErr(err) + }) + + ws.end() +}) + +test('transform finished', function (t) { + t.plan(3) + + const tr = new Transform({ + transform: function transform(data, enc, cb) { + cb() + } + }) + + let finish = false + let ended = false + + tr.on('end', function () { + ended = true + }) + + tr.on('finish', function () { + finish = true + }) + + finished(tr, (err) => { + t.ifErr(err) + t.ok(finish) + t.ok(ended) + }) + + tr.end() + tr.resume() +}) diff --git a/src/test/browser/test-stream-ispaused.js b/src/test/browser/test-stream-ispaused.js new file mode 100644 index 0000000000..27cb33ad11 --- /dev/null +++ b/src/test/browser/test-stream-ispaused.js @@ -0,0 +1,27 @@ +'use strict' + +const test = require('tape') +const stream = require('../../lib') + +test('is paused', function (t) { + t.plan(4) + + const readable = new stream.Readable() + + // _read is a noop, here. + readable._read = () => {} + + // default state of a stream is not "paused" + t.notOk(readable.isPaused()) + + // make the stream start flowing... + readable.on('data', () => {}) + + // still not paused. + t.notOk(readable.isPaused()) + + readable.pause() + t.ok(readable.isPaused()) + readable.resume() + t.notOk(readable.isPaused()) +}) diff --git a/src/test/browser/test-stream-pipe-after-end.js b/src/test/browser/test-stream-pipe-after-end.js new file mode 100644 index 0000000000..24401fb140 --- /dev/null +++ b/src/test/browser/test-stream-pipe-after-end.js @@ -0,0 +1,67 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Readable, Writable } = require('../../lib') + +test('pipe after end', function (t) { + t.plan(4) + + function TestReadable(opt) { + if (!(this instanceof TestReadable)) { + return new TestReadable(opt) + } + Readable.call(this, opt) + this._ended = false + } + inherits(TestReadable, Readable) + + TestReadable.prototype._read = function (n) { + if (this._ended) { + this.emit('error', new Error('_read called twice')) + } + this._ended = true + this.push(null) + } + + function TestWritable(opt) { + if (!(this instanceof TestWritable)) { + return new TestWritable(opt) + } + Writable.call(this, opt) + this._written = [] + } + inherits(TestWritable, Writable) + + TestWritable.prototype._write = function (chunk, encoding, cb) { + this._written.push(chunk) + cb() + } + + // this one should not emit 'end' until we read() from it later. + const ender = new TestReadable() + let enderEnded = false + + // what happens when you pipe() a Readable that's already ended? 
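+  // The read() below consumes the EOF pushed by _read(), so 'piper' ends with
+  // nothing buffered. Piping an already-ended readable should still finish the
+  // destination without _read() being called a second time (TestReadable turns a
+  // second call into an error), which is what the 'finish' assertion checks.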
+ const piper = new TestReadable() + // pushes EOF null, and length=0, so this will trigger 'end' + piper.read() + + setTimeout(function () { + ender.on('end', function () { + enderEnded = true + t.ok(true, 'enderEnded') + }) + t.notOk(enderEnded) + + const c = ender.read() + t.equal(c, null) + + const w = new TestWritable() + w.on('finish', function () { + t.ok(true, 'writableFinished') + }) + + piper.pipe(w) + }) +}) diff --git a/src/test/browser/test-stream-pipe-cleanup-pause.js b/src/test/browser/test-stream-pipe-cleanup-pause.js new file mode 100644 index 0000000000..e49cf4c5f1 --- /dev/null +++ b/src/test/browser/test-stream-pipe-cleanup-pause.js @@ -0,0 +1,46 @@ +'use strict' + +const test = require('tape') +const stream = require('../../lib') + +test('pipe cleanup pause', function (t) { + t.plan(3) + + const reader = new stream.Readable() + const writer1 = new stream.Writable() + const writer2 = new stream.Writable() + + // 560000 is chosen here because it is larger than the (default) highWaterMark + // and will cause `.write()` to return false + // See: https://github.com/nodejs/node/issues/2323 + const buffer = Buffer.alloc(560000) + + reader._read = function () {} + + writer1._write = function (chunk, encoding, cb) { + this.emit('chunk-received') + cb() + } + + writer1.on('chunk-received', function () { + reader.unpipe(writer1) + reader.pipe(writer2) + reader.push(buffer) + + setImmediate(function () { + reader.push(buffer) + + setImmediate(function () { + reader.push(buffer) + }) + }) + }) + + writer2._write = function (chunk, encoding, cb) { + t.ok(true) + cb() + } + + reader.pipe(writer1) + reader.push(buffer) +}) diff --git a/src/test/browser/test-stream-pipe-cleanup.js b/src/test/browser/test-stream-pipe-cleanup.js new file mode 100644 index 0000000000..8350a8297a --- /dev/null +++ b/src/test/browser/test-stream-pipe-cleanup.js @@ -0,0 +1,115 @@ +'use strict' +// This test asserts that Stream.prototype.pipe does not leave listeners +// hanging on the source or dest. 
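+// pipe() installs its own 'end'/'close'/'error'/'drain' handlers on the source
+// and destination and removes them again via an internal cleanup() once the pipe
+// completes or is unpiped; the listener-count assertions below verify nothing is
+// left behind. Roughly (an illustrative sketch, not part of this test):
+//
+//   const r = new Readable({ read() {} })
+//   const w = new Writable({ write: (c, e, cb) => cb() })
+//   r.pipe(w)
+//   r.unpipe(w)
+//   r.listenerCount('data') // back to 0 once pipe's handlers are cleaned up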
+ +const test = require('tape') +const inherits = require('inherits') +const { Stream } = require('../../lib') + +test('pipe cleanup', function (t) { + t.plan(27) + + if (/^v0\.8\./.test(process.version)) { + return + } + + function Writable() { + this.writable = true + this.endCalls = 0 + Stream.call(this) + } + inherits(Writable, Stream) + + Writable.prototype.end = function () { + this.endCalls++ + } + + Writable.prototype.destroy = function () { + this.endCalls++ + } + + function Readable() { + this.readable = true + Stream.call(this) + } + + inherits(Readable, Stream) + + Readable.prototype._read = function () {} + + function Duplex() { + this.readable = true + Writable.call(this) + } + + inherits(Duplex, Writable) + + Duplex.prototype._read = function () {} + + let i = 0 + let r + let w = new Writable() + const limit = 100 + + for (i = 0; i < limit; i++) { + r = new Readable() + r.pipe(w) + r.emit('end') + } + t.equal(0, r.listeners('end').length) + t.equal(limit, w.endCalls) + + w.endCalls = 0 + + for (i = 0; i < limit; i++) { + r = new Readable() + r.pipe(w) + r.emit('close') + } + t.equal(0, r.listeners('close').length) + t.equal(limit, w.endCalls) + + w.endCalls = 0 + + r = new Readable() + + for (i = 0; i < limit; i++) { + w = new Writable() + r.pipe(w) + w.emit('close') + } + t.equal(0, w.listeners('close').length) + + r = new Readable() + w = new Writable() + const d = new Duplex() + r.pipe(d) // pipeline A + d.pipe(w) // pipeline B + t.equal(r.listeners('end').length, 2) // A.onend, A.cleanup + t.equal(r.listeners('close').length, 2) // A.onclose, A.cleanup + t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup + t.equal(d.listeners('close').length, 3) // A.cleanup, B.onclose, B.cleanup + t.equal(w.listeners('end').length, 0) + t.equal(w.listeners('close').length, 1) // B.cleanup + + r.emit('end') + t.equal(d.endCalls, 1) + t.equal(w.endCalls, 0) + t.equal(r.listeners('end').length, 0) + t.equal(r.listeners('close').length, 0) + t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup + t.equal(d.listeners('close').length, 2) // B.onclose, B.cleanup + t.equal(w.listeners('end').length, 0) + t.equal(w.listeners('close').length, 1) // B.cleanup + + d.emit('end') + t.equal(d.endCalls, 1) + t.equal(w.endCalls, 1) + t.equal(r.listeners('end').length, 0) + t.equal(r.listeners('close').length, 0) + t.equal(d.listeners('end').length, 0) + t.equal(d.listeners('close').length, 0) + t.equal(w.listeners('end').length, 0) + t.equal(w.listeners('close').length, 0) + d.end() +}) diff --git a/src/test/browser/test-stream-pipe-error-handling.js b/src/test/browser/test-stream-pipe-error-handling.js new file mode 100644 index 0000000000..14016936a2 --- /dev/null +++ b/src/test/browser/test-stream-pipe-error-handling.js @@ -0,0 +1,99 @@ +'use strict' + +const test = require('tape') +const { Readable, Writable, Stream } = require('../../lib') + +test('Error Listener Catches', function (t) { + t.plan(1) + + const source = new Stream() + const dest = new Stream() + + source._read = function () {} + source.pipe(dest) + + let gotErr = null + source.on('error', function (err) { + gotErr = err + }) + + const err = new Error('This stream turned into bacon.') + source.emit('error', err) + t.strictEqual(gotErr, err) +}) + +test('Error WithoutListener Throws', function (t) { + t.plan(1) + + const source = new Stream() + const dest = new Stream() + + source._read = function () {} + source.pipe(dest) + + const err = new Error('This stream turned into bacon.') + + let gotErr = null + try { + 
source.emit('error', err) + } catch (e) { + gotErr = e + } + + t.strictEqual(gotErr, err) +}) + +test('Error With Removed Listener Throws', function (t) { + t.plan(2) + + const r = new Readable() + const w = new Writable() + let removed = false + + r._read = function () { + setTimeout(function () { + t.ok(removed) + t.throws(function () { + w.emit('error', new Error('fail')) + }) + }) + } + + w.on('error', myOnError) + r.pipe(w) + w.removeListener('error', myOnError) + removed = true + + function myOnError(er) { + throw new Error('this should not happen') + } +}) + +test('Error With Removed Listener Throws', function (t) { + t.plan(2) + + const r = new Readable() + const w = new Writable() + let removed = false + let caught = false + + r._read = function () { + setTimeout(function () { + t.ok(removed) + w.emit('error', new Error('fail')) + }) + } + + w.on('error', myOnError) + w._write = function () {} + + r.pipe(w) + // Removing some OTHER random listener should not do anything + w.removeListener('error', function () {}) + removed = true + + function myOnError(er) { + t.notOk(caught) + caught = true + } +}) diff --git a/src/test/browser/test-stream-pipe-event.js b/src/test/browser/test-stream-pipe-event.js new file mode 100644 index 0000000000..e39109f645 --- /dev/null +++ b/src/test/browser/test-stream-pipe-event.js @@ -0,0 +1,34 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Stream } = require('../../lib') + +test('pipe event', function (t) { + t.plan(1) + + function Writable() { + this.writable = true + Stream.call(this) + } + inherits(Writable, Stream) + + function Readable() { + this.readable = true + Stream.call(this) + } + inherits(Readable, Stream) + + let passed = false + + const w = new Writable() + w.on('pipe', function (src) { + passed = true + }) + + const r = new Readable() + r._read = function () {} + r.pipe(w) + + t.ok(passed) +}) diff --git a/src/test/browser/test-stream-pipe-without-listenerCount.js b/src/test/browser/test-stream-pipe-without-listenerCount.js new file mode 100644 index 0000000000..448d362a7c --- /dev/null +++ b/src/test/browser/test-stream-pipe-without-listenerCount.js @@ -0,0 +1,20 @@ +'use strict' + +const test = require('tape') +const { Stream } = require('../../lib') + +test('pipe without listenerCount on read', function (t) { + t.plan(1) + + const r = new Stream({ + read: function () {} + }) + r.listenerCount = undefined + + const w = new Stream() + w.on('pipe', function () { + r.emit('error', new Error('Readable Error')) + }) + + t.throws(() => r.pipe(w), 'TypeError: this.listenerCount is not a function') +}) diff --git a/src/test/browser/test-stream-pipeline.js b/src/test/browser/test-stream-pipeline.js new file mode 100644 index 0000000000..0e1180a7f2 --- /dev/null +++ b/src/test/browser/test-stream-pipeline.js @@ -0,0 +1,109 @@ +'use strict' + +const test = require('tape') +const { Readable, Writable, pipeline } = require('../../lib') + +test('pipeline', function (t) { + t.plan(3) + + let finished = false + + const processed = [] + const expected = [Buffer.from('a'), Buffer.from('b'), Buffer.from('c')] + + const read = new Readable({ + read: function read() {} + }) + + const write = new Writable({ + write: function write(data, enc, cb) { + processed.push(data) + cb() + } + }) + + write.on('finish', function () { + finished = true + }) + + for (let i = 0; i < expected.length; i++) { + read.push(expected[i]) + } + + read.push(null) + pipeline(read, write, (err) => { + t.ifErr(err) + t.ok(finished) + 
t.deepEqual(processed, expected) + }) +}) + +test('pipeline missing args', function (t) { + t.plan(3) + + const _read = new Readable({ + read: function read() {} + }) + + t.throws(function () { + pipeline(_read, function () {}) + }) + + t.throws(function () { + pipeline(function () {}) + }) + + t.throws(function () { + pipeline() + }) +}) + +test('pipeline error', function (t) { + t.plan(1) + + const _read2 = new Readable({ + read: function read() {} + }) + + const _write = new Writable({ + write: function write(data, enc, cb) { + cb() + } + }) + + _read2.push('data') + + setImmediate(function () { + return _read2.destroy() + }) + + pipeline(_read2, _write, (err) => { + t.equal(err.message, 'Premature close') + }) +}) + +test('pipeline destroy', function (t) { + t.plan(2) + + const _read3 = new Readable({ + read: function read() {} + }) + + const _write2 = new Writable({ + write: function write(data, enc, cb) { + cb() + } + }) + + _read3.push('data') + + setImmediate(function () { + return _read3.destroy(new Error('kaboom')) + }) + + const dst = pipeline(_read3, _write2, (err) => { + t.equal(err.message, 'kaboom') + }) + + t.equal(dst, _write2) +}) diff --git a/src/test/browser/test-stream-push-order.js b/src/test/browser/test-stream-push-order.js new file mode 100644 index 0000000000..e5aef44618 --- /dev/null +++ b/src/test/browser/test-stream-push-order.js @@ -0,0 +1,32 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('push order', function (t) { + t.plan(1) + + const s = new Readable({ + highWaterMark: 20, + encoding: 'ascii' + }) + + const list = ['1', '2', '3', '4', '5', '6'] + + s._read = function (n) { + const one = list.shift() + if (!one) { + s.push(null) + } else { + const two = list.shift() + s.push(one) + s.push(two) + } + } + + s.read(0) + + setTimeout(function () { + t.equals(s._readableState.buffer.join(','), '1,2,3,4,5,6') + }) +}) diff --git a/src/test/browser/test-stream-push-strings.js b/src/test/browser/test-stream-push-strings.js new file mode 100644 index 0000000000..5344cdf1e0 --- /dev/null +++ b/src/test/browser/test-stream-push-strings.js @@ -0,0 +1,55 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Readable } = require('../../lib') + +test('push strings', function (t) { + t.plan(2) + + function MyStream(options) { + Readable.call(this, options) + this._chunks = 3 + } + + inherits(MyStream, Readable) + + MyStream.prototype._read = function (n) { + switch (this._chunks--) { + case 0: + return this.push(null) + case 1: + return setTimeout( + function () { + this.push('last chunk') + }.bind(this), + 100 + ) + case 2: + return this.push('second to last chunk') + case 3: + return process.nextTick( + function () { + this.push('first chunk') + }.bind(this) + ) + default: + throw new Error('?') + } + } + const expect = ['first chunksecond to last chunk', 'last chunk'] + + const ms = new MyStream() + const results = [] + ms.on('readable', function () { + let chunk + while ((chunk = ms.read()) !== null) { + results.push(chunk + '') + } + }) + + ms.on('end', function () { + t.equal(ms._chunks, -1) + t.deepEqual(results, expect) + }) +}) diff --git a/src/test/browser/test-stream-readable-constructor-set-methods.js b/src/test/browser/test-stream-readable-constructor-set-methods.js new file mode 100644 index 0000000000..9d1fd3f234 --- /dev/null +++ b/src/test/browser/test-stream-readable-constructor-set-methods.js @@ -0,0 +1,23 @@ +'use strict' + +const test = require('tape') 
+const { Readable } = require('../../lib') + +test('readable constructor set methods', function (t) { + t.plan(2) + + let _readCalled = false + + function _read(n) { + _readCalled = true + this.push(null) + } + + const r = new Readable({ read: _read }) + r.resume() + + setTimeout(function () { + t.equal(r._read, _read) + t.ok(_readCalled) + }) +}) diff --git a/src/test/browser/test-stream-readable-event.js b/src/test/browser/test-stream-readable-event.js new file mode 100644 index 0000000000..ae611f6b83 --- /dev/null +++ b/src/test/browser/test-stream-readable-event.js @@ -0,0 +1,105 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('readable events - first', (t) => { + t.plan(3) + + // First test, not reading when the readable is added. + // make sure that on('readable', ...) triggers a readable event. + const r = new Readable({ + highWaterMark: 3 + }) + + let _readCalled = false + r._read = function (n) { + _readCalled = true + } + + // This triggers a 'readable' event, which is lost. + r.push(Buffer.from('blerg')) + + let caughtReadable = false + setTimeout(function () { + // we're testing what we think we are + t.notOk(r._readableState.reading) + r.on('readable', function () { + caughtReadable = true + setTimeout(function () { + // we're testing what we think we are + t.notOk(_readCalled) + + t.ok(caughtReadable) + }) + }) + }) +}) + +test('readable events - second', (t) => { + t.plan(3) + + // second test, make sure that readable is re-emitted if there's + // already a length, while it IS reading. + + const r = new Readable({ + highWaterMark: 3 + }) + + let _readCalled = false + r._read = function (n) { + _readCalled = true + } + + // This triggers a 'readable' event, which is lost. + r.push(Buffer.from('bl')) + + let caughtReadable = false + setTimeout(function () { + // assert we're testing what we think we are + t.ok(r._readableState.reading) + r.on('readable', function () { + caughtReadable = true + setTimeout(function () { + // we're testing what we think we are + t.ok(_readCalled) + + t.ok(caughtReadable) + }) + }) + }) +}) + +test('readable events - third', (t) => { + t.plan(3) + + // Third test, not reading when the stream has not passed + // the highWaterMark but *has* reached EOF. + const r = new Readable({ + highWaterMark: 30 + }) + + let _readCalled = false + r._read = function (n) { + _readCalled = true + } + + // This triggers a 'readable' event, which is lost. 
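+  // "Lost" means nobody is listening yet, so this 'readable' emission goes
+  // nowhere. Readable schedules another 'readable' when a listener is attached
+  // later while data (or EOF) is already buffered, which is what the late
+  // listener inside setTimeout below relies on; once EOF is pushed below and the
+  // highWaterMark is never reached, _read() should not be called at all.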
+ r.push(Buffer.from('blerg')) + r.push(null) + + let caughtReadable = false + setTimeout(function () { + // assert we're testing what we think we are + t.notOk(r._readableState.reading) + r.on('readable', function () { + caughtReadable = true + setTimeout(function () { + // we're testing what we think we are + t.notOk(_readCalled) + + t.ok(caughtReadable) + }) + }) + }) +}) diff --git a/src/test/browser/test-stream-sync-write.js b/src/test/browser/test-stream-sync-write.js new file mode 100644 index 0000000000..92e327eed5 --- /dev/null +++ b/src/test/browser/test-stream-sync-write.js @@ -0,0 +1,46 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Writable } = require('../../lib') + +test('should bea ble to write sync', function (t) { + t.plan(2) + + let internalCalls = 0 + let externalCalls = 0 + + const InternalStream = function () { + Writable.call(this) + } + inherits(InternalStream, Writable) + + InternalStream.prototype._write = function (chunk, encoding, callback) { + internalCalls++ + callback() + } + + const internalStream = new InternalStream() + + const ExternalStream = function (writable) { + this._writable = writable + Writable.call(this) + } + inherits(ExternalStream, Writable) + + ExternalStream.prototype._write = function (chunk, encoding, callback) { + externalCalls++ + this._writable.write(chunk, encoding, callback) + } + + const externalStream = new ExternalStream(internalStream) + + for (let i = 0; i < 2000; i++) { + externalStream.write(i.toString()) + } + + externalStream.end(() => { + t.equal(internalCalls, 2000) + t.equal(externalCalls, 2000) + }) +}) diff --git a/src/test/browser/test-stream-transform-constructor-set-methods.js b/src/test/browser/test-stream-transform-constructor-set-methods.js new file mode 100644 index 0000000000..4cefa63dff --- /dev/null +++ b/src/test/browser/test-stream-transform-constructor-set-methods.js @@ -0,0 +1,35 @@ +'use strict' + +const test = require('tape') +const { Transform } = require('../../lib') + +test('transform constructor set methods', function (t) { + t.plan(4) + + let _transformCalled = false + function _transform(d, e, n) { + _transformCalled = true + n() + } + + let _flushCalled = false + function _flush(n) { + _flushCalled = true + n() + } + + const tr = new Transform({ + transform: _transform, + flush: _flush + }) + + tr.end(Buffer.from('blerg')) + tr.resume() + + tr.on('end', function () { + t.equal(tr._transform, _transform) + t.equal(tr._flush, _flush) + t.ok(_transformCalled) + t.ok(_flushCalled) + }) +}) diff --git a/src/test/browser/test-stream-transform-objectmode-falsey-value.js b/src/test/browser/test-stream-transform-objectmode-falsey-value.js new file mode 100644 index 0000000000..b496acb6de --- /dev/null +++ b/src/test/browser/test-stream-transform-objectmode-falsey-value.js @@ -0,0 +1,35 @@ +'use strict' + +const test = require('tape') +const { PassThrough } = require('../../lib') + +test('transform objectmode falsey value', function (t) { + t.plan(13) + + const src = new PassThrough({ objectMode: true }) + const tx = new PassThrough({ objectMode: true }) + const dest = new PassThrough({ objectMode: true }) + + const expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + const results = [] + dest.on('end', function () { + t.deepEqual(results, expect) + }) + + dest.on('data', function (x) { + results.push(x) + }) + + src.pipe(tx).pipe(dest) + + let i = -1 + const int = setInterval(function () { + if (i > 10) { + src.end() + clearInterval(int) + } else { + t.ok(true) 
+ src.write(i++) + } + }, 10) +}) diff --git a/src/test/browser/test-stream-transform-split-objectmode.js b/src/test/browser/test-stream-transform-split-objectmode.js new file mode 100644 index 0000000000..e23beb53ed --- /dev/null +++ b/src/test/browser/test-stream-transform-split-objectmode.js @@ -0,0 +1,57 @@ +'use strict' + +const test = require('tape') +const { Transform } = require('../../lib') + +test('transform split objectmode', function (t) { + t.plan(10) + + const parser = new Transform({ readableObjectMode: true }) + + t.ok(parser._readableState.objectMode, 'parser 1') + t.notOk(parser._writableState.objectMode, 'parser 2') + t.equals(parser._readableState.highWaterMark, 16, 'parser 3') + t.equals(parser._writableState.highWaterMark, 16 * 1024, 'parser 4') + + parser._transform = function (chunk, enc, callback) { + callback(null, { val: chunk[0] }) + } + + let parsed + + parser.on('data', function (obj) { + parsed = obj + }) + + parser.end(Buffer.from([42])) + + parser.on('end', function () { + t.equals(parsed.val, 42, 'parser ended') + }) + + const serializer = new Transform({ writableObjectMode: true }) + + t.notOk(serializer._readableState.objectMode, 'serializer 1') + t.ok(serializer._writableState.objectMode, 'serializer 2') + t.equals(serializer._readableState.highWaterMark, 16 * 1024, 'serializer 3') + t.equals(serializer._writableState.highWaterMark, 16, 'serializer 4') + + serializer._transform = function (obj, _, callback) { + callback(null, Buffer.from([obj.val])) + } + + let serialized + + serializer.on('data', function (chunk) { + serialized = chunk + }) + + serializer.write({ val: 42 }) + + serializer.on('end', function () { + t.equals(serialized[0], 42, 'searlizer ended') + }) + setImmediate(function () { + serializer.end() + }) +}) diff --git a/src/test/browser/test-stream-unshift-empty-chunk.js b/src/test/browser/test-stream-unshift-empty-chunk.js new file mode 100644 index 0000000000..b95f11bae5 --- /dev/null +++ b/src/test/browser/test-stream-unshift-empty-chunk.js @@ -0,0 +1,62 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('unshift empty chunk', function (t) { + t.plan(1) + + const r = new Readable() + let nChunks = 10 + const chunk = Buffer.alloc(10) + chunk.fill('x') + + r._read = function (n) { + setTimeout(function () { + r.push(--nChunks === 0 ? null : chunk) + }) + } + + let readAll = false + const seen = [] + r.on('readable', function () { + let chunk + while ((chunk = r.read())) { + seen.push(chunk.toString()) + // simulate only reading a certain amount of the data, + // and then putting the rest of the chunk back into the + // stream, like a parser might do. We just fill it with + // 'y' so that it's easy to see which bits were touched, + // and which were not. + const putBack = Buffer.alloc(readAll ? 
0 : 5) + putBack.fill('y') + readAll = !readAll + r.unshift(putBack) + } + }) + + const expect = [ + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy' + ] + + r.on('end', function () { + t.deepEqual(seen, expect) + }) +}) diff --git a/src/test/browser/test-stream-unshift-read-race.js b/src/test/browser/test-stream-unshift-read-race.js new file mode 100644 index 0000000000..3afa2beaed --- /dev/null +++ b/src/test/browser/test-stream-unshift-read-race.js @@ -0,0 +1,122 @@ +'use strict' + +// This test verifies that: +// 1. unshift() does not cause colliding _read() calls. +// 2. unshift() after the 'end' event is an error, but after the EOF +// signalling null, it is ok, and just creates a new readable chunk. +// 3. push() after the EOF signaling null is an error. +// 4. _read() is not called after pushing the EOF null chunk. + +const test = require('tape') +const stream = require('../../lib') + +test('unshift read race', function (t) { + t.plan(141) + + const hwm = 10 + const r = stream.Readable({ highWaterMark: hwm }) + const chunks = 10 + + const data = Buffer.alloc(chunks * hwm + Math.ceil(hwm / 2)) + for (let i = 0; i < data.length; i++) { + const c = 'asdf'.charCodeAt(i % 4) + data[i] = c + } + + let pos = 0 + let pushedNull = false + r._read = function (n) { + t.notOk(pushedNull, '_read after null push') + + // every third chunk is fast + push(!(chunks % 3)) + + function push(fast) { + t.notOk(pushedNull, 'push() after null push') + const c = pos >= data.length ? null : data.slice(pos, Math.min(pos + n, data.length)) + pushedNull = c === null + if (fast) { + pos += n + r.push(c) + if (c === null) { + pushError() + } + } else { + setTimeout(function () { + pos += n + r.push(c) + if (c === null) { + pushError() + } + }) + } + } + } + + function pushError() { + t.throws(function () { + r.push(Buffer.alloc(1)) + }) + } + + const w = stream.Writable() + const written = [] + w._write = function (chunk, encoding, cb) { + written.push(chunk.toString()) + cb() + } + + let ended = false + r.on('end', function () { + t.notOk(ended, 'end emitted more than once') + t.throws(function () { + r.unshift(Buffer.alloc(1)) + }) + ended = true + w.end() + }) + + r.on('readable', function () { + let chunk + while ((chunk = r.read(10)) !== null) { + w.write(chunk) + if (chunk.length > 4) { + r.unshift(Buffer.from('1234')) + } + } + }) + + w.on('finish', function () { + // each chunk should start with 1234, and then be asfdasdfasdf... + // The first got pulled out before the first unshift('1234'), so it's + // lacking that piece. 
+ t.equal(written[0], 'asdfasdfas') + let asdf = 'd' + + // console.error('0: %s', written[0]); + for (let i = 1; i < written.length; i++) { + // console.error('%s: %s', i.toString(32), written[i]); + t.equal(written[i].slice(0, 4), '1234') + for (let j = 4; j < written[i].length; j++) { + const c = written[i].charAt(j) + t.equal(c, asdf) + switch (asdf) { + case 'a': + asdf = 's' + break + case 's': + asdf = 'd' + break + case 'd': + asdf = 'f' + break + case 'f': + asdf = 'a' + break + } + } + } + + t.equal(written.length, 18) + }) +}) diff --git a/src/test/browser/test-stream-writable-change-default-encoding.js b/src/test/browser/test-stream-writable-change-default-encoding.js new file mode 100644 index 0000000000..5f664be178 --- /dev/null +++ b/src/test/browser/test-stream-writable-change-default-encoding.js @@ -0,0 +1,69 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const stream = require('../../lib') + +inherits(MyWritable, stream.Writable) + +MyWritable.prototype._write = function (chunk, encoding, callback) { + this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding) + callback() +} + +function MyWritable(fn, options) { + stream.Writable.call(this, options) + this.fn = fn +} + +test('defaultCondingIsUtf8', (t) => { + t.plan(1) + + const m = new MyWritable( + function (isBuffer, type, enc) { + t.equal(enc, 'utf8') + }, + { decodeStrings: false } + ) + m.write('foo') + m.end() +}) + +test('changeDefaultEncodingToAscii', (t) => { + t.plan(1) + + const m = new MyWritable( + function (isBuffer, type, enc) { + t.equal(enc, 'ascii') + }, + { decodeStrings: false } + ) + m.setDefaultEncoding('ascii') + m.write('bar') + m.end() +}) + +test('changeDefaultEncodingToInvalidValue', (t) => { + t.plan(1) + + t.throws(function () { + const m = new MyWritable(function (isBuffer, type, enc) {}, { decodeStrings: false }) + m.setDefaultEncoding({}) + m.write('bar') + m.end() + }, TypeError) +}) + +test('checkVairableCaseEncoding', (t) => { + t.plan(1) + + const m = new MyWritable( + function (isBuffer, type, enc) { + t.equal(enc, 'ascii') + }, + { decodeStrings: false } + ) + m.setDefaultEncoding('AsCii') + m.write('bar') + m.end() +}) diff --git a/src/test/browser/test-stream-writable-constructor-set-methods.js b/src/test/browser/test-stream-writable-constructor-set-methods.js new file mode 100644 index 0000000000..71c6f55e6e --- /dev/null +++ b/src/test/browser/test-stream-writable-constructor-set-methods.js @@ -0,0 +1,38 @@ +'use strict' + +const test = require('tape') +const { Writable } = require('../../lib') + +test('writable constructor set methods', function (t) { + t.plan(5) + + let _writeCalled = false + function _write(d, e, n) { + _writeCalled = true + } + + const w = new Writable({ write: _write }) + w.end(Buffer.from('blerg')) + + let _writevCalled = false + let dLength = 0 + function _writev(d, n) { + dLength = d.length + _writevCalled = true + } + + const w2 = new Writable({ writev: _writev }) + w2.cork() + + w2.write(Buffer.from('blerg')) + w2.write(Buffer.from('blerg')) + w2.end() + + setImmediate(function () { + t.equal(w._write, _write) + t.ok(_writeCalled) + t.equal(w2._writev, _writev) + t.equal(dLength, 2) + t.ok(_writevCalled) + }) +}) diff --git a/src/test/browser/test-stream-writable-decoded-encoding.js b/src/test/browser/test-stream-writable-decoded-encoding.js new file mode 100644 index 0000000000..4dbba262d8 --- /dev/null +++ b/src/test/browser/test-stream-writable-decoded-encoding.js @@ -0,0 +1,49 @@ +'use strict' + +const test = 
require('tape') +const inherits = require('inherits') +const stream = require('../../lib') + +function MyWritable(fn, options) { + stream.Writable.call(this, options) + this.fn = fn +} + +inherits(MyWritable, stream.Writable) + +MyWritable.prototype._write = function (chunk, encoding, callback) { + this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding) + callback() +} + +test('decodeStringsTrue', (t) => { + t.plan(3) + + const m = new MyWritable( + function (isBuffer, type, enc) { + t.ok(isBuffer) + t.equal(type, 'object') + t.equal(enc, 'buffer') + // console.log('ok - decoded string is decoded'); + }, + { decodeStrings: true } + ) + m.write('some-text', 'utf8') + m.end() +}) + +test('decodeStringsFalse', (t) => { + t.plan(3) + + const m = new MyWritable( + function (isBuffer, type, enc) { + t.notOk(isBuffer) + t.equal(type, 'string') + t.equal(enc, 'utf8') + // console.log('ok - un-decoded string is not decoded'); + }, + { decodeStrings: false } + ) + m.write('some-text', 'utf8') + m.end() +}) diff --git a/src/test/browser/test-stream-writev.js b/src/test/browser/test-stream-writev.js new file mode 100644 index 0000000000..a100517f3f --- /dev/null +++ b/src/test/browser/test-stream-writev.js @@ -0,0 +1,101 @@ +'use strict' + +const test = require('tape') +const stream = require('../../lib') + +const queue = [] +for (let decode = 0; decode < 2; decode++) { + for (let uncork = 0; uncork < 2; uncork++) { + for (let multi = 0; multi < 2; multi++) { + queue.push([!!decode, !!uncork, !!multi]) + } + } +} + +function runTest(decode, uncork, multi) { + return function (t) { + t.plan(8) + + // console.log('# decode=%j uncork=%j multi=%j', decode, uncork, multi); + let counter = 0 + let expectCount = 0 + function cnt(msg) { + expectCount++ + const expect = expectCount + return function (er) { + if (er) { + throw er + } + counter++ + t.equal(counter, expect) + } + } + + const w = new stream.Writable({ decodeStrings: decode }) + w._write = function (chunk, e, cb) { + t.ok(false, 'Should not call _write') + } + + const expectChunks = decode + ? [ + { encoding: 'buffer', chunk: [104, 101, 108, 108, 111, 44, 32] }, + { encoding: 'buffer', chunk: [119, 111, 114, 108, 100] }, + { encoding: 'buffer', chunk: [33] }, + { encoding: 'buffer', chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] }, + { encoding: 'buffer', chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173] } + ] + : [ + { encoding: 'ascii', chunk: 'hello, ' }, + { encoding: 'utf8', chunk: 'world' }, + { encoding: 'buffer', chunk: [33] }, + { encoding: 'binary', chunk: '\nand then...' }, + { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' } + ] + + let actualChunks + w._writev = function (chunks, cb) { + actualChunks = chunks.map(function (chunk) { + return { + encoding: chunk.encoding, + chunk: Buffer.isBuffer(chunk.chunk) ? 
Array.prototype.slice.call(chunk.chunk) : chunk.chunk + } + }) + cb() + } + + w.cork() + w.write('hello, ', 'ascii', cnt('hello')) + w.write('world', 'utf8', cnt('world')) + + if (multi) { + w.cork() + } + + w.write(Buffer.from('!'), 'buffer', cnt('!')) + w.write('\nand then...', 'binary', cnt('and then')) + + if (multi) { + w.uncork() + } + + w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex')) + + if (uncork) { + w.uncork() + } + + w.end(cnt('end')) + + w.on('finish', function () { + // make sure finish comes after all the write cb + cnt('finish')() + t.deepEqual(expectChunks, actualChunks) + }) + } +} + +for (let i = 0; i < queue.length; i++) { + const tr = queue[i] + + test('round ' + i, runTest(tr[0], tr[1], tr[2])) +} diff --git a/src/test/browser/test-stream2-base64-single-char-read-end.js b/src/test/browser/test-stream2-base64-single-char-read-end.js new file mode 100644 index 0000000000..dd5dc5bf61 --- /dev/null +++ b/src/test/browser/test-stream2-base64-single-char-read-end.js @@ -0,0 +1,39 @@ +'use strict' + +const test = require('tape') +const { Readable, Writable } = require('../../lib') + +test('base64 single char read end', function (t) { + t.plan(1) + + const src = new Readable({ encoding: 'base64' }) + const dst = new Writable() + let hasRead = false + const accum = [] + + src._read = function (n) { + if (!hasRead) { + hasRead = true + process.nextTick(function () { + src.push(Buffer.from('1')) + src.push(null) + }) + } + } + + dst._write = function (chunk, enc, cb) { + accum.push(chunk) + cb() + } + + src.on('end', function () { + t.equal(Buffer.concat(accum) + '', 'MQ==') + clearTimeout(timeout) + }) + + src.pipe(dst) + + const timeout = setTimeout(function () { + t.fail('timed out waiting for _write') + }, 100) +}) diff --git a/src/test/browser/test-stream2-compatibility.js b/src/test/browser/test-stream2-compatibility.js new file mode 100644 index 0000000000..d9abbba6c0 --- /dev/null +++ b/src/test/browser/test-stream2-compatibility.js @@ -0,0 +1,34 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Readable } = require('../../lib') + +test('compatibility', function (t) { + t.plan(1) + + let ondataCalled = 0 + + function TestReader() { + Readable.apply(this) + this._buffer = Buffer.alloc(100) + this._buffer.fill('x') + + this.on('data', function () { + ondataCalled++ + }) + } + + inherits(TestReader, Readable) + + TestReader.prototype._read = function (n) { + this.push(this._buffer) + this._buffer = Buffer.alloc(0) + } + + setTimeout(function () { + t.equal(ondataCalled, 1) + }) + + new TestReader().read() +}) diff --git a/src/test/browser/test-stream2-large-read-stall.js b/src/test/browser/test-stream2-large-read-stall.js new file mode 100644 index 0000000000..42c8e66cc7 --- /dev/null +++ b/src/test/browser/test-stream2-large-read-stall.js @@ -0,0 +1,60 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('large object read stall', function (t) { + t.plan(1) + + // If everything aligns so that you do a read(n) of exactly the + // remaining buffer, then make sure that 'end' still emits. 
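// A side note on the constants below, as an illustrative aside that is not
// applied by the patch (the "HwmSketch"/"hs" names exist only here):
// READSIZE (100) is deliberately larger than HWM (50). Assuming
// Node-core-compatible semantics, read(n) with n above the current
// highWaterMark silently raises the mark to the next power of two, so the
// internal buffer is allowed to grow large enough for the read to succeed.
const { Readable: HwmSketch } = require('../../lib')
const hs = new HwmSketch({ highWaterMark: 50, read() {} })
console.log(hs._readableState.highWaterMark) // 50
hs.read(100) // returns null: nothing is buffered yet...
console.log(hs._readableState.highWaterMark) // ...but the mark is now 128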
+ + const READSIZE = 100 + const PUSHSIZE = 20 + const PUSHCOUNT = 1000 + const HWM = 50 + + const r = new Readable({ + highWaterMark: HWM + }) + const rs = r._readableState + + r._read = push + + r.on('readable', function () { + false && console.error('>> readable') + do { + false && console.error(' > read(%d)', READSIZE) + var ret = r.read(READSIZE) + false && console.error(' < %j (%d remain)', ret && ret.length, rs.length) + } while (ret && ret.length === READSIZE) + + false && console.error('<< after read()', ret && ret.length, rs.needReadable, rs.length) + }) + + r.on('end', function () { + t.equal(pushes, PUSHCOUNT + 1) + + false && console.error('end') + }) + + let pushes = 0 + function push() { + if (pushes > PUSHCOUNT) { + return + } + + if (pushes++ === PUSHCOUNT) { + false && console.error(' push(EOF)') + return r.push(null) + } + + false && console.error(' push #%d', pushes) + if (r.push(Buffer.alloc(PUSHSIZE))) { + setTimeout(push) + } + } + + // start the flow + r.read(0) +}) diff --git a/src/test/browser/test-stream2-objects.js b/src/test/browser/test-stream2-objects.js new file mode 100644 index 0000000000..75881abf8d --- /dev/null +++ b/src/test/browser/test-stream2-objects.js @@ -0,0 +1,304 @@ +'use strict' + +const test = require('tape') +const { Readable, Writable } = require('../../lib') + +function toArray(callback) { + const stream = new Writable({ objectMode: true }) + const list = [] + stream.write = function (chunk) { + list.push(chunk) + } + + stream.end = function () { + callback(list) + } + + return stream +} + +function fromArray(list) { + const r = new Readable({ objectMode: true }) + r._read = noop + forEach(list, function (chunk) { + r.push(chunk) + }) + r.push(null) + + return r +} + +function noop() {} + +test('can read objects from stream', function (t) { + t.plan(3) + + const r = fromArray([{ one: '1' }, { two: '2' }]) + + const v1 = r.read() + const v2 = r.read() + const v3 = r.read() + + t.deepEqual(v1, { one: '1' }) + t.deepEqual(v2, { two: '2' }) + t.deepEqual(v3, null) +}) + +test('can pipe objects into stream', function (t) { + t.plan(1) + + const r = fromArray([{ one: '1' }, { two: '2' }]) + + r.pipe( + toArray(function (list) { + t.deepEqual(list, [{ one: '1' }, { two: '2' }]) + }) + ) +}) + +test('read(n) is ignored', function (t) { + t.plan(1) + + const r = fromArray([{ one: '1' }, { two: '2' }]) + + const value = r.read(2) + + t.deepEqual(value, { one: '1' }) +}) + +test('can read objects from _read (sync)', function (t) { + t.plan(1) + + const r = new Readable({ objectMode: true }) + const list = [{ one: '1' }, { two: '2' }] + r._read = function (n) { + const item = list.shift() + r.push(item || null) + } + + r.pipe( + toArray(function (list) { + t.deepEqual(list, [{ one: '1' }, { two: '2' }]) + }) + ) +}) + +test('can read objects from _read (async)', function (t) { + t.plan(1) + + const r = new Readable({ objectMode: true }) + const list = [{ one: '1' }, { two: '2' }] + r._read = function (n) { + const item = list.shift() + process.nextTick(function () { + r.push(item || null) + }) + } + + r.pipe( + toArray(function (list) { + t.deepEqual(list, [{ one: '1' }, { two: '2' }]) + }) + ) +}) + +test('can read strings as objects', function (t) { + t.plan(1) + + const r = new Readable({ + objectMode: true + }) + r._read = noop + const list = ['one', 'two', 'three'] + forEach(list, function (str) { + r.push(str) + }) + r.push(null) + + r.pipe( + toArray(function (array) { + t.deepEqual(array, list) + }) + ) +}) + +test('read(0) for object 
streams', function (t) { + t.plan(1) + + const r = new Readable({ + objectMode: true + }) + r._read = noop + + r.push('foobar') + r.push(null) + + r.read(0) + + r.pipe( + toArray(function (array) { + t.deepEqual(array, ['foobar']) + }) + ) +}) + +test('falsey values', function (t) { + t.plan(1) + + const r = new Readable({ + objectMode: true + }) + r._read = noop + + r.push(false) + r.push(0) + r.push('') + r.push(null) + + r.pipe( + toArray(function (array) { + t.deepEqual(array, [false, 0, '']) + }) + ) +}) + +test('high watermark _read', function (t) { + t.plan(5) + + const r = new Readable({ + highWaterMark: 6, + objectMode: true + }) + let calls = 0 + const list = ['1', '2', '3', '4', '5', '6', '7', '8'] + + r._read = function (n) { + calls++ + } + + forEach(list, function (c) { + r.push(c) + }) + + const v = r.read() + + t.equal(calls, 0) + t.equal(v, '1') + + const v2 = r.read() + t.equal(v2, '2') + + const v3 = r.read() + t.equal(v3, '3') + + t.equal(calls, 1) +}) + +test('high watermark push', function (t) { + t.plan(6) + + const r = new Readable({ + highWaterMark: 6, + objectMode: true + }) + r._read = function (n) {} + for (let i = 0; i < 6; i++) { + const bool = r.push(i) + t.equal(bool, i !== 5) + } +}) + +test('can write objects to stream', function (t) { + t.plan(1) + + const w = new Writable({ objectMode: true }) + + w._write = function (chunk, encoding, cb) { + t.deepEqual(chunk, { foo: 'bar' }) + cb() + } + + w.on('finish', function () {}) + + w.write({ foo: 'bar' }) + w.end() +}) + +test('can write multiple objects to stream', function (t) { + t.plan(1) + + const w = new Writable({ objectMode: true }) + const list = [] + + w._write = function (chunk, encoding, cb) { + list.push(chunk) + cb() + } + + w.on('finish', function () { + t.deepEqual(list, [0, 1, 2, 3, 4]) + }) + + w.write(0) + w.write(1) + w.write(2) + w.write(3) + w.write(4) + w.end() +}) + +test('can write strings as objects', function (t) { + t.plan(1) + + const w = new Writable({ + objectMode: true + }) + const list = [] + + w._write = function (chunk, encoding, cb) { + list.push(chunk) + process.nextTick(cb) + } + + w.on('finish', function () { + t.deepEqual(list, ['0', '1', '2', '3', '4']) + }) + + w.write('0') + w.write('1') + w.write('2') + w.write('3') + w.write('4') + w.end() +}) + +test('buffers finish until cb is called', function (t) { + t.plan(2) + + const w = new Writable({ + objectMode: true + }) + let called = false + + w._write = function (chunk, encoding, cb) { + t.equal(chunk, 'foo') + + process.nextTick(function () { + called = true + cb() + }) + } + + w.on('finish', function () { + t.equal(called, true) + }) + + w.write('foo') + w.end() +}) + +function forEach(xs, f) { + for (let i = 0, l = xs.length; i < l; i++) { + f(xs[i], i) + } +} diff --git a/src/test/browser/test-stream2-pipe-error-handling.js b/src/test/browser/test-stream2-pipe-error-handling.js new file mode 100644 index 0000000000..a3b053e52c --- /dev/null +++ b/src/test/browser/test-stream2-pipe-error-handling.js @@ -0,0 +1,89 @@ +'use strict' + +const test = require('tape') +const stream = require('../../lib') + +test('Error Listener Catches', function (t) { + t.plan(3) + + let count = 1000 + + const source = new stream.Readable() + source._read = function (n) { + n = Math.min(count, n) + count -= n + source.push(Buffer.alloc(n)) + } + + let unpipedDest + source.unpipe = function (dest) { + unpipedDest = dest + stream.Readable.prototype.unpipe.call(this, dest) + } + + const dest = new stream.Writable() + dest._write = function 
(chunk, encoding, cb) { + cb() + } + + source.pipe(dest) + + let gotErr = null + dest.on('error', function (err) { + gotErr = err + }) + + let unpipedSource + dest.on('unpipe', function (src) { + unpipedSource = src + }) + + const err = new Error('This stream turned into bacon.') + dest.emit('error', err) + t.strictEqual(gotErr, err) + t.strictEqual(unpipedSource, source) + t.strictEqual(unpipedDest, dest) +}) + +test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) { + t.plan(3) + + let count = 1000 + + const source = new stream.Readable() + source._read = function (n) { + n = Math.min(count, n) + count -= n + source.push(Buffer.alloc(n)) + } + + let unpipedDest + source.unpipe = function (dest) { + unpipedDest = dest + stream.Readable.prototype.unpipe.call(this, dest) + } + + const dest = new stream.Writable() + dest._write = function (chunk, encoding, cb) { + cb() + } + + source.pipe(dest) + + let unpipedSource + dest.on('unpipe', function (src) { + unpipedSource = src + }) + + const err = new Error('This stream turned into bacon.') + + let gotErr = null + try { + dest.emit('error', err) + } catch (e) { + gotErr = e + } + t.strictEqual(gotErr, err) + t.strictEqual(unpipedSource, source) + t.strictEqual(unpipedDest, dest) +}) diff --git a/src/test/browser/test-stream2-pipe-error-once-listener.js b/src/test/browser/test-stream2-pipe-error-once-listener.js new file mode 100644 index 0000000000..afb29324b6 --- /dev/null +++ b/src/test/browser/test-stream2-pipe-error-once-listener.js @@ -0,0 +1,39 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const stream = require('../../lib') + +test('pipe error once listener', function (t) { + t.plan(1) + + const Read = function () { + stream.Readable.call(this) + } + inherits(Read, stream.Readable) + + Read.prototype._read = function (size) { + this.push('x') + this.push(null) + } + + const Write = function () { + stream.Writable.call(this) + } + inherits(Write, stream.Writable) + + Write.prototype._write = function (buffer, encoding, cb) { + this.emit('error', new Error('boom')) + this.emit('alldone') + } + + const read = new Read() + const write = new Write() + + write.once('error', () => {}) + write.once('alldone', function () { + t.ok(true) + }) + + read.pipe(write) +}) diff --git a/src/test/browser/test-stream2-push.js b/src/test/browser/test-stream2-push.js new file mode 100644 index 0000000000..ba7c4eb39e --- /dev/null +++ b/src/test/browser/test-stream2-push.js @@ -0,0 +1,117 @@ +'use strict' + +const test = require('tape') +const { EventEmitter: EE } = require('events') +const { Readable, Writable } = require('../../lib') + +test('push', function (t) { + t.plan(33) + + const stream = new Readable({ + highWaterMark: 16, + encoding: 'utf8' + }) + + const source = new EE() + + stream._read = function () { + // console.error('stream._read'); + readStart() + } + + let ended = false + stream.on('end', function () { + ended = true + }) + + source.on('data', function (chunk) { + const ret = stream.push(chunk) + // console.error('data', stream._readableState.length); + if (!ret) { + readStop() + } + }) + + source.on('end', function () { + stream.push(null) + }) + + let reading = false + + function readStart() { + // console.error('readStart'); + reading = true + } + + function readStop() { + // console.error('readStop'); + reading = false + process.nextTick(function () { + const r = stream.read() + if (r !== null) { + writer.write(r) + } + }) + } + + const writer = new Writable({ + 
decodeStrings: false + }) + + const written = [] + + const expectWritten = [ + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg' + ] + + writer._write = function (chunk, encoding, cb) { + // console.error('WRITE %s', chunk); + written.push(chunk) + process.nextTick(cb) + } + + writer.on('finish', finish) + + // now emit some chunks. + + const chunk = 'asdfg' + + let set = 0 + readStart() + data() + function data() { + t.ok(reading) + source.emit('data', chunk) + t.ok(reading) + source.emit('data', chunk) + t.ok(reading) + source.emit('data', chunk) + t.ok(reading) + source.emit('data', chunk) + t.notOk(reading) + if (set++ < 5) { + setTimeout(data, 10) + } else { + end() + } + } + + function finish() { + // console.error('finish'); + t.deepEqual(written, expectWritten) + } + + function end() { + source.emit('end') + t.notOk(reading) + writer.end(stream.read()) + setTimeout(function () { + t.ok(ended) + }) + } +}) diff --git a/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js b/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js new file mode 100644 index 0000000000..aa2fce315c --- /dev/null +++ b/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js @@ -0,0 +1,93 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('readable empty buffer no eof 1', function (t) { + t.plan(1) + + const r = new Readable() + + // should not end when we get a Buffer(0) or '' as the _read result + // that just means that there is *temporarily* no data, but to go + // ahead and try again later. + // + // note that this is very unusual. it only works for crypto streams + // because the other side of the stream will call read(0) to cycle + // data through openssl. that's why we set the timeouts to call + // r.read(0) again later, otherwise there is no more work being done + // and the process just exits. + + const buf = Buffer.alloc(5) + buf.fill('x') + let reads = 5 + r._read = function (n) { + switch (reads--) { + case 0: + return r.push(null) // EOF + case 1: + return r.push(buf) + case 2: + setTimeout(r.read.bind(r, 0), 50) + return r.push(Buffer.alloc(0)) // Not-EOF! 
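// The distinction the comment above leans on, shown as a standalone sketch
// that is not part of the patch (the "EofSketch"/"es" names exist only
// here), assuming Node-core-compatible semantics: pushing a zero-length
// Buffer or '' only means "no data right now" and leaves the stream open;
// push(null) is the one and only EOF signal.
const { Readable: EofSketch } = require('../../lib')
const es = new EofSketch({ read() {} })
console.log(es.push(Buffer.alloc(0))) // true: stream still wants more data
console.log(es._readableState.ended) // false: an empty chunk is not EOF
es.push(null) // the actual EOF signal
console.log(es._readableState.ended) // true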
+ case 3: + setTimeout(r.read.bind(r, 0), 50) + return process.nextTick(function () { + return r.push(Buffer.alloc(0)) + }) + case 4: + setTimeout(r.read.bind(r, 0), 50) + return setTimeout(function () { + return r.push(Buffer.alloc(0)) + }) + case 5: + return setTimeout(function () { + return r.push(buf) + }) + default: + throw new Error('unreachable') + } + } + + const results = [] + function flow() { + let chunk + while ((chunk = r.read()) !== null) { + results.push(chunk + '') + } + } + r.on('readable', flow) + r.on('end', function () { + results.push('EOF') + t.deepEqual(results, ['xxxxx', 'xxxxx', 'EOF']) + }) + flow() +}) + +test('readable empty buffer no eof 2', function (t) { + t.plan(1) + + const r = new Readable({ encoding: 'base64' }) + let reads = 5 + r._read = function (n) { + if (!reads--) { + return r.push(null) // EOF + } else { + return r.push(Buffer.from('x')) + } + } + + const results = [] + function flow() { + let chunk + while ((chunk = r.read()) !== null) { + results.push(chunk + '') + } + } + r.on('readable', flow) + r.on('end', function () { + results.push('EOF') + t.deepEqual(results, ['eHh4', 'eHg=', 'EOF']) + }) + flow() +}) diff --git a/src/test/browser/test-stream2-readable-from-list.js b/src/test/browser/test-stream2-readable-from-list.js new file mode 100644 index 0000000000..2bc0809c07 --- /dev/null +++ b/src/test/browser/test-stream2-readable-from-list.js @@ -0,0 +1,65 @@ +'use strict' + +const test = require('tape') +const { _fromList: fromList } = require('../../lib/_stream_readable') +const BufferList = require('../../lib/internal/streams/buffer_list') + +function bufferListFromArray(arr) { + const bl = new BufferList() + for (let i = 0; i < arr.length; ++i) { + bl.push(arr[i]) + } + return bl +} + +test('buffers', function (t) { + t.plan(5) + + let list = [Buffer.from('foog'), Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')] + list = bufferListFromArray(list) + + // read more than the first element. + let ret = fromList(6, { buffer: list, length: 16 }) + t.equal(ret.toString(), 'foogba') + + // read exactly the first element. + ret = fromList(2, { buffer: list, length: 10 }) + t.equal(ret.toString(), 'rk') + + // read less than the first element. + ret = fromList(2, { buffer: list, length: 8 }) + t.equal(ret.toString(), 'ba') + + // read more than we have. + ret = fromList(100, { buffer: list, length: 6 }) + t.equal(ret.toString(), 'zykuel') + + // all consumed. + t.same(list, new BufferList()) +}) + +test('strings', function (t) { + t.plan(5) + + let list = ['foog', 'bark', 'bazy', 'kuel'] + list = bufferListFromArray(list) + + // read more than the first element. + let ret = fromList(6, { buffer: list, length: 16, decoder: true }) + t.equal(ret, 'foogba') + + // read exactly the first element. + ret = fromList(2, { buffer: list, length: 10, decoder: true }) + t.equal(ret, 'rk') + + // read less than the first element. + ret = fromList(2, { buffer: list, length: 8, decoder: true }) + t.equal(ret, 'ba') + + // read more than we have. + ret = fromList(100, { buffer: list, length: 6, decoder: true }) + t.equal(ret, 'zykuel') + + // all consumed. 
+ t.same(list, new BufferList()) +}) diff --git a/src/test/browser/test-stream2-readable-legacy-drain.js b/src/test/browser/test-stream2-readable-legacy-drain.js new file mode 100644 index 0000000000..c4c8ca1816 --- /dev/null +++ b/src/test/browser/test-stream2-readable-legacy-drain.js @@ -0,0 +1,52 @@ +'use strict' + +const test = require('tape') +const { Stream, Readable } = require('../../lib') + +test('readable legacy drain', function (t) { + t.plan(3) + + const r = new Readable() + const N = 256 + let reads = 0 + r._read = function (n) { + return r.push(++reads === N ? null : Buffer.alloc(1)) + } + + r.on('end', function () { + t.ok(true, 'rended') + }) + + const w = new Stream() + w.writable = true + let writes = 0 + let buffered = 0 + w.write = function (c) { + writes += c.length + buffered += c.length + process.nextTick(drain) + return false + } + + function drain() { + if (buffered > 3) { + t.ok(false, 'to much buffer') + } + buffered = 0 + w.emit('drain') + } + + w.end = function () { + t.equal(writes, 255) + t.ok(true, 'wended') + } + + // Just for kicks, let's mess with the drain count. + // This verifies that even if it gets negative in the + // pipe() cleanup function, we'll still function properly. + r.on('readable', function () { + w.emit('drain') + }) + + r.pipe(w) +}) diff --git a/src/test/browser/test-stream2-readable-non-empty-end.js b/src/test/browser/test-stream2-readable-non-empty-end.js new file mode 100644 index 0000000000..bd1c29bc5d --- /dev/null +++ b/src/test/browser/test-stream2-readable-non-empty-end.js @@ -0,0 +1,58 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('non empty end', function (t) { + t.plan(4) + + let len = 0 + const chunks = new Array(10) + for (let i = 1; i <= 10; i++) { + chunks[i - 1] = Buffer.alloc(i) + len += i + } + + const test = new Readable() + let n = 0 + test._read = function (size) { + const chunk = chunks[n++] + setTimeout(function () { + test.push(chunk === undefined ? 
null : chunk) + }) + } + + test.on('end', thrower) + function thrower() { + throw new Error('this should not happen!') + } + + let bytesread = 0 + test.on('readable', function () { + const b = len - bytesread - 1 + const res = test.read(b) + if (res) { + bytesread += res.length + // console.error('br=%d len=%d', bytesread, len); + setTimeout(next) + } + test.read(0) + }) + test.read(0) + + function next() { + // now let's make 'end' happen + test.removeListener('end', thrower) + + test.on('end', function () { + t.ok(true, 'end emitted') + }) + + // one to get the last byte + let r = test.read() + t.ok(r) + t.equal(r.length, 1) + r = test.read() + t.equal(r, null) + } +}) diff --git a/src/test/browser/test-stream2-readable-wrap-empty.js b/src/test/browser/test-stream2-readable-wrap-empty.js new file mode 100644 index 0000000000..ea870bc99a --- /dev/null +++ b/src/test/browser/test-stream2-readable-wrap-empty.js @@ -0,0 +1,23 @@ +'use strict' + +const test = require('tape') +const { EventEmitter: EE } = require('events') +const Readable = require('../../lib') + +test('wrap empty', function (t) { + t.plan(1) + + const oldStream = new EE() + oldStream.pause = function () {} + oldStream.resume = function () {} + + const newStream = new Readable().wrap(oldStream) + + newStream + .on('readable', function () {}) + .on('end', function () { + t.ok(true, 'ended') + }) + + oldStream.emit('end') +}) diff --git a/src/test/browser/test-stream2-readable-wrap.js b/src/test/browser/test-stream2-readable-wrap.js new file mode 100644 index 0000000000..e98f039f40 --- /dev/null +++ b/src/test/browser/test-stream2-readable-wrap.js @@ -0,0 +1,94 @@ +'use strict' + +const test = require('tape') +const { EventEmitter: EE } = require('events') +const { Readable, Writable } = require('../../lib') + +let run = 0 + +function runTest(highWaterMark, objectMode, produce) { + test('run #' + ++run, (t) => { + t.plan(4) + + const old = new EE() + const r = new Readable({ highWaterMark: highWaterMark, objectMode: objectMode }) + t.equal(r, r.wrap(old)) + + let ended = false + r.on('end', function () { + ended = true + }) + + old.pause = function () { + // console.error('old.pause()'); + old.emit('pause') + flowing = false + } + + old.resume = function () { + // console.error('old.resume()'); + old.emit('resume') + flow() + } + + let flowing + let chunks = 10 + let oldEnded = false + const expected = [] + function flow() { + flowing = true + // eslint-disable-next-line no-unmodified-loop-condition + while (flowing && chunks-- > 0) { + const item = produce() + expected.push(item) + // console.log('old.emit', chunks, flowing); + old.emit('data', item) + // console.log('after emit', chunks, flowing); + } + if (chunks <= 0) { + oldEnded = true + // console.log('old end', chunks, flowing); + old.emit('end') + } + } + + const w = new Writable({ highWaterMark: highWaterMark * 2, objectMode: objectMode }) + const written = [] + w._write = function (chunk, encoding, cb) { + // console.log('_write', chunk); + written.push(chunk) + setTimeout(cb) + } + + w.on('finish', function () { + performAsserts() + }) + + r.pipe(w) + + flow() + + function performAsserts() { + t.ok(ended) + t.ok(oldEnded) + t.deepEqual(written, expected) + } + }) +} + +runTest(100, false, function () { + return Buffer.alloc(100) +}) + +runTest(10, false, function () { + return Buffer.from('xxxxxxxxxx') +}) + +runTest(1, true, function () { + return { foo: 'bar' } +}) + +const objectChunks = [5, 'a', false, 0, '', 'xyz', { x: 4 }, 7, [], 555] +runTest(1, true, 
function () { + return objectChunks.shift() +}) diff --git a/src/test/browser/test-stream2-set-encoding.js b/src/test/browser/test-stream2-set-encoding.js new file mode 100644 index 0000000000..2b0de36be2 --- /dev/null +++ b/src/test/browser/test-stream2-set-encoding.js @@ -0,0 +1,335 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Readable } = require('../../lib') + +inherits(TestReader, Readable) + +function TestReader(n, opts) { + Readable.call(this, opts) + + this.pos = 0 + this.len = n || 100 +} + +TestReader.prototype._read = function (n) { + setTimeout( + function () { + if (this.pos >= this.len) { + // double push(null) to test eos handling + this.push(null) + return this.push(null) + } + + n = Math.min(n, this.len - this.pos) + if (n <= 0) { + // double push(null) to test eos handling + this.push(null) + return this.push(null) + } + + this.pos += n + const ret = Buffer.alloc(n) + ret.fill('a') + + // console.log('this.push(ret)', ret); + + return this.push(ret) + }.bind(this), + 1 + ) +} + +test('setEncoding utf8', function (t) { + t.plan(1) + + const tr = new TestReader(100) + tr.setEncoding('utf8') + const out = [] + const expect = [ + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('setEncoding hex', function (t) { + t.plan(1) + + const tr = new TestReader(100) + tr.setEncoding('hex') + const out = [] + const expect = [ + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('setEncoding hex with read(13)', function (t) { + t.plan(1) + + const tr = new TestReader(100) + tr.setEncoding('hex') + const out = [] + const expect = [ + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '16161' + ] + + tr.on('readable', function flow() { + // console.log('readable once'); + let chunk + while ((chunk = tr.read(13)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + // console.log('END'); + t.same(out, expect) + }) +}) + +test('setEncoding base64', function (t) { + t.plan(1) + + const tr = new TestReader(100) + tr.setEncoding('base64') + const out = [] + const expect = [ + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYQ==' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('encoding: utf8', function (t) { + t.plan(1) + + const tr = new 
TestReader(100, { encoding: 'utf8' }) + const out = [] + const expect = [ + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('encoding: hex', function (t) { + t.plan(1) + + const tr = new TestReader(100, { encoding: 'hex' }) + const out = [] + const expect = [ + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('encoding: hex with read(13)', function (t) { + t.plan(1) + + const tr = new TestReader(100, { encoding: 'hex' }) + const out = [] + const expect = [ + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '16161' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(13)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('encoding: base64', function (t) { + t.plan(1) + + const tr = new TestReader(100, { encoding: 'base64' }) + const out = [] + const expect = [ + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYQ==' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('chainable', function (t) { + t.plan(1) + + const tr = new TestReader(100) + t.equal(tr.setEncoding('utf8'), tr) +}) diff --git a/src/test/browser/test-stream2-transform.js b/src/test/browser/test-stream2-transform.js new file mode 100644 index 0000000000..4fa2edb25b --- /dev/null +++ b/src/test/browser/test-stream2-transform.js @@ -0,0 +1,485 @@ +'use strict' + +const test = require('tape') +const { PassThrough, Transform } = require('../../lib') + +test('writable side consumption', function (t) { + t.plan(4) + + const tx = new Transform({ + highWaterMark: 10 + }) + + let transformed = 0 + tx._transform = function (chunk, encoding, cb) { + transformed += chunk.length + tx.push(chunk) + cb() + } + + for (let i = 1; i <= 10; i++) { + tx.write(Buffer.alloc(i)) + } + tx.end() + + t.equal(tx._readableState.length, 10) + t.equal(transformed, 10) + t.equal(tx._transformState.writechunk.length, 5) + t.same( + tx._writableState.getBuffer().map(function (c) { + return c.chunk.length + }), + [6, 7, 8, 9, 10] + ) +}) + +test('passthrough', function (t) { + t.plan(4) + + const pt = new PassThrough() + + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + + 
t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5).toString(), 'zykue') + t.equal(pt.read(5).toString(), 'l') +}) + +test('object passthrough', function (t) { + t.plan(7) + + const pt = new PassThrough({ objectMode: true }) + + pt.write(1) + pt.write(true) + pt.write(false) + pt.write(0) + pt.write('foo') + pt.write('') + pt.write({ a: 'b' }) + pt.end() + + t.equal(pt.read(), 1) + t.equal(pt.read(), true) + t.equal(pt.read(), false) + t.equal(pt.read(), 0) + t.equal(pt.read(), 'foo') + t.equal(pt.read(), '') + t.same(pt.read(), { a: 'b' }) +}) + +test('simple transform', function (t) { + t.plan(4) + + const pt = new Transform() + pt._transform = function (c, e, cb) { + const ret = Buffer.alloc(c.length) + ret.fill('x') + pt.push(ret) + cb() + } + + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + + t.equal(pt.read(5).toString(), 'xxxxx') + t.equal(pt.read(5).toString(), 'xxxxx') + t.equal(pt.read(5).toString(), 'xxxxx') + t.equal(pt.read(5).toString(), 'x') +}) + +test('simple object transform', function (t) { + t.plan(7) + + const pt = new Transform({ objectMode: true }) + pt._transform = function (c, e, cb) { + pt.push(JSON.stringify(c)) + cb() + } + + pt.write(1) + pt.write(true) + pt.write(false) + pt.write(0) + pt.write('foo') + pt.write('') + pt.write({ a: 'b' }) + pt.end() + + t.equal(pt.read(), '1') + t.equal(pt.read(), 'true') + t.equal(pt.read(), 'false') + t.equal(pt.read(), '0') + t.equal(pt.read(), '"foo"') + t.equal(pt.read(), '""') + t.equal(pt.read(), '{"a":"b"}') +}) + +test('async passthrough', function (t) { + t.plan(4) + + const pt = new Transform() + pt._transform = function (chunk, encoding, cb) { + setTimeout(function () { + pt.push(chunk) + cb() + }, 10) + } + + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + + pt.on('finish', function () { + t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5).toString(), 'zykue') + t.equal(pt.read(5).toString(), 'l') + }) +}) + +test('assymetric transform (expand)', function (t) { + t.plan(7) + + const pt = new Transform() + + // emit each chunk 2 times. + pt._transform = function (chunk, encoding, cb) { + setTimeout(function () { + pt.push(chunk) + setTimeout(function () { + pt.push(chunk) + cb() + }, 10) + }, 10) + } + + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + + pt.on('finish', function () { + t.equal(pt.read(5).toString(), 'foogf') + t.equal(pt.read(5).toString(), 'oogba') + t.equal(pt.read(5).toString(), 'rkbar') + t.equal(pt.read(5).toString(), 'kbazy') + t.equal(pt.read(5).toString(), 'bazyk') + t.equal(pt.read(5).toString(), 'uelku') + t.equal(pt.read(5).toString(), 'el') + }) +}) + +test('assymetric transform (compress)', function (t) { + t.plan(3) + + const pt = new Transform() + + // each output is the first char of 3 consecutive chunks, + // or whatever's left. + pt.state = '' + + pt._transform = function (chunk, encoding, cb) { + if (!chunk) { + chunk = '' + } + const s = chunk.toString() + setTimeout( + function () { + this.state += s.charAt(0) + if (this.state.length === 3) { + pt.push(Buffer.from(this.state)) + this.state = '' + } + cb() + }.bind(this), + 10 + ) + } + + pt._flush = function (cb) { + // just output whatever we have. 
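// As an illustrative aside that is not part of the patch (the "FlushSketch"
// and "joiner" names exist only here), this is what the _flush hook in this
// test is for, assuming Node-core-compatible Transform semantics: it runs
// after end() has been called and every written chunk has passed through
// _transform, right before the readable side emits 'end', so it is the
// place to push whatever state the transform is still holding.
const { Transform: FlushSketch } = require('../../lib')
const joiner = new FlushSketch({
  transform(chunk, enc, cb) {
    this.pending = (this.pending || '') + chunk // buffer, emit nothing yet
    cb()
  },
  flush(cb) {
    this.push(this.pending) // emitted after the last write, before 'end'
    cb()
  }
})
joiner.on('data', (d) => console.log(d.toString())) // 'ab'
joiner.write('a')
joiner.end('b')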
+ pt.push(Buffer.from(this.state)) + this.state = '' + cb() + } + + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.write(Buffer.from('eeee')) + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.write(Buffer.from('eeee')) + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.end() + + // 'abcdeabcdeabcd' + pt.on('finish', function () { + t.equal(pt.read(5).toString(), 'abcde') + t.equal(pt.read(5).toString(), 'abcde') + t.equal(pt.read(5).toString(), 'abcd') + }) +}) + +// this tests for a stall when data is written to a full stream +// that has empty transforms. +test('complex transform', function (t) { + t.plan(2) + + let count = 0 + let saved = null + const pt = new Transform({ highWaterMark: 3 }) + pt._transform = function (c, e, cb) { + if (count++ === 1) { + saved = c + } else { + if (saved) { + pt.push(saved) + saved = null + } + pt.push(c) + } + + cb() + } + + pt.once('readable', function () { + process.nextTick(function () { + pt.write(Buffer.from('d')) + pt.write(Buffer.from('ef'), function () { + pt.end() + }) + t.equal(pt.read().toString(), 'abcdef') + t.equal(pt.read(), null) + }) + }) + + pt.write(Buffer.from('abc')) +}) + +test('passthrough event emission', function (t) { + t.plan(11) + + const pt = new PassThrough() + let emits = 0 + pt.on('readable', function () { + // console.error('>>> emit readable %d', emits); + emits++ + }) + + pt.write(Buffer.from('foog')) + + // console.error('need emit 0'); + pt.write(Buffer.from('bark')) + + setTimeout(() => { + // console.error('should have emitted readable now 1 === %d', emits) + t.equal(emits, 1) + + t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5) + '', 'null') + + // console.error('need emit 1'); + + pt.write(Buffer.from('bazy')) + // console.error('should have emitted, but not again'); + pt.write(Buffer.from('kuel')) + + // console.error('should have emitted readable now 2 === %d', emits); + setTimeout(() => { + t.equal(emits, 2) + + t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5).toString(), 'zykue') + t.equal(pt.read(5), null) + + // console.error('need emit 2'); + + pt.end() + + setTimeout(() => { + t.equal(emits, 3) + + t.equal(pt.read(5).toString(), 'l') + t.equal(pt.read(5), null) + + // console.error('should not have emitted again'); + t.equal(emits, 3) + }) + }) + }) +}) + +test('passthrough event emission reordered', function (t) { + t.plan(10) + + const pt = new PassThrough() + let emits = 0 + pt.on('readable', function () { + // console.error('emit readable', emits); + emits++ + }) + + pt.write(Buffer.from('foog')) + // console.error('need emit 0'); + pt.write(Buffer.from('bark')) + + setTimeout(() => { + // console.error('should have emitted readable now 1 === %d', emits); + t.equal(emits, 1) + + t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5), null) + + // console.error('need emit 1'); + pt.once('readable', function () { + t.equal(pt.read(5).toString(), 'arkba') + + t.equal(pt.read(5), null) + + // console.error('need emit 2'); + pt.once('readable', function () { + t.equal(pt.read(5).toString(), 'zykue') + t.equal(pt.read(5), null) + pt.once('readable', function () { + t.equal(pt.read(5).toString(), 'l') + t.equal(pt.read(5), null) + t.equal(emits, 4) + }) + pt.end() + }) + pt.write(Buffer.from('kuel')) + }) + + 
pt.write(Buffer.from('bazy')) + }) +}) + +test('passthrough facaded', function (t) { + t.plan(1) + + // console.error('passthrough facaded'); + const pt = new PassThrough() + const datas = [] + pt.on('data', function (chunk) { + datas.push(chunk.toString()) + }) + + pt.on('end', function () { + t.same(datas, ['foog', 'bark', 'bazy', 'kuel']) + }) + + pt.write(Buffer.from('foog')) + setTimeout(function () { + pt.write(Buffer.from('bark')) + setTimeout(function () { + pt.write(Buffer.from('bazy')) + setTimeout(function () { + pt.write(Buffer.from('kuel')) + setTimeout(function () { + pt.end() + }, 10) + }, 10) + }, 10) + }, 10) +}) + +test('object transform (json parse)', function (t) { + t.plan(5) + + // console.error('json parse stream'); + const jp = new Transform({ objectMode: true }) + jp._transform = function (data, encoding, cb) { + try { + jp.push(JSON.parse(data)) + cb() + } catch (er) { + cb(er) + } + } + + // anything except null/undefined is fine. + // those are "magic" in the stream API, because they signal EOF. + const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }] + + let ended = false + jp.on('end', function () { + ended = true + }) + + forEach(objects, function (obj) { + jp.write(JSON.stringify(obj)) + const res = jp.read() + t.same(res, obj) + }) + + jp.end() + // read one more time to get the 'end' event + jp.read() + + process.nextTick(function () { + t.ok(ended) + }) +}) + +test('object transform (json stringify)', function (t) { + t.plan(5) + + // console.error('json parse stream'); + const js = new Transform({ objectMode: true }) + js._transform = function (data, encoding, cb) { + try { + js.push(JSON.stringify(data)) + cb() + } catch (er) { + cb(er) + } + } + + // anything except null/undefined is fine. + // those are "magic" in the stream API, because they signal EOF. 
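// A short aside on why null is excluded above (a standalone sketch, not part
// of the patch; the "NullSketch"/"ns" names exist only here), assuming
// Node-core-compatible semantics: the stream API reserves null as the EOF
// marker, so on the writable side write(null) is rejected outright, even in
// objectMode.
const { Transform: NullSketch } = require('../../lib')
const ns = new NullSketch({ objectMode: true, transform(d, e, cb) { cb(null, d) } })
ns.on('error', function (err) {
  console.log(err.code) // 'ERR_STREAM_NULL_VALUES'
})
ns.write(null) // always an error, regardless of objectMode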
+ const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }] + + let ended = false + js.on('end', function () { + ended = true + }) + + forEach(objects, function (obj) { + js.write(obj) + const res = js.read() + t.equal(res, JSON.stringify(obj)) + }) + + js.end() + // read one more time to get the 'end' event + js.read() + + process.nextTick(function () { + t.ok(ended) + }) +}) + +function forEach(xs, f) { + for (let i = 0, l = xs.length; i < l; i++) { + f(xs[i], i) + } +} diff --git a/src/test/browser/test-stream2-unpipe-drain.js b/src/test/browser/test-stream2-unpipe-drain.js new file mode 100644 index 0000000000..88ab66e5a4 --- /dev/null +++ b/src/test/browser/test-stream2-unpipe-drain.js @@ -0,0 +1,63 @@ +'use strict' + +const test = require('tape') +const crypto = require('crypto') +const inherits = require('inherits') +const stream = require('../../lib') + +test('unpipe drain', function (t) { + try { + crypto.randomBytes(9) + } catch (_) { + t.plan(1) + t.ok(true, 'does not suport random, skipping') + return + } + + t.plan(2) + + function TestWriter() { + stream.Writable.call(this) + } + inherits(TestWriter, stream.Writable) + + TestWriter.prototype._write = function (buffer, encoding, callback) { + // console.log('write called'); + // super slow write stream (callback never called) + } + + const dest = new TestWriter() + + function TestReader(id) { + stream.Readable.call(this) + this.reads = 0 + } + inherits(TestReader, stream.Readable) + + TestReader.prototype._read = function (size) { + this.reads += 1 + this.push(crypto.randomBytes(size)) + } + + const src1 = new TestReader() + const src2 = new TestReader() + + src1.pipe(dest) + + src1.once('readable', function () { + process.nextTick(function () { + src2.pipe(dest) + + src2.once('readable', function () { + process.nextTick(function () { + src1.unpipe(dest) + }) + }) + }) + }) + + dest.on('unpipe', function () { + t.equal(src1.reads, 2) + t.equal(src2.reads, 1) + }) +}) diff --git a/src/test/browser/test-stream2-writable.js b/src/test/browser/test-stream2-writable.js new file mode 100644 index 0000000000..cbcedf6a89 --- /dev/null +++ b/src/test/browser/test-stream2-writable.js @@ -0,0 +1,430 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Duplex, Writable } = require('../../lib') + +inherits(TestWriter, Writable) + +function TestWriter() { + Writable.apply(this, arguments) + this.buffer = [] + this.written = 0 +} + +TestWriter.prototype._write = function (chunk, encoding, cb) { + // simulate a small unpredictable latency + setTimeout( + function () { + this.buffer.push(chunk.toString()) + this.written += chunk.length + cb() + }.bind(this), + Math.floor(Math.random() * 10) + ) +} +inherits(Processstdout, Writable) + +function Processstdout() { + Writable.apply(this, arguments) + this.buffer = [] + this.written = 0 +} + +Processstdout.prototype._write = function (chunk, encoding, cb) { + // console.log(chunk.toString()); + cb() +} +const chunks = new Array(50) +for (let i = 0; i < chunks.length; i++) { + chunks[i] = new Array(i + 1).join('x') +} + +if (!process.stdout) { + process.stdout = new Processstdout() +} + +test('write fast', function (t) { + t.plan(1) + + const tw = new TestWriter({ + highWaterMark: 100 + }) + + tw.on('finish', function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + }) + + forEach(chunks, function (chunk) { + // screw backpressure. Just buffer it all up. 
+ tw.write(chunk) + }) + tw.end() +}) + +test('write slow', function (t) { + t.plan(1) + + const tw = new TestWriter({ + highWaterMark: 100 + }) + + tw.on('finish', function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + }) + + let i = 0 + ;(function W() { + tw.write(chunks[i++]) + if (i < chunks.length) { + setTimeout(W, 10) + } else { + tw.end() + } + })() +}) + +test('write backpressure', function (t) { + t.plan(19) + + const tw = new TestWriter({ + highWaterMark: 50 + }) + + let drains = 0 + + tw.on('finish', function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + t.equal(drains, 17) + }) + + tw.on('drain', function () { + drains++ + }) + + let i = 0 + ;(function W() { + let ret + do { + ret = tw.write(chunks[i++]) + } while (ret !== false && i < chunks.length) + + if (i < chunks.length) { + t.ok(tw._writableState.length >= 50) + tw.once('drain', W) + } else { + tw.end() + } + })() +}) + +test('write bufferize', function (t) { + t.plan(50) + + const tw = new TestWriter({ + highWaterMark: 100 + }) + + const encodings = [ + 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined + ] + + tw.on('finish', function () { + forEach(chunks, function (chunk, i) { + const actual = Buffer.from(tw.buffer[i]) + chunk = Buffer.from(chunk) + + // Some combination of encoding and length result in the last byte replaced by two extra null bytes + if (actual[actual.length - 1] === 0) { + chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) + } + + t.same(actual, chunk, 'got the expected chunks ' + i) + }) + }) + + forEach(chunks, function (chunk, i) { + const enc = encodings[i % encodings.length] + chunk = Buffer.from(chunk) + tw.write(chunk.toString(enc), enc) + }) + tw.end() +}) + +test('write no bufferize', function (t) { + t.plan(100) + + const tw = new TestWriter({ + highWaterMark: 100, + decodeStrings: false + }) + + tw._write = function (chunk, encoding, cb) { + t.equals(typeof chunk, 'string') + chunk = Buffer.from(chunk, encoding) + return TestWriter.prototype._write.call(this, chunk, encoding, cb) + } + + const encodings = [ + 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined + ] + + tw.on('finish', function () { + forEach(chunks, function (chunk, i) { + const actual = Buffer.from(tw.buffer[i]) + chunk = Buffer.from(chunk) + + // Some combination of encoding and length result in the last byte replaced by two extra null bytes + if (actual[actual.length - 1] === 0) { + chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) + } + + t.same(actual, chunk, 'got the expected chunks ' + i) + }) + }) + + forEach(chunks, function (chunk, i) { + const enc = encodings[i % encodings.length] + chunk = Buffer.from(chunk) + tw.write(chunk.toString(enc), enc) + }) + tw.end() +}) + +test('write callbacks', function (t) { + t.plan(2) + + const callbacks = chunks + .map(function (chunk, i) { + return [ + i, + function (er) { + callbacks._called[i] = chunk + } + ] + }) + .reduce(function (set, x) { + set['callback-' + x[0]] = x[1] + return set + }, {}) + callbacks._called = [] + + const tw = new TestWriter({ + highWaterMark: 100 + }) + + tw.on('finish', function () { + process.nextTick(function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + t.same(callbacks._called, chunks, 'called all callbacks') + }) + }) + + forEach(chunks, function (chunk, i) { + tw.write(chunk, 
callbacks['callback-' + i]) + }) + tw.end() +}) + +test('end callback', function (t) { + t.plan(1) + + const tw = new TestWriter() + tw.end(() => { + t.ok(true) + }) +}) + +test('end callback with chunk', function (t) { + t.plan(1) + + const tw = new TestWriter() + tw.end(Buffer.from('hello world'), () => { + t.ok(true) + }) +}) + +test('end callback with chunk and encoding', function (t) { + t.plan(1) + + const tw = new TestWriter() + tw.end('hello world', 'ascii', () => { + t.ok(true) + }) +}) + +test('end callback after .write() call', function (t) { + t.plan(1) + + const tw = new TestWriter() + tw.write(Buffer.from('hello world')) + tw.end(() => { + t.ok(true) + }) +}) + +test('end callback called after write callback', function (t) { + t.plan(1) + + const tw = new TestWriter() + let writeCalledback = false + tw.write(Buffer.from('hello world'), function () { + writeCalledback = true + }) + tw.end(function () { + t.equal(writeCalledback, true) + }) +}) + +test('encoding should be ignored for buffers', function (t) { + t.plan(1) + + const tw = new Writable() + const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb' + tw._write = function (chunk, encoding, cb) { + t.equal(chunk.toString('hex'), hex) + } + const buf = Buffer.from(hex, 'hex') + tw.write(buf, 'binary') +}) + +test('writables are not pipable', function (t) { + t.plan(1) + + const w = new Writable() + w._write = function () {} + let gotError = false + w.on('error', function (er) { + gotError = true + }) + w.pipe(process.stdout) + t.ok(gotError) +}) + +test('duplexes are pipable', function (t) { + t.plan(1) + + const d = new Duplex() + d._read = function () {} + d._write = function () {} + let gotError = false + d.on('error', function (er) { + gotError = true + }) + d.pipe(process.stdout) + t.notOk(gotError) +}) + +test('end(chunk) two times is an error', function (t) { + t.plan(2) + + const w = new Writable() + w._write = function () {} + let gotError = false + w.on('error', function (er) { + gotError = true + t.equal(er.message, 'write after end') + }) + w.end('this is the end') + w.end('and so is this') + process.nextTick(function () { + t.ok(gotError) + }) +}) + +test('dont end while writing', function (t) { + t.plan(2) + + const w = new Writable() + let wrote = false + w._write = function (chunk, e, cb) { + t.notOk(this.writing) + wrote = true + this.writing = true + setTimeout(function () { + this.writing = false + cb() + }) + } + w.on('finish', function () { + t.ok(wrote) + }) + w.write(Buffer.alloc(0)) + w.end() +}) + +test('finish does not come before write cb', function (t) { + t.plan(1) + + const w = new Writable() + let writeCb = false + w._write = function (chunk, e, cb) { + setTimeout(function () { + writeCb = true + cb() + }, 10) + } + w.on('finish', function () { + t.ok(writeCb) + }) + w.write(Buffer.alloc(0)) + w.end() +}) + +test('finish does not come before sync _write cb', function (t) { + t.plan(1) + + const w = new Writable() + let writeCb = false + w._write = function (chunk, e, cb) { + cb() + } + w.on('finish', function () { + t.ok(writeCb) + }) + w.write(Buffer.alloc(0), function (er) { + writeCb = true + }) + w.end() +}) + +test('finish is emitted if last chunk is empty', function (t) { + t.plan(1) + + const w = new Writable() + w._write = function (chunk, e, cb) { + process.nextTick(cb) + } + w.on('finish', () => { + t.ok(true) + }) + + w.write(Buffer.alloc(1)) + w.end(Buffer.alloc(0)) +}) + +function forEach(xs, f) { + for (let i = 0, l = xs.length; i < l; i++) { + f(xs[i], i) + } +} diff --git 
a/src/test/browser/test-stream3-pause-then-read.js b/src/test/browser/test-stream3-pause-then-read.js new file mode 100644 index 0000000000..34aa272d88 --- /dev/null +++ b/src/test/browser/test-stream3-pause-then-read.js @@ -0,0 +1,147 @@ +'use strict' + +const test = require('tape') +const { Readable, Writable } = require('../../lib') + +test('pause then read', function (t) { + t.plan(7) + + const totalChunks = 100 + const chunkSize = 99 + const expectTotalData = totalChunks * chunkSize + let expectEndingData = expectTotalData + + const r = new Readable({ highWaterMark: 1000 }) + let chunks = totalChunks + r._read = function (n) { + if (!(chunks % 2)) { + setImmediate(push) + } else if (!(chunks % 3)) { + process.nextTick(push) + } else { + push() + } + } + + let totalPushed = 0 + function push() { + const chunk = chunks-- > 0 ? Buffer.alloc(chunkSize) : null + if (chunk) { + totalPushed += chunk.length + chunk.fill('x') + } + r.push(chunk) + } + + read100() + + // first we read 100 bytes + function read100() { + readn(100, onData) + } + + function readn(n, then) { + // console.error('read %d', n); + expectEndingData -= n + ;(function read() { + const c = r.read(n) + if (!c) { + r.once('readable', read) + } else { + t.equal(c.length, n) + t.notOk(r._readableState.flowing) + then() + } + })() + } + + // then we listen to some data events + function onData() { + expectEndingData -= 100 + // console.error('onData'); + let seen = 0 + r.on('data', function od(c) { + seen += c.length + if (seen >= 100) { + // seen enough + r.removeListener('data', od) + r.pause() + if (seen > 100) { + // oh no, seen too much! + // put the extra back. + const diff = seen - 100 + r.unshift(c.slice(c.length - diff)) + // console.error('seen too much', seen, diff) + } + + // Nothing should be lost in between + setImmediate(pipeLittle) + } + }) + } + + // Just pipe 200 bytes, then unshift the extra and unpipe + function pipeLittle() { + expectEndingData -= 200 + // console.error('pipe a little'); + const w = new Writable() + let written = 0 + w.on('finish', function () { + t.equal(written, 200) + setImmediate(read1234) + }) + w._write = function (chunk, encoding, cb) { + written += chunk.length + if (written >= 200) { + r.unpipe(w) + w.end() + cb() + if (written > 200) { + const diff = written - 200 + written -= diff + r.unshift(chunk.slice(chunk.length - diff)) + } + } else { + setImmediate(cb) + } + } + r.pipe(w) + } + + // now read 1234 more bytes + function read1234() { + readn(1234, resumePause) + } + + function resumePause() { + // console.error('resumePause'); + // don't read anything, just resume and re-pause a whole bunch + r.resume() + r.pause() + r.resume() + r.pause() + r.resume() + r.pause() + r.resume() + r.pause() + r.resume() + r.pause() + setImmediate(pipe) + } + + function pipe() { + // console.error('pipe the rest'); + const w = new Writable() + let written = 0 + w._write = function (chunk, encoding, cb) { + written += chunk.length + cb() + } + w.on('finish', function () { + // console.error('written', written, totalPushed); + t.equal(written, expectEndingData) + t.equal(totalPushed, expectTotalData) + }) + r.pipe(w) + } +}) diff --git a/test/ours/errors.js b/src/test/ours/test-errors.js similarity index 69% rename from test/ours/errors.js rename to src/test/ours/test-errors.js index fc32605923..bd9c810c51 100644 --- a/test/ours/errors.js +++ b/src/test/ours/test-errors.js @@ -1,148 +1,132 @@ -var tap = require('tap'); -var assert = require('assert'); -var errors = require('../../errors').codes; - 
-function expect (err, Base, name, code, message) { - assert(err instanceof Base); - assert.strictEqual(err.name, name); - assert.strictEqual(err.code, code); - assert.strictEqual(err.message, message); +'use strict' + +const t = require('tap') +const { codes: errors } = require('../../lib/internal/errors') + +function checkError(err, Base, name, code, message) { + t.ok(err instanceof Base) + t.equal(err.name, `${name} [${code}]`) + t.equal(err.code, code) + t.equal(err.message, message) } -expect( +// Update this numbers based on the number of checkError below multiplied by the assertions within checkError +t.plan(17 * 4) + +checkError( new errors.ERR_INVALID_OPT_VALUE('name', 0), TypeError, 'TypeError', 'ERR_INVALID_OPT_VALUE', 'The value "0" is invalid for option "name"' -); +) -expect( +checkError( new errors.ERR_INVALID_OPT_VALUE('name', undefined), TypeError, 'TypeError', 'ERR_INVALID_OPT_VALUE', 'The value "undefined" is invalid for option "name"' -); +) -expect( +checkError( new errors.ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], 0), TypeError, 'TypeError', 'ERR_INVALID_ARG_TYPE', 'The "chunk" argument must be one of type string, Buffer, or Uint8Array. Received type number' -); +) -expect( +checkError( new errors.ERR_INVALID_ARG_TYPE('first argument', 'not string', 'foo'), TypeError, 'TypeError', 'ERR_INVALID_ARG_TYPE', 'The first argument must not be of type string. Received type string' -); +) -expect( +checkError( new errors.ERR_INVALID_ARG_TYPE('obj.prop', 'string', undefined), TypeError, 'TypeError', 'ERR_INVALID_ARG_TYPE', 'The "obj.prop" property must be of type string. Received type undefined' -); +) -expect( +checkError( new errors.ERR_STREAM_PUSH_AFTER_EOF(), Error, 'Error', 'ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF' -); +) -expect( +checkError( new errors.ERR_METHOD_NOT_IMPLEMENTED('_read()'), Error, 'Error', 'ERR_METHOD_NOT_IMPLEMENTED', 'The _read() method is not implemented' -); +) -expect( +checkError( new errors.ERR_METHOD_NOT_IMPLEMENTED('_write()'), Error, 'Error', 'ERR_METHOD_NOT_IMPLEMENTED', 'The _write() method is not implemented' -); +) -expect( - new errors.ERR_STREAM_PREMATURE_CLOSE(), - Error, - 'Error', - 'ERR_STREAM_PREMATURE_CLOSE', - 'Premature close' -); +checkError(new errors.ERR_STREAM_PREMATURE_CLOSE(), Error, 'Error', 'ERR_STREAM_PREMATURE_CLOSE', 'Premature close') -expect( +checkError( new errors.ERR_STREAM_DESTROYED('pipe'), Error, 'Error', 'ERR_STREAM_DESTROYED', 'Cannot call pipe after a stream was destroyed' -); +) -expect( +checkError( new errors.ERR_STREAM_DESTROYED('write'), Error, 'Error', 'ERR_STREAM_DESTROYED', 'Cannot call write after a stream was destroyed' -); +) -expect( +checkError( new errors.ERR_MULTIPLE_CALLBACK(), Error, 'Error', 'ERR_MULTIPLE_CALLBACK', 'Callback called multiple times' -); +) -expect( - new errors.ERR_STREAM_CANNOT_PIPE(), - Error, - 'Error', - 'ERR_STREAM_CANNOT_PIPE', - 'Cannot pipe, not readable' -); +checkError(new errors.ERR_STREAM_CANNOT_PIPE(), Error, 'Error', 'ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable') -expect( - new errors.ERR_STREAM_WRITE_AFTER_END(), - Error, - 'Error', - 'ERR_STREAM_WRITE_AFTER_END', - 'write after end' -); +checkError(new errors.ERR_STREAM_WRITE_AFTER_END(), Error, 'Error', 'ERR_STREAM_WRITE_AFTER_END', 'write after end') -expect( +checkError( new errors.ERR_STREAM_NULL_VALUES(), TypeError, 'TypeError', 'ERR_STREAM_NULL_VALUES', 'May not write null values to stream' -); +) -expect( +checkError( new errors.ERR_UNKNOWN_ENCODING('foo'), 
TypeError, 'TypeError', 'ERR_UNKNOWN_ENCODING', 'Unknown encoding: foo' -); +) -expect( +checkError( new errors.ERR_STREAM_UNSHIFT_AFTER_END_EVENT(), Error, 'Error', 'ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event' -); - -require('tap').pass('sync done'); +) diff --git a/src/test/ours/test-lolex-fake-timers.js b/src/test/ours/test-lolex-fake-timers.js new file mode 100644 index 0000000000..3198952962 --- /dev/null +++ b/src/test/ours/test-lolex-fake-timers.js @@ -0,0 +1,40 @@ +'use strict' + +require('../common') +const t = require('tap') +const util = require('util') +const lolex = require('lolex') +const Transform = require('../../lib').Transform + +t.plan(1) + +function MyTransform() { + Transform.call(this) +} + +util.inherits(MyTransform, Transform) + +const clock = lolex.install({ toFake: ['setImmediate', 'nextTick'] }) +let stream2DataCalled = false + +const stream = new MyTransform() +stream.on('data', function () { + stream.on('end', function () { + const stream2 = new MyTransform() + stream2.on('data', function () { + stream2.on('end', function () { + stream2DataCalled = true + }) + setImmediate(function () { + stream2.end() + }) + }) + stream2.emit('data') + }) + stream.end() +}) +stream.emit('data') + +clock.runAll() +clock.uninstall() +t.ok(stream2DataCalled) diff --git a/src/test/ours/test-stream-sync-write.js b/src/test/ours/test-stream-sync-write.js new file mode 100644 index 0000000000..7a467ec2dd --- /dev/null +++ b/src/test/ours/test-stream-sync-write.js @@ -0,0 +1,44 @@ +'use strict' + +require('../common') +const t = require('tap') +const util = require('util') +const stream = require('../../lib') +const WritableStream = stream.Writable + +t.plan(1) + +const InternalStream = function () { + WritableStream.call(this) +} +util.inherits(InternalStream, WritableStream) + +let invocations = 0 +InternalStream.prototype._write = function (chunk, encoding, callback) { + callback() +} + +const internalStream = new InternalStream() + +const ExternalStream = function (writable) { + this._writable = writable + WritableStream.call(this) +} +util.inherits(ExternalStream, WritableStream) + +ExternalStream.prototype._write = function (chunk, encoding, callback) { + this._writable.write(chunk, encoding, callback) +} + +const externalStream = new ExternalStream(internalStream) + +for (let i = 0; i < 2000; i++) { + externalStream.write(i.toString(), () => { + invocations++ + }) +} + +externalStream.end() +externalStream.on('finish', () => { + t.equal(invocations, 2000) +}) diff --git a/src/test/test-browser.js b/src/test/test-browser.js new file mode 100644 index 0000000000..e7ad511125 --- /dev/null +++ b/src/test/test-browser.js @@ -0,0 +1,84 @@ +'use strict' + +const test = require('tape') + +// if (!global.console) { +// global.console = {} +// } +// if (!global.console.log) { +// global.console.log = function () {} +// } +// if (!global.console.error) { +// global.console.error = global.console.log +// } +// if (!global.console.info) { +// global.console.info = global.console.log +// } + +// // TODO: add replacements instead +// global.process = { +// env: {}, +// on: function () {}, +// cwd: function () { +// return '/' +// }, +// binding: function () { +// return { +// hasTracing: false +// } +// } +// } + +test('streams', function (t) { + require('./browser/test-stream-big-packet')(t) + require('./browser/test-stream-big-push')(t) + require('./browser/test-stream-duplex')(t) + require('./browser/test-stream-end-paused')(t) + 
require('./browser/test-stream-ispaused')(t) + require('./browser/test-stream-finished')(t) + require('./browser/test-stream-pipeline')(t) + require('./browser/test-stream-pipe-after-end')(t) + require('./browser/test-stream-pipe-cleanup')(t) + require('./browser/test-stream-pipe-cleanup-pause')(t) + require('./browser/test-stream-pipe-error-handling')(t) + require('./browser/test-stream-pipe-event')(t) + require('./browser/test-stream-push-order')(t) + require('./browser/test-stream-push-strings')(t) + require('./browser/test-stream-readable-constructor-set-methods')(t) + require('./browser/test-stream-readable-event')(t) + require('./browser/test-stream-transform-constructor-set-methods')(t) + require('./browser/test-stream-transform-objectmode-falsey-value')(t) + require('./browser/test-stream-transform-split-objectmode')(t) + require('./browser/test-stream-unshift-empty-chunk')(t) + require('./browser/test-stream-unshift-read-race')(t) + require('./browser/test-stream-writable-change-default-encoding')(t) + require('./browser/test-stream-writable-constructor-set-methods')(t) + require('./browser/test-stream-writable-decoded-encoding')(t) + require('./browser/test-stream-writev')(t) + require('./browser/test-stream-sync-write')(t) + require('./browser/test-stream-pipe-without-listenerCount') +}) + +test('streams 2', function (t) { + require('./browser/test-stream2-base64-single-char-read-end')(t) + require('./browser/test-stream2-compatibility')(t) + require('./browser/test-stream2-large-read-stall')(t) + require('./browser/test-stream2-objects')(t) + require('./browser/test-stream2-pipe-error-handling')(t) + require('./browser/test-stream2-pipe-error-once-listener')(t) + require('./browser/test-stream2-push')(t) + require('./browser/test-stream2-readable-empty-buffer-no-eof')(t) + // require('./browser/test-stream2-readable-from-list')(t); + // require('./browser/test-stream2-transform')(t); + require('./browser/test-stream2-set-encoding')(t) + require('./browser/test-stream2-readable-legacy-drain')(t) + require('./browser/test-stream2-readable-wrap-empty')(t) + require('./browser/test-stream2-readable-non-empty-end')(t) + require('./browser/test-stream2-readable-wrap')(t) + require('./browser/test-stream2-unpipe-drain')(t) + require('./browser/test-stream2-writable')(t) +}) + +test('streams 3', function (t) { + require('./browser/test-stream3-pause-then-read')(t) +}) diff --git a/src/util.js b/src/util.js new file mode 100644 index 0000000000..e1b6d12a62 --- /dev/null +++ b/src/util.js @@ -0,0 +1,49 @@ +'use strict' + +let debugUtil +try { + debugUtil = require('util') +} catch (e) { + // No-op +} + +module.exports = { + inherits: require('inherits'), + debuglog: debugUtil?.debuglog ? 
debugUtil.debuglog : () => function () {}, + once(callback) { + let called = false + + return function (...args) { + if (called) { + return + } + called = true + callback.apply(this, args) + } + }, + // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args + format(format, ...args) { + return format.replace(/%([sdifj])/g, function (...[_unused, type]) { + const replacement = args.shift() + + if (type === 'f') { + return replacement.toFixed(6) + } else if (type === 'j') { + return JSON.stringify(replacement) + } else { + return replacement.toString() + } + }) + }, + promisify: function (fn) { + return new Promise((resolve, reject) => { + fn((err, ...args) => { + if (err) { + return reject(err) + } + + return resolve(...args) + }) + }) + } +} diff --git a/src/uv-browser.js b/src/uv-browser.js new file mode 100644 index 0000000000..c293a47b1c --- /dev/null +++ b/src/uv-browser.js @@ -0,0 +1,93 @@ +'use strict' + +// Regenerate this file if needed by requiring process.binding('uv') in the REPL + +module.exports = { + errmap: new Map([ + [-7, ['E2BIG', 'argument list too long']], + [-13, ['EACCES', 'permission denied']], + [-48, ['EADDRINUSE', 'address already in use']], + [-49, ['EADDRNOTAVAIL', 'address not available']], + [-47, ['EAFNOSUPPORT', 'address family not supported']], + [-35, ['EAGAIN', 'resource temporarily unavailable']], + [-3000, ['EAI_ADDRFAMILY', 'address family not supported']], + [-3001, ['EAI_AGAIN', 'temporary failure']], + [-3002, ['EAI_BADFLAGS', 'bad ai_flags value']], + [-3013, ['EAI_BADHINTS', 'invalid value for hints']], + [-3003, ['EAI_CANCELED', 'request canceled']], + [-3004, ['EAI_FAIL', 'permanent failure']], + [-3005, ['EAI_FAMILY', 'ai_family not supported']], + [-3006, ['EAI_MEMORY', 'out of memory']], + [-3007, ['EAI_NODATA', 'no address']], + [-3008, ['EAI_NONAME', 'unknown node or service']], + [-3009, ['EAI_OVERFLOW', 'argument buffer overflow']], + [-3014, ['EAI_PROTOCOL', 'resolved protocol is unknown']], + [-3010, ['EAI_SERVICE', 'service not available for socket type']], + [-3011, ['EAI_SOCKTYPE', 'socket type not supported']], + [-37, ['EALREADY', 'connection already in progress']], + [-9, ['EBADF', 'bad file descriptor']], + [-16, ['EBUSY', 'resource busy or locked']], + [-89, ['ECANCELED', 'operation canceled']], + [-4080, ['ECHARSET', 'invalid Unicode character']], + [-53, ['ECONNABORTED', 'software caused connection abort']], + [-61, ['ECONNREFUSED', 'connection refused']], + [-54, ['ECONNRESET', 'connection reset by peer']], + [-39, ['EDESTADDRREQ', 'destination address required']], + [-17, ['EEXIST', 'file already exists']], + [-14, ['EFAULT', 'bad address in system call argument']], + [-27, ['EFBIG', 'file too large']], + [-65, ['EHOSTUNREACH', 'host is unreachable']], + [-4, ['EINTR', 'interrupted system call']], + [-22, ['EINVAL', 'invalid argument']], + [-5, ['EIO', 'i/o error']], + [-56, ['EISCONN', 'socket is already connected']], + [-21, ['EISDIR', 'illegal operation on a directory']], + [-62, ['ELOOP', 'too many symbolic links encountered']], + [-24, ['EMFILE', 'too many open files']], + [-40, ['EMSGSIZE', 'message too long']], + [-63, ['ENAMETOOLONG', 'name too long']], + [-50, ['ENETDOWN', 'network is down']], + [-51, ['ENETUNREACH', 'network is unreachable']], + [-23, ['ENFILE', 'file table overflow']], + [-55, ['ENOBUFS', 'no buffer space available']], + [-19, ['ENODEV', 'no such device']], + [-2, ['ENOENT', 'no such file or directory']], + [-12, ['ENOMEM', 'not enough memory']], + [-4056, ['ENONET', 'machine 
is not on the network']], + [-42, ['ENOPROTOOPT', 'protocol not available']], + [-28, ['ENOSPC', 'no space left on device']], + [-78, ['ENOSYS', 'function not implemented']], + [-57, ['ENOTCONN', 'socket is not connected']], + [-20, ['ENOTDIR', 'not a directory']], + [-66, ['ENOTEMPTY', 'directory not empty']], + [-38, ['ENOTSOCK', 'socket operation on non-socket']], + [-45, ['ENOTSUP', 'operation not supported on socket']], + [-84, ['EOVERFLOW', 'value too large for defined data type']], + [-1, ['EPERM', 'operation not permitted']], + [-32, ['EPIPE', 'broken pipe']], + [-100, ['EPROTO', 'protocol error']], + [-43, ['EPROTONOSUPPORT', 'protocol not supported']], + [-41, ['EPROTOTYPE', 'protocol wrong type for socket']], + [-34, ['ERANGE', 'result too large']], + [-30, ['EROFS', 'read-only file system']], + [-58, ['ESHUTDOWN', 'cannot send after transport endpoint shutdown']], + [-29, ['ESPIPE', 'invalid seek']], + [-3, ['ESRCH', 'no such process']], + [-60, ['ETIMEDOUT', 'connection timed out']], + [-26, ['ETXTBSY', 'text file is busy']], + [-18, ['EXDEV', 'cross-device link not permitted']], + [-4094, ['UNKNOWN', 'unknown error']], + [-4095, ['EOF', 'end of file']], + [-6, ['ENXIO', 'no such device or address']], + [-31, ['EMLINK', 'too many links']], + [-64, ['EHOSTDOWN', 'host is down']], + [-4030, ['EREMOTEIO', 'remote I/O error']], + [-25, ['ENOTTY', 'inappropriate ioctl for device']], + [-79, ['EFTYPE', 'inappropriate file type or format']], + [-92, ['EILSEQ', 'illegal byte sequence']], + [-44, ['ESOCKTNOSUPPORT', 'socket type not supported']] + ]), + UV_EAI_MEMORY: -3006, + UV_EAI_NODATA: -3007, + UV_EAI_NONAME: -3008 +} diff --git a/tap.yml b/tap.yml new file mode 100644 index 0000000000..dfaff10006 --- /dev/null +++ b/tap.yml @@ -0,0 +1,5 @@ +--- +bail: true +coverage: false +node-arg: + - --expose-internals diff --git a/test/browser.js b/test/browser.js deleted file mode 100644 index d903d1f11b..0000000000 --- a/test/browser.js +++ /dev/null @@ -1,81 +0,0 @@ -if (!global.console) { - global.console = {}; -} -if (!global.console.log) { - global.console.log = function () {}; -} -if (!global.console.error) { - global.console.error = global.console.log; -} -if (!global.console.info) { - global.console.info = global.console.log; -} -var test = require('tape'); -var util = require('util'); - -// TODO: add replacements instead -global.process = { - env: {}, - on: function () {}, - cwd: function () { - return '/'; - }, - binding: function () { - return { - hasTracing: false - }; - } -}; - -test('streams', function (t) { - require('./browser/test-stream-big-packet')(t); - require('./browser/test-stream-big-push')(t); - require('./browser/test-stream-duplex')(t); - require('./browser/test-stream-end-paused')(t); - require('./browser/test-stream-ispaused')(t); - require('./browser/test-stream-finished')(t); - require('./browser/test-stream-pipeline')(t); - require('./browser/test-stream-pipe-after-end')(t); - require('./browser/test-stream-pipe-cleanup')(t); - require('./browser/test-stream-pipe-cleanup-pause')(t); - require('./browser/test-stream-pipe-error-handling')(t); - require('./browser/test-stream-pipe-event')(t); - require('./browser/test-stream-push-order')(t); - require('./browser/test-stream-push-strings')(t); - require('./browser/test-stream-readable-constructor-set-methods')(t); - require('./browser/test-stream-readable-event')(t); - require('./browser/test-stream-transform-constructor-set-methods')(t); - require('./browser/test-stream-transform-objectmode-falsey-value')(t); - 
require('./browser/test-stream-transform-split-objectmode')(t); - require('./browser/test-stream-unshift-empty-chunk')(t); - require('./browser/test-stream-unshift-read-race')(t); - require('./browser/test-stream-writable-change-default-encoding')(t); - require('./browser/test-stream-writable-constructor-set-methods')(t); - require('./browser/test-stream-writable-decoded-encoding')(t); - require('./browser/test-stream-writev')(t); - require('./browser/test-stream-sync-write')(t); - require('./browser/test-stream-pipe-without-listenerCount'); -}); - -test('streams 2', function (t) { - require('./browser/test-stream2-base64-single-char-read-end')(t); - require('./browser/test-stream2-compatibility')(t); - require('./browser/test-stream2-large-read-stall')(t); - require('./browser/test-stream2-objects')(t); - require('./browser/test-stream2-pipe-error-handling')(t); - require('./browser/test-stream2-pipe-error-once-listener')(t); - require('./browser/test-stream2-push')(t); - require('./browser/test-stream2-readable-empty-buffer-no-eof')(t); - // require('./browser/test-stream2-readable-from-list')(t); - // require('./browser/test-stream2-transform')(t); - require('./browser/test-stream2-set-encoding')(t); - require('./browser/test-stream2-readable-legacy-drain')(t); - require('./browser/test-stream2-readable-wrap-empty')(t); - require('./browser/test-stream2-readable-non-empty-end')(t); - require('./browser/test-stream2-readable-wrap')(t); - require('./browser/test-stream2-unpipe-drain')(t); - require('./browser/test-stream2-writable')(t); -}); -test('streams 3', function (t) { - require('./browser/test-stream3-pause-then-read')(t); -}); diff --git a/test/browser/test-stream-big-packet.js b/test/browser/test-stream-big-packet.js deleted file mode 100644 index 2c64ce8e59..0000000000 --- a/test/browser/test-stream-big-packet.js +++ /dev/null @@ -1,62 +0,0 @@ -'use strict'; -var common = require('../common'); -var inherits = require('inherits'); -var stream = require('../../'); - -module.exports = function (t) { - t.test('big packet', function (t) { - t.plan(3); - var passed = false; - - function PassThrough() { - stream.Transform.call(this); - }; - inherits(PassThrough, stream.Transform); - PassThrough.prototype._transform = function(chunk, encoding, done) { - this.push(chunk); - done(); - }; - - function TestStream() { - stream.Transform.call(this); - }; - inherits(TestStream, stream.Transform); - TestStream.prototype._transform = function(chunk, encoding, done) { - if (!passed) { - // Char 'a' only exists in the last write - passed = indexOf(chunk.toString(), 'a') >= 0; - } - if (passed) { - t.ok(passed); - } - done(); - }; - - var s1 = new PassThrough(); - var s2 = new PassThrough(); - var s3 = new TestStream(); - s1.pipe(s3); - // Don't let s2 auto close which may close s3 - s2.pipe(s3, {end: false}); - - // We must write a buffer larger than highWaterMark - var big = Buffer.alloc(s1._writableState.highWaterMark + 1); - big.fill('x'); - - // Since big is larger than highWaterMark, it will be buffered internally. - t.ok(!s1.write(big)); - // 'tiny' is small enough to pass through internal buffer. 
- t.ok(s2.write('tiny')); - - // Write some small data in next IO loop, which will never be written to s3 - // Because 'drain' event is not emitted from s1 and s1 is still paused - setImmediate(s1.write.bind(s1), 'later'); - - function indexOf (xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - return -1; - } - }); -} diff --git a/test/browser/test-stream-big-push.js b/test/browser/test-stream-big-push.js deleted file mode 100644 index 7403e16937..0000000000 --- a/test/browser/test-stream-big-push.js +++ /dev/null @@ -1,68 +0,0 @@ -'use strict'; -var common = require('../common'); -var stream = require('../../'); -module.exports = function (t) { - t.test('big push', function (t) { - - var str = 'asdfasdfasdfasdfasdf'; - - var r = new stream.Readable({ - highWaterMark: 5, - encoding: 'utf8' - }); - - var reads = 0; - var eofed = false; - var ended = false; - - r._read = function(n) { - if (reads === 0) { - setTimeout(function() { - r.push(str); - }); - reads++; - } else if (reads === 1) { - var ret = r.push(str); - t.equal(ret, false); - reads++; - } else { - t.notOk(eofed); - eofed = true; - r.push(null); - } - }; - - r.on('end', function() { - ended = true; - }); - - // push some data in to start. - // we've never gotten any read event at this point. - var ret = r.push(str); - // should be false. > hwm - t.notOk(ret); - var chunk = r.read(); - t.equal(chunk, str); - chunk = r.read(); - t.equal(chunk, null); - - r.once('readable', function() { - // this time, we'll get *all* the remaining data, because - // it's been added synchronously, as the read WOULD take - // us below the hwm, and so it triggered a _read() again, - // which synchronously added more, which we then return. - chunk = r.read(); - t.equal(chunk, str + str); - - chunk = r.read(); - t.equal(chunk, null); - }); - - r.on('end', function() { - t.ok(eofed); - t.ok(ended); - t.equal(reads, 2); - t.end(); - }); - }); -} diff --git a/test/browser/test-stream-duplex.js b/test/browser/test-stream-duplex.js deleted file mode 100644 index 9bfd6af145..0000000000 --- a/test/browser/test-stream-duplex.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict'; -var common = require('../common'); - -var Duplex = require('../../').Transform; - -var stream = new Duplex({ objectMode: true }); -module.exports = function (t) { - t.test('duplex', function (t) { - t.plan(4); - t.ok(stream._readableState.objectMode); - t.ok(stream._writableState.objectMode); - - var written; - var read; - - stream._write = function(obj, _, cb) { - written = obj; - cb(); - }; - - stream._read = function() {}; - - stream.on('data', function(obj) { - read = obj; - }); - - stream.push({ val: 1 }); - stream.end({ val: 2 }); - - stream.on('end', function() { - t.equal(read.val, 1); - t.equal(written.val, 2); - }); - }); -} diff --git a/test/browser/test-stream-end-paused.js b/test/browser/test-stream-end-paused.js deleted file mode 100644 index ff56dd8127..0000000000 --- a/test/browser/test-stream-end-paused.js +++ /dev/null @@ -1,32 +0,0 @@ -'use strict'; -var common = require('../common'); - - -// Make sure we don't miss the end event for paused 0-length streams - -var Readable = require('../../').Readable; -var stream = new Readable(); -module.exports = function (t) { - t.test('end pause', function (t) { - t.plan(2); - var calledRead = false; - stream._read = function() { - t.notOk(calledRead); - calledRead = true; - this.push(null); - }; - - stream.on('data', function() { - throw new Error('should not ever get data'); - }); - 
stream.pause(); - - setTimeout(function() { - stream.on('end', function() { - t.ok(calledRead); - }); - stream.resume(); - }); - - }); -} diff --git a/test/browser/test-stream-finished.js b/test/browser/test-stream-finished.js deleted file mode 100644 index bd0de3e858..0000000000 --- a/test/browser/test-stream-finished.js +++ /dev/null @@ -1,60 +0,0 @@ -"use strict"; - - -var common = require('../common'); - -var _require = require('../../'), - Writable = _require.Writable, - Readable = _require.Readable, - Transform = _require.Transform, - finished = _require.finished; - -module.exports = function (t) { - t.test('readable finished', function (t) { - - var rs = new Readable({ - read: function read() {} - }); - finished(rs, common.mustCall(function (err) { - t.ok(!err, 'no error'); - t.end(); - })); - rs.push(null); - rs.resume(); - }); - t.test('writable finished', function (t) { - var ws = new Writable({ - write: function write(data, enc, cb) { - cb(); - } - }); - finished(ws, common.mustCall(function (err) { - t.ok(!err, 'no error'); - t.end(); - })); - ws.end(); - }); - t.test('transform finished', function (t) { - var tr = new Transform({ - transform: function transform(data, enc, cb) { - cb(); - } - }); - var finish = false; - var ended = false; - tr.on('end', function () { - ended = true; - }); - tr.on('finish', function () { - finish = true; - }); - finished(tr, common.mustCall(function (err) { - t.ok(!err, 'no error'); - t.ok(finish); - t.ok(ended); - t.end(); - })); - tr.end(); - tr.resume(); - }); -}; diff --git a/test/browser/test-stream-ispaused.js b/test/browser/test-stream-ispaused.js deleted file mode 100644 index d080f41ba4..0000000000 --- a/test/browser/test-stream-ispaused.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict'; -var common = require('../common'); - -var stream = require('../../'); -module.exports = function (t) { - t.test('is paused', function (t) { - var readable = new stream.Readable(); - - // _read is a noop, here. - readable._read = Function(); - - // default state of a stream is not "paused" - t.notOk(readable.isPaused()); - - // make the stream start flowing... - readable.on('data', Function()); - - // still not paused. 
- t.notOk(readable.isPaused()); - - readable.pause(); - t.ok(readable.isPaused()); - readable.resume(); - t.notOk(readable.isPaused()); - t.end(); - }); -} diff --git a/test/browser/test-stream-pipe-after-end.js b/test/browser/test-stream-pipe-after-end.js deleted file mode 100644 index 0ca97b3d70..0000000000 --- a/test/browser/test-stream-pipe-after-end.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict'; -var common = require('../common'); - -var Readable = require('../../lib/_stream_readable'); -var Writable = require('../../lib/_stream_writable'); -var inherits = require('inherits'); -module.exports = function (t) { - t.test('pipe after end', function (t) { - t.plan(4); - inherits(TestReadable, Readable); - function TestReadable(opt) { - if (!(this instanceof TestReadable)) - return new TestReadable(opt); - Readable.call(this, opt); - this._ended = false; - } - - TestReadable.prototype._read = function(n) { - if (this._ended) - this.emit('error', new Error('_read called twice')); - this._ended = true; - this.push(null); - }; - - inherits(TestWritable, Writable); - function TestWritable(opt) { - if (!(this instanceof TestWritable)) - return new TestWritable(opt); - Writable.call(this, opt); - this._written = []; - } - - TestWritable.prototype._write = function(chunk, encoding, cb) { - this._written.push(chunk); - cb(); - }; - - // this one should not emit 'end' until we read() from it later. - var ender = new TestReadable(); - var enderEnded = false; - - // what happens when you pipe() a Readable that's already ended? - var piper = new TestReadable(); - // pushes EOF null, and length=0, so this will trigger 'end' - piper.read(); - - setTimeout(function() { - ender.on('end', function() { - enderEnded = true; - t.ok(true, 'enderEnded'); - }); - t.notOk(enderEnded); - var c = ender.read(); - t.equal(c, null); - - var w = new TestWritable(); - w.on('finish', function() { - t.ok(true, 'writableFinished'); - }); - piper.pipe(w); - - }); - }); -} diff --git a/test/browser/test-stream-pipe-cleanup-pause.js b/test/browser/test-stream-pipe-cleanup-pause.js deleted file mode 100644 index 31fb97f7fa..0000000000 --- a/test/browser/test-stream-pipe-cleanup-pause.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; -var common = require('../common'); -var stream = require('../../'); -module.exports = function (t) { - t.test('pipe cleanup pause', function (t) { - t.plan(3); - var reader = new stream.Readable(); - var writer1 = new stream.Writable(); - var writer2 = new stream.Writable(); - - // 560000 is chosen here because it is larger than the (default) highWaterMark - // and will cause `.write()` to return false - // See: https://github.com/nodejs/node/issues/2323 - var buffer = Buffer.alloc(560000); - - reader._read = function() {}; - - writer1._write = common.mustCall(function(chunk, encoding, cb) { - this.emit('chunk-received'); - cb(); - }, 1); - writer1.once('chunk-received', function() { - reader.unpipe(writer1); - reader.pipe(writer2); - reader.push(buffer); - setImmediate(function() { - reader.push(buffer); - setImmediate(function() { - reader.push(buffer); - }); - }); - }); - - writer2._write = function(chunk, encoding, cb) { - t.ok(true); - cb(); - }; - - reader.pipe(writer1); - reader.push(buffer); - }); -}; diff --git a/test/browser/test-stream-pipe-cleanup.js b/test/browser/test-stream-pipe-cleanup.js deleted file mode 100644 index dd2b6d5269..0000000000 --- a/test/browser/test-stream-pipe-cleanup.js +++ /dev/null @@ -1,108 +0,0 @@ -'use strict'; -// This test asserts that Stream.prototype.pipe 
does not leave listeners -// hanging on the source or dest. - -var common = require('../common'); -var stream = require('../../'); -var inherits = require('inherits'); -module.exports = function (t) { - t.test('pipe cleanup', function (t) { - if (/^v0\.8\./.test(process.version)) - return t.end(); - - function Writable() { - this.writable = true; - this.endCalls = 0; - require('stream').Stream.call(this); - } - inherits(Writable, require('stream').Stream); - Writable.prototype.end = function() { - this.endCalls++; - }; - - Writable.prototype.destroy = function() { - this.endCalls++; - }; - - function Readable() { - this.readable = true; - require('stream').Stream.call(this); - } - inherits(Readable, require('stream').Stream); - - function Duplex() { - this.readable = true; - Writable.call(this); - } - inherits(Duplex, Writable); - - var i = 0; - var limit = 100; - - var w = new Writable(); - - var r; - - for (i = 0; i < limit; i++) { - r = new Readable(); - r.pipe(w); - r.emit('end'); - } - t.equal(0, r.listeners('end').length); - t.equal(limit, w.endCalls); - - w.endCalls = 0; - - for (i = 0; i < limit; i++) { - r = new Readable(); - r.pipe(w); - r.emit('close'); - } - t.equal(0, r.listeners('close').length); - t.equal(limit, w.endCalls); - - w.endCalls = 0; - - r = new Readable(); - - for (i = 0; i < limit; i++) { - w = new Writable(); - r.pipe(w); - w.emit('close'); - } - t.equal(0, w.listeners('close').length); - - r = new Readable(); - w = new Writable(); - var d = new Duplex(); - r.pipe(d); // pipeline A - d.pipe(w); // pipeline B - t.equal(r.listeners('end').length, 2); // A.onend, A.cleanup - t.equal(r.listeners('close').length, 2); // A.onclose, A.cleanup - t.equal(d.listeners('end').length, 2); // B.onend, B.cleanup - t.equal(d.listeners('close').length, 3); // A.cleanup, B.onclose, B.cleanup - t.equal(w.listeners('end').length, 0); - t.equal(w.listeners('close').length, 1); // B.cleanup - - r.emit('end'); - t.equal(d.endCalls, 1); - t.equal(w.endCalls, 0); - t.equal(r.listeners('end').length, 0); - t.equal(r.listeners('close').length, 0); - t.equal(d.listeners('end').length, 2); // B.onend, B.cleanup - t.equal(d.listeners('close').length, 2); // B.onclose, B.cleanup - t.equal(w.listeners('end').length, 0); - t.equal(w.listeners('close').length, 1); // B.cleanup - - d.emit('end'); - t.equal(d.endCalls, 1); - t.equal(w.endCalls, 1); - t.equal(r.listeners('end').length, 0); - t.equal(r.listeners('close').length, 0); - t.equal(d.listeners('end').length, 0); - t.equal(d.listeners('close').length, 0); - t.equal(w.listeners('end').length, 0); - t.equal(w.listeners('close').length, 0); - t.end(); - }); -} diff --git a/test/browser/test-stream-pipe-error-handling.js b/test/browser/test-stream-pipe-error-handling.js deleted file mode 100644 index 48a8bb375d..0000000000 --- a/test/browser/test-stream-pipe-error-handling.js +++ /dev/null @@ -1,102 +0,0 @@ -'use strict'; -var common = require('../common'); -var Stream = require('stream').Stream; - -module.exports = function (t) { - t.test('Error Listener Catches', function (t) { - t.plan(1); - var source = new Stream(); - var dest = new Stream(); - - source.pipe(dest); - - var gotErr = null; - source.on('error', function(err) { - gotErr = err; - }); - - var err = new Error('This stream turned into bacon.'); - source.emit('error', err); - t.strictEqual(gotErr, err); - }); - - t.test('Error WithoutListener Throws', function (t) { - t.plan(1); - var source = new Stream(); - var dest = new Stream(); - - source.pipe(dest); - - var err = new 
Error('This stream turned into bacon.'); - - var gotErr = null; - try { - source.emit('error', err); - } catch (e) { - gotErr = e; - } - - t.strictEqual(gotErr, err); - }); - - t.test('Error With Removed Listener Throws', function (t) { - t.plan(2); - var EE = require('events').EventEmitter; - var R = require('../../').Readable; - var W = require('../../').Writable; - - var r = new R(); - var w = new W(); - var removed = false; - - r._read = function() { - setTimeout(function() { - t.ok(removed); - t.throws(function() { - w.emit('error', new Error('fail')); - }); - }); - }; - - w.on('error', myOnError); - r.pipe(w); - w.removeListener('error', myOnError); - removed = true; - - function myOnError(er) { - throw new Error('this should not happen'); - } - }); - - t.test('Error With Removed Listener Throws', function (t) { - t.plan(2); - var EE = require('events').EventEmitter; - var R = require('../../').Readable; - var W = require('../../').Writable; - - var r = new R(); - var w = new W(); - var removed = false; - var caught = false; - - r._read = function() { - setTimeout(function() { - t.ok(removed); - w.emit('error', new Error('fail')); - }); - }; - - w.on('error', myOnError); - w._write = function() {}; - - r.pipe(w); - // Removing some OTHER random listener should not do anything - w.removeListener('error', function() {}); - removed = true; - - function myOnError(er) { - t.notOk(caught); - caught = true; - } - }); -} diff --git a/test/browser/test-stream-pipe-event.js b/test/browser/test-stream-pipe-event.js deleted file mode 100644 index c0d7a606c2..0000000000 --- a/test/browser/test-stream-pipe-event.js +++ /dev/null @@ -1,32 +0,0 @@ -'use strict'; -var common = require('../common'); -var stream = require('../../'); -var inherits = require('inherits'); -module.exports = function (t) { - t.test('pipe event', function (t) { - t.plan(1); - function Writable() { - this.writable = true; - require('stream').Stream.call(this); - } - inherits(Writable, require('stream').Stream); - - function Readable() { - this.readable = true; - require('stream').Stream.call(this); - } - inherits(Readable, require('stream').Stream); - - var passed = false; - - var w = new Writable(); - w.on('pipe', function(src) { - passed = true; - }); - - var r = new Readable(); - r.pipe(w); - - t.ok(passed); - }); -} diff --git a/test/browser/test-stream-pipe-without-listenerCount.js b/test/browser/test-stream-pipe-without-listenerCount.js deleted file mode 100644 index 742e2d233d..0000000000 --- a/test/browser/test-stream-pipe-without-listenerCount.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict'; -var Stream = require('../../'); - -module.exports = function (t) { - t.tets('pipe without listenerCount', function (t) { - t.plan(2); - var r = new Stream({ - read: function (){}}); - r.listenerCount = undefined; - - var w = new Stream(); - w.listenerCount = undefined; - - w.on('pipe', function() { - r.emit('error', new Error('Readable Error')); - w.emit('error', new Error('Writable Error')); - }); - r.on('error', function (e) { - t.ok(e, 'readable error'); - }); - w.on('error', function (e) { - t.ok(e, 'writable error'); - }); - r.pipe(w); - - }); -} diff --git a/test/browser/test-stream-pipeline.js b/test/browser/test-stream-pipeline.js deleted file mode 100644 index 833d58887c..0000000000 --- a/test/browser/test-stream-pipeline.js +++ /dev/null @@ -1,112 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - -var common = require('../common'); - -var _require = require('../../'), - Writable = 
_require.Writable, - Readable = _require.Readable, - Transform = _require.Transform, - finished = _require.finished, - pipeline = _require.pipeline; - -module.exports = function (t) { - t.test('pipeline', function (t) { - var finished = false; - var processed = []; - var expected = [bufferShim.from('a'), bufferShim.from('b'), - bufferShim.from('c')]; - var read = new Readable({ - read: function read() { - } - }); - var write = new Writable({ - write: function write(data, enc, cb) { - processed.push(data); - cb(); - } - }); - write.on('finish', function () { - finished = true; - }); - - for (var i = 0; i < expected.length; i++) { - read.push(expected[i]); - } - - read.push(null); - pipeline(read, write, common.mustCall(function (err) { - t.ok(!err, 'no error'); - t.ok(finished); - t.deepEqual(processed, expected); - t.end(); - })); - }); - t.test('pipeline missing args', function (t) { - var _read = new Readable({ - read: function read() { - } - }); - - t.throws(function () { - pipeline(_read, function () { - }); - }); - t.throws(function () { - pipeline(function () { - }); - }); - t.throws(function () { - pipeline(); - }); - t.end(); - }); - t.test('pipeline error', function (t) { - var _read2 = new Readable({ - read: function read() { - } - }); - - var _write = new Writable({ - write: function write(data, enc, cb) { - cb(); - } - }); - - _read2.push('data'); - - setImmediate(function () { - return _read2.destroy(); - }); - pipeline(_read2, _write, common.mustCall(function (err) { - t.ok(err, 'should have an error'); - t.end(); - })); - }); - t.test('pipeline destroy', function () { - var _read3 = new Readable({ - read: function read() { - } - }); - - var _write2 = new Writable({ - write: function write(data, enc, cb) { - cb(); - } - }); - - _read3.push('data'); - - setImmediate(function () { - return _read3.destroy(new Error('kaboom')); - }); - var dst = pipeline(_read3, _write2, common.mustCall(function (err) { - t.equal(err.message, 'kaboom'); - t.end(); - })); - t.equal(dst, _write2); - }); -}; diff --git a/test/browser/test-stream-push-order.js b/test/browser/test-stream-push-order.js deleted file mode 100644 index 22fe17d1b0..0000000000 --- a/test/browser/test-stream-push-order.js +++ /dev/null @@ -1,33 +0,0 @@ -'use strict'; -var common = require('../common'); -var Readable = require('../../').Readable; -module.exports = function (t) { - t.test('push order', function (t) { - t.plan(1); - var s = new Readable({ - highWaterMark: 20, - encoding: 'ascii' - }); - - var list = ['1', '2', '3', '4', '5', '6']; - - s._read = function(n) { - var one = list.shift(); - if (!one) { - s.push(null); - } else { - var two = list.shift(); - s.push(one); - s.push(two); - } - }; - - var v = s.read(0); - - // ACTUALLY [1, 3, 5, 6, 4, 2] - - setTimeout(function() { - t.equals(s._readableState.buffer.join(','), '1,2,3,4,5,6'); - }); - }); -} diff --git a/test/browser/test-stream-push-strings.js b/test/browser/test-stream-push-strings.js deleted file mode 100644 index 1de240efd8..0000000000 --- a/test/browser/test-stream-push-strings.js +++ /dev/null @@ -1,49 +0,0 @@ -'use strict'; -var common = require('../common'); - -var Readable = require('../../').Readable; -var inherits = require('inherits'); - -module.exports = function (t) { - t.test('push strings', function (t) { - t.plan(2); - inherits(MyStream, Readable); - function MyStream(options) { - Readable.call(this, options); - this._chunks = 3; - } - - MyStream.prototype._read = function(n) { - switch (this._chunks--) { - case 0: - return 
this.push(null); - case 1: - return setTimeout(function() { - this.push('last chunk'); - }.bind(this), 100); - case 2: - return this.push('second to last chunk'); - case 3: - return process.nextTick(function() { - this.push('first chunk'); - }.bind(this)); - default: - throw new Error('?'); - } - }; - var expect = [ 'first chunksecond to last chunk', 'last chunk' ]; - - var ms = new MyStream(); - var results = []; - ms.on('readable', function() { - var chunk; - while (null !== (chunk = ms.read())) - results.push(chunk + ''); - }); - - ms.on('end', function() { - t.equal(ms._chunks, -1); - t.deepEqual(results, expect); - }); - }); -} diff --git a/test/browser/test-stream-readable-constructor-set-methods.js b/test/browser/test-stream-readable-constructor-set-methods.js deleted file mode 100644 index fa0d59ba28..0000000000 --- a/test/browser/test-stream-readable-constructor-set-methods.js +++ /dev/null @@ -1,22 +0,0 @@ -'use strict'; -var common = require('../common'); - -var Readable = require('../../').Readable; -module.exports = function (t) { - t.test('readable constructor set methods', function (t) { - t.plan(2); - var _readCalled = false; - function _read(n) { - _readCalled = true; - this.push(null); - } - - var r = new Readable({ read: _read }); - r.resume(); - - setTimeout(function() { - t.equal(r._read, _read); - t.ok(_readCalled); - }); - }); -} diff --git a/test/browser/test-stream-readable-event.js b/test/browser/test-stream-readable-event.js deleted file mode 100644 index 474168aad4..0000000000 --- a/test/browser/test-stream-readable-event.js +++ /dev/null @@ -1,114 +0,0 @@ -'use strict'; -var common = require('../common'); - -var Readable = require('../../').Readable; - -function first(t) { - // First test, not reading when the readable is added. - // make sure that on('readable', ...) triggers a readable event. - var r = new Readable({ - highWaterMark: 3 - }); - - var _readCalled = false; - r._read = function(n) { - _readCalled = true; - }; - - // This triggers a 'readable' event, which is lost. - r.push(Buffer.from('blerg')); - - var caughtReadable = false; - setTimeout(function() { - // we're testing what we think we are - t.notOk(r._readableState.reading); - r.on('readable', function() { - caughtReadable = true; - setTimeout(function() { - // we're testing what we think we are - t.notOk(_readCalled); - - t.ok(caughtReadable); - t.end(); - }); - }); - }); - - -} - -function second(t) { - // second test, make sure that readable is re-emitted if there's - // already a length, while it IS reading. - - var r = new Readable({ - highWaterMark: 3 - }); - - var _readCalled = false; - r._read = function(n) { - _readCalled = true; - }; - - // This triggers a 'readable' event, which is lost. - r.push(Buffer.from('bl')); - - var caughtReadable = false; - setTimeout(function() { - // assert we're testing what we think we are - t.ok(r._readableState.reading); - r.on('readable', function() { - caughtReadable = true; - setTimeout(function() { - // we're testing what we think we are - t.ok(_readCalled); - - t.ok(caughtReadable); - t.end(); - }); - }); - }); - -} - -function third(t) { - // Third test, not reading when the stream has not passed - // the highWaterMark but *has* reached EOF. - var r = new Readable({ - highWaterMark: 30 - }); - - var _readCalled = false; - r._read = function(n) { - _readCalled = true; - }; - - // This triggers a 'readable' event, which is lost. 
- r.push(Buffer.from('blerg')); - r.push(null); - - var caughtReadable = false; - setTimeout(function() { - // assert we're testing what we think we are - t.notOk(r._readableState.reading); - r.on('readable', function() { - caughtReadable = true; - setTimeout(function() { - // we're testing what we think we are - t.notOk(_readCalled); - - t.ok(caughtReadable); - t.end(); - }); - }); - }); - -}; - -module.exports = function (t) { - t.test('readable events', function (t) { - t.test('first', first); - t.test('second', second); - t.test('third', third); - }); -} diff --git a/test/browser/test-stream-sync-write.js b/test/browser/test-stream-sync-write.js deleted file mode 100644 index c07c1672d2..0000000000 --- a/test/browser/test-stream-sync-write.js +++ /dev/null @@ -1,39 +0,0 @@ -require('../common'); -var inherits = require('inherits'); -var stream = require('../../'); -var WritableStream = stream.Writable; -module.exports = function(t) { - t.test('should bea ble to write sync', function(t) { - var InternalStream = function() { - WritableStream.call(this); - }; - inherits(InternalStream, WritableStream); - - InternalStream.prototype._write = function(chunk, encoding, callback) { - callback(); - }; - - var internalStream = new InternalStream(); - - - - var ExternalStream = function(writable) { - this._writable = writable; - WritableStream.call(this); - }; - inherits(ExternalStream, WritableStream); - - ExternalStream.prototype._write = function(chunk, encoding, callback) { - this._writable.write(chunk, encoding, callback); - }; - - - - var externalStream = new ExternalStream(internalStream); - - for (var i = 0; i < 2000; i++) { - externalStream.write(i.toString()); - } - t.end(); - }); -} diff --git a/test/browser/test-stream-transform-constructor-set-methods.js b/test/browser/test-stream-transform-constructor-set-methods.js deleted file mode 100644 index c820f8cac7..0000000000 --- a/test/browser/test-stream-transform-constructor-set-methods.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict'; -var common = require('../common'); - -var Transform = require('../../').Transform; -module.exports = function (t) { - t.test('transform constructor set methods', function (t) { - var _transformCalled = false; - function _transform(d, e, n) { - _transformCalled = true; - n(); - } - - var _flushCalled = false; - function _flush(n) { - _flushCalled = true; - n(); - } - - var tr = new Transform({ - transform: _transform, - flush: _flush - }); - - tr.end(Buffer.from('blerg')); - tr.resume(); - - tr.on('end', function() { - t.equal(tr._transform, _transform); - t.equal(tr._flush, _flush); - t.ok(_transformCalled); - t.ok(_flushCalled); - t.end(); - }); - }); -} diff --git a/test/browser/test-stream-transform-objectmode-falsey-value.js b/test/browser/test-stream-transform-objectmode-falsey-value.js deleted file mode 100644 index 3b226a7c26..0000000000 --- a/test/browser/test-stream-transform-objectmode-falsey-value.js +++ /dev/null @@ -1,36 +0,0 @@ -'use strict'; -var common = require('../common'); - -var stream = require('../../'); -var PassThrough = stream.PassThrough; -module.exports = function (t) { - t.test('transform objectmode falsey value', function (t) { - var src = new PassThrough({ objectMode: true }); - var tx = new PassThrough({ objectMode: true }); - var dest = new PassThrough({ objectMode: true }); - - var expect = [ -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ]; - var results = []; - dest.on('end', function() { - t.deepEqual(results, expect); - t.end(); - }); - - dest.on('data', function(x) { - 
results.push(x); - }); - - src.pipe(tx).pipe(dest); - - var i = -1; - var int = setInterval(function() { - if (i > 10) { - src.end(); - clearInterval(int); - } else { - t.ok(true); - src.write(i++); - } - }, 10); - }); -} diff --git a/test/browser/test-stream-transform-split-objectmode.js b/test/browser/test-stream-transform-split-objectmode.js deleted file mode 100644 index 51e7ff649a..0000000000 --- a/test/browser/test-stream-transform-split-objectmode.js +++ /dev/null @@ -1,58 +0,0 @@ -'use strict'; -var common = require('../common'); - -var Transform = require('../../').Transform; -module.exports = function (t) { - t.test('transform split objectmode', function (t) { - t.plan(10); - var parser = new Transform({ readableObjectMode : true }); - - t.ok(parser._readableState.objectMode, 'parser 1'); - t.notOk(parser._writableState.objectMode, 'parser 2'); - t.equals(parser._readableState.highWaterMark, 16, 'parser 3'); - t.equals(parser._writableState.highWaterMark, (16 * 1024), 'parser 4'); - - parser._transform = function(chunk, enc, callback) { - callback(null, { val : chunk[0] }); - }; - - var parsed; - - parser.on('data', function(obj) { - parsed = obj; - }); - - parser.end(Buffer.from([42])); - - parser.on('end', function() { - t.equals(parsed.val, 42, 'parser ended'); - }); - - - var serializer = new Transform({ writableObjectMode : true }); - - t.notOk(serializer._readableState.objectMode, 'serializer 1'); - t.ok(serializer._writableState.objectMode, 'serializer 2'); - t.equals(serializer._readableState.highWaterMark, (16 * 1024), 'serializer 3'); - t.equals(serializer._writableState.highWaterMark, 16, 'serializer 4'); - - serializer._transform = function(obj, _, callback) { - callback(null, Buffer.from([obj.val])); - }; - - var serialized; - - serializer.on('data', function(chunk) { - serialized = chunk; - }); - - serializer.write({ val : 42 }); - - serializer.on('end', function() { - t.equals(serialized[0], 42, 'searlizer ended'); - }); - setImmediate(function () { - serializer.end(); - }); - }); -} diff --git a/test/browser/test-stream-unshift-empty-chunk.js b/test/browser/test-stream-unshift-empty-chunk.js deleted file mode 100644 index 61f9db83a0..0000000000 --- a/test/browser/test-stream-unshift-empty-chunk.js +++ /dev/null @@ -1,63 +0,0 @@ -'use strict'; -var common = require('../common'); - -// This test verifies that stream.unshift(Buffer(0)) or -// stream.unshift('') does not set state.reading=false. -var Readable = require('../../').Readable; -module.exports = function (t) { - t.test('unshift empty chunk', function (t) { - t.plan(1); - var r = new Readable(); - var nChunks = 10; - var chunk = Buffer.alloc(10); - chunk.fill('x'); - - r._read = function(n) { - setTimeout(function() { - r.push(--nChunks === 0 ? null : chunk); - }); - }; - - var readAll = false; - var seen = []; - r.on('readable', function() { - var chunk; - while (chunk = r.read()) { - seen.push(chunk.toString()); - // simulate only reading a certain amount of the data, - // and then putting the rest of the chunk back into the - // stream, like a parser might do. We just fill it with - // 'y' so that it's easy to see which bits were touched, - // and which were not. - var putBack = Buffer.alloc(readAll ? 
0 : 5); - putBack.fill('y'); - readAll = !readAll; - r.unshift(putBack); - } - }); - - var expect = - [ 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy' ]; - - r.on('end', function() { - t.deepEqual(seen, expect); - }); - }); -} diff --git a/test/browser/test-stream-unshift-read-race.js b/test/browser/test-stream-unshift-read-race.js deleted file mode 100644 index 16a3966bec..0000000000 --- a/test/browser/test-stream-unshift-read-race.js +++ /dev/null @@ -1,110 +0,0 @@ -'use strict'; -var common = require('../common'); - -// This test verifies that: -// 1. unshift() does not cause colliding _read() calls. -// 2. unshift() after the 'end' event is an error, but after the EOF -// signalling null, it is ok, and just creates a new readable chunk. -// 3. push() after the EOF signaling null is an error. -// 4. _read() is not called after pushing the EOF null chunk. - -var stream = require('../../'); -module.exports = function (t) { - t.test('unshift read race', function (tape) { - var hwm = 10; - var r = stream.Readable({ highWaterMark: hwm }); - var chunks = 10; - var t = (chunks * 5); - - var data = Buffer.alloc(chunks * hwm + Math.ceil(hwm / 2)); - for (var i = 0; i < data.length; i++) { - var c = 'asdf'.charCodeAt(i % 4); - data[i] = c; - } - - var pos = 0; - var pushedNull = false; - r._read = function(n) { - tape.notOk(pushedNull, '_read after null push'); - - // every third chunk is fast - push(!(chunks % 3)); - - function push(fast) { - tape.notOk(pushedNull, 'push() after null push'); - var c = pos >= data.length ? null : data.slice(pos, Math.min(pos + n, data.length)); - pushedNull = c === null; - if (fast) { - pos += n; - r.push(c); - if (c === null) pushError(); - } else { - setTimeout(function() { - pos += n; - r.push(c); - if (c === null) pushError(); - }); - } - } - }; - - function pushError() { - tape.throws(function() { - r.push(Buffer.alloc(1)); - }); - } - - - var w = stream.Writable(); - var written = []; - w._write = function(chunk, encoding, cb) { - written.push(chunk.toString()); - cb(); - }; - - var ended = false; - r.on('end', function() { - tape.notOk(ended, 'end emitted more than once'); - tape.throws(function() { - r.unshift(Buffer.alloc(1)); - }); - ended = true; - w.end(); - }); - - r.on('readable', function() { - var chunk; - while (null !== (chunk = r.read(10))) { - w.write(chunk); - if (chunk.length > 4) - r.unshift(Buffer.from('1234')); - } - }); - - w.on('finish', function() { - // each chunk should start with 1234, and then be asfdasdfasdf... - // The first got pulled out before the first unshift('1234'), so it's - // lacking that piece. 
- tape.equal(written[0], 'asdfasdfas'); - var asdf = 'd'; - //console.error('0: %s', written[0]); - for (var i = 1; i < written.length; i++) { - //console.error('%s: %s', i.toString(32), written[i]); - tape.equal(written[i].slice(0, 4), '1234'); - for (var j = 4; j < written[i].length; j++) { - var c = written[i].charAt(j); - tape.equal(c, asdf); - switch (asdf) { - case 'a': asdf = 's'; break; - case 's': asdf = 'd'; break; - case 'd': asdf = 'f'; break; - case 'f': asdf = 'a'; break; - } - } - } - tape.equal(written.length, 18); - tape.end(); - }); - - }); -} diff --git a/test/browser/test-stream-writable-change-default-encoding.js b/test/browser/test-stream-writable-change-default-encoding.js deleted file mode 100644 index de657152af..0000000000 --- a/test/browser/test-stream-writable-change-default-encoding.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict'; -var common = require('../common'); - -var stream = require('../../'); -var inherits = require('inherits'); - -function MyWritable(fn, options) { - stream.Writable.call(this, options); - this.fn = fn; -}; - -inherits(MyWritable, stream.Writable); - -MyWritable.prototype._write = function(chunk, encoding, callback) { - this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding); - callback(); -}; - -function defaultCondingIsUtf8(t) { - t.plan(1); - var m = new MyWritable(function(isBuffer, type, enc) { - t.equal(enc, 'utf8'); - }, { decodeStrings: false }); - m.write('foo'); - m.end(); -} - -function changeDefaultEncodingToAscii(t) { - t.plan(1); - var m = new MyWritable(function(isBuffer, type, enc) { - t.equal(enc, 'ascii'); - }, { decodeStrings: false }); - m.setDefaultEncoding('ascii'); - m.write('bar'); - m.end(); -} - -function changeDefaultEncodingToInvalidValue(t) { - t.plan(1); - t.throws(function () { - var m = new MyWritable(function(isBuffer, type, enc) { - }, { decodeStrings: false }); - m.setDefaultEncoding({}); - m.write('bar'); - m.end(); - }, TypeError); -} -function checkVairableCaseEncoding(t) { - t.plan(1); - var m = new MyWritable(function(isBuffer, type, enc) { - t.equal(enc, 'ascii'); - }, { decodeStrings: false }); - m.setDefaultEncoding('AsCii'); - m.write('bar'); - m.end(); -} -module.exports = function (t) { - t.test('writable change default encoding', function (t) { - t.test('defaultCondingIsUtf8', defaultCondingIsUtf8); - t.test('changeDefaultEncodingToAscii', changeDefaultEncodingToAscii); - t.test('changeDefaultEncodingToInvalidValue', changeDefaultEncodingToInvalidValue); - t.test('checkVairableCaseEncoding', checkVairableCaseEncoding); - }); -} diff --git a/test/browser/test-stream-writable-constructor-set-methods.js b/test/browser/test-stream-writable-constructor-set-methods.js deleted file mode 100644 index e39bdbfaa6..0000000000 --- a/test/browser/test-stream-writable-constructor-set-methods.js +++ /dev/null @@ -1,40 +0,0 @@ -'use strict'; -var common = require('../common'); -var Writable = require('../../').Writable; - -module.exports = function (t) { - t.test('writable constructor set methods', function (t){ - - - var _writeCalled = false; - function _write(d, e, n) { - _writeCalled = true; - } - - var w = new Writable({ write: _write }); - w.end(Buffer.from('blerg')); - - var _writevCalled = false; - var dLength = 0; - function _writev(d, n) { - dLength = d.length; - _writevCalled = true; - } - - var w2 = new Writable({ writev: _writev }); - w2.cork(); - - w2.write(Buffer.from('blerg')); - w2.write(Buffer.from('blerg')); - w2.end(); - - setImmediate(function() { - t.equal(w._write, _write); - 
t.ok(_writeCalled); - t.equal(w2._writev, _writev); - t.equal(dLength, 2); - t.ok(_writevCalled); - t.end(); - }); - }); -} diff --git a/test/browser/test-stream-writable-decoded-encoding.js b/test/browser/test-stream-writable-decoded-encoding.js deleted file mode 100644 index f32dd7ef63..0000000000 --- a/test/browser/test-stream-writable-decoded-encoding.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict'; -var common = require('../common'); - -var stream = require('../../'); -var inherits = require('inherits'); - -function MyWritable(fn, options) { - stream.Writable.call(this, options); - this.fn = fn; -}; - -inherits(MyWritable, stream.Writable); - -MyWritable.prototype._write = function(chunk, encoding, callback) { - this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding); - callback(); -}; - -function decodeStringsTrue(t) { - t.plan(3); - var m = new MyWritable(function(isBuffer, type, enc) { - t.ok(isBuffer); - t.equal(type, 'object'); - t.equal(enc, 'buffer'); - //console.log('ok - decoded string is decoded'); - }, { decodeStrings: true }); - m.write('some-text', 'utf8'); - m.end(); -} - -function decodeStringsFalse(t) { - t.plan(3); - var m = new MyWritable(function(isBuffer, type, enc) { - t.notOk(isBuffer); - t.equal(type, 'string'); - t.equal(enc, 'utf8'); - //console.log('ok - un-decoded string is not decoded'); - }, { decodeStrings: false }); - m.write('some-text', 'utf8'); - m.end(); -} -module.exports = function (t) { - t.test('decodeStringsTrue', decodeStringsTrue); - t.test('decodeStringsFalse', decodeStringsFalse); -} diff --git a/test/browser/test-stream-writev.js b/test/browser/test-stream-writev.js deleted file mode 100644 index b5e8b61383..0000000000 --- a/test/browser/test-stream-writev.js +++ /dev/null @@ -1,105 +0,0 @@ -'use strict'; -var common = require('../common'); - -var stream = require('../../'); - -var queue = []; -for (var decode = 0; decode < 2; decode++) { - for (var uncork = 0; uncork < 2; uncork++) { - for (var multi = 0; multi < 2; multi++) { - queue.push([!!decode, !!uncork, !!multi]); - } - } -} - -module.exports = function (t) { - t.test('writev', function (t) { - queue.forEach(function (tr, i){ - t.test('round ' + i, test(tr[0], tr[1], tr[2])); - }); - }); -} - -function test(decode, uncork, multi) { - return function (t) { - //console.log('# decode=%j uncork=%j multi=%j', decode, uncork, multi); - var counter = 0; - var expectCount = 0; - function cnt(msg) { - expectCount++; - var expect = expectCount; - var called = false; - return function(er) { - if (er) - throw er; - called = true; - counter++; - t.equal(counter, expect); - }; - } - - var w = new stream.Writable({ decodeStrings: decode }); - w._write = function(chunk, e, cb) { - t.ok(false, 'Should not call _write'); - }; - - var expectChunks = decode ? - [ - { encoding: 'buffer', - chunk: [104, 101, 108, 108, 111, 44, 32] }, - { encoding: 'buffer', - chunk: [119, 111, 114, 108, 100] }, - { encoding: 'buffer', - chunk: [33] }, - { encoding: 'buffer', - chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] }, - { encoding: 'buffer', - chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173]} - ] : [ - { encoding: 'ascii', chunk: 'hello, ' }, - { encoding: 'utf8', chunk: 'world' }, - { encoding: 'buffer', chunk: [33] }, - { encoding: 'binary', chunk: '\nand then...' 
}, - { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' } - ]; - - var actualChunks; - w._writev = function(chunks, cb) { - actualChunks = chunks.map(function(chunk) { - return { - encoding: chunk.encoding, - chunk: Buffer.isBuffer(chunk.chunk) ? - Array.prototype.slice.call(chunk.chunk) : chunk.chunk - }; - }); - cb(); - }; - - w.cork(); - w.write('hello, ', 'ascii', cnt('hello')); - w.write('world', 'utf8', cnt('world')); - - if (multi) - w.cork(); - - w.write(Buffer.from('!'), 'buffer', cnt('!')); - w.write('\nand then...', 'binary', cnt('and then')); - - if (multi) - w.uncork(); - - w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex')); - - if (uncork) - w.uncork(); - - w.end(cnt('end')); - - w.on('finish', function() { - // make sure finish comes after all the write cb - cnt('finish')(); - t.deepEqual(expectChunks, actualChunks); - t.end(); - }); - } -} diff --git a/test/browser/test-stream2-base64-single-char-read-end.js b/test/browser/test-stream2-base64-single-char-read-end.js deleted file mode 100644 index 86b66f81d5..0000000000 --- a/test/browser/test-stream2-base64-single-char-read-end.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict'; -var common = require('../common'); -var R = require('../../lib/_stream_readable'); -var W = require('../../lib/_stream_writable'); -module.exports = function (t) { - t.test('base64 single char read end', function (t) { - t.plan(1); - var src = new R({encoding: 'base64'}); - var dst = new W(); - var hasRead = false; - var accum = []; - var timeout; - - src._read = function(n) { - if(!hasRead) { - hasRead = true; - process.nextTick(function() { - src.push(Buffer.from('1')); - src.push(null); - }); - }; - }; - - dst._write = function(chunk, enc, cb) { - accum.push(chunk); - cb(); - }; - - src.on('end', function() { - t.equal(Buffer.concat(accum) + '', 'MQ=='); - clearTimeout(timeout); - }); - - src.pipe(dst); - - timeout = setTimeout(function() { - assert.fail('timed out waiting for _write'); - }, 100); - -}) -} diff --git a/test/browser/test-stream2-compatibility.js b/test/browser/test-stream2-compatibility.js deleted file mode 100644 index 954473b8b7..0000000000 --- a/test/browser/test-stream2-compatibility.js +++ /dev/null @@ -1,33 +0,0 @@ -'use strict'; -var R = require('../../lib/_stream_readable'); -var inherits = require('inherits'); -var EE = require('events').EventEmitter; -module.exports = function (t) { - t.test('compatibility', function (t) { - t.plan(1); - - var ondataCalled = 0; - - function TestReader() { - R.apply(this); - this._buffer = Buffer.alloc(100); - this._buffer.fill('x'); - - this.on('data', function() { - ondataCalled++; - }); - } - - inherits(TestReader, R); - - TestReader.prototype._read = function(n) { - this.push(this._buffer); - this._buffer = Buffer.alloc(0); - }; - - var reader = new TestReader(); - setTimeout(function() { - t.equal(ondataCalled, 1); - }); - }); -} diff --git a/test/browser/test-stream2-large-read-stall.js b/test/browser/test-stream2-large-read-stall.js deleted file mode 100644 index 02539a0b91..0000000000 --- a/test/browser/test-stream2-large-read-stall.js +++ /dev/null @@ -1,62 +0,0 @@ -'use strict'; -var common = require('../common'); -module.exports = function (t) { - t.test('large object read stall', function (t) { - -// If everything aligns so that you do a read(n) of exactly the -// remaining buffer, then make sure that 'end' still emits. 
- - var READSIZE = 100; - var PUSHSIZE = 20; - var PUSHCOUNT = 1000; - var HWM = 50; - - var Readable = require('../../').Readable; - var r = new Readable({ - highWaterMark: HWM - }); - var rs = r._readableState; - - r._read = push; - - r.on('readable', function() { - ;false && console.error('>> readable'); - do { - ;false && console.error(' > read(%d)', READSIZE); - var ret = r.read(READSIZE); - ;false && console.error(' < %j (%d remain)', ret && ret.length, rs.length); - } while (ret && ret.length === READSIZE); - - ;false && console.error('<< after read()', - ret && ret.length, - rs.needReadable, - rs.length); - }); - - var endEmitted = false; - r.on('end', function() { - t.equal(pushes, PUSHCOUNT + 1); - t.end(); - ;false && console.error('end'); - }); - - var pushes = 0; - function push() { - if (pushes > PUSHCOUNT) - return; - - if (pushes++ === PUSHCOUNT) { - ;false && console.error(' push(EOF)'); - return r.push(null); - } - - ;false && console.error(' push #%d', pushes); - if (r.push(Buffer.alloc(PUSHSIZE))) - setTimeout(push); - } - - // start the flow - var ret = r.read(0); - - }); -} diff --git a/test/browser/test-stream2-objects.js b/test/browser/test-stream2-objects.js deleted file mode 100644 index 26a038b599..0000000000 --- a/test/browser/test-stream2-objects.js +++ /dev/null @@ -1,306 +0,0 @@ -'use strict'; -var common = require('../common'); -var Readable = require('../../lib/_stream_readable'); -var Writable = require('../../lib/_stream_writable'); - -module.exports = function (t) { - - - - function toArray(callback) { - var stream = new Writable({ objectMode: true }); - var list = []; - stream.write = function(chunk) { - list.push(chunk); - }; - - stream.end = function() { - callback(list); - }; - - return stream; - } - - function fromArray(list) { - var r = new Readable({ objectMode: true }); - r._read = noop; - forEach(list, function(chunk) { - r.push(chunk); - }); - r.push(null); - - return r; - } - - function noop() {} - - t.test('can read objects from stream', function(t) { - var r = fromArray([{ one: '1'}, { two: '2' }]); - - var v1 = r.read(); - var v2 = r.read(); - var v3 = r.read(); - - t.deepEqual(v1, { one: '1' }); - t.deepEqual(v2, { two: '2' }); - t.deepEqual(v3, null); - - t.end(); - }); - - t.test('can pipe objects into stream', function(t) { - var r = fromArray([{ one: '1'}, { two: '2' }]); - - r.pipe(toArray(function(list) { - t.deepEqual(list, [ - { one: '1' }, - { two: '2' } - ]); - - t.end(); - })); - }); - - t.test('read(n) is ignored', function(t) { - var r = fromArray([{ one: '1'}, { two: '2' }]); - - var value = r.read(2); - - t.deepEqual(value, { one: '1' }); - - t.end(); - }); - - t.test('can read objects from _read (sync)', function(t) { - var r = new Readable({ objectMode: true }); - var list = [{ one: '1'}, { two: '2' }]; - r._read = function(n) { - var item = list.shift(); - r.push(item || null); - }; - - r.pipe(toArray(function(list) { - t.deepEqual(list, [ - { one: '1' }, - { two: '2' } - ]); - - t.end(); - })); - }); - - t.test('can read objects from _read (async)', function(t) { - var r = new Readable({ objectMode: true }); - var list = [{ one: '1'}, { two: '2' }]; - r._read = function(n) { - var item = list.shift(); - process.nextTick(function() { - r.push(item || null); - }); - }; - - r.pipe(toArray(function(list) { - t.deepEqual(list, [ - { one: '1' }, - { two: '2' } - ]); - - t.end(); - })); - }); - - t.test('can read strings as objects', function(t) { - var r = new Readable({ - objectMode: true - }); - r._read = noop; - var list = 
['one', 'two', 'three']; - forEach(list, function(str) { - r.push(str); - }); - r.push(null); - - r.pipe(toArray(function(array) { - t.deepEqual(array, list); - - t.end(); - })); - }); - - t.test('read(0) for object streams', function(t) { - var r = new Readable({ - objectMode: true - }); - r._read = noop; - - r.push('foobar'); - r.push(null); - - var v = r.read(0); - - r.pipe(toArray(function(array) { - t.deepEqual(array, ['foobar']); - - t.end(); - })); - }); - - t.test('falsey values', function(t) { - var r = new Readable({ - objectMode: true - }); - r._read = noop; - - r.push(false); - r.push(0); - r.push(''); - r.push(null); - - r.pipe(toArray(function(array) { - t.deepEqual(array, [false, 0, '']); - - t.end(); - })); - }); - - t.test('high watermark _read', function(t) { - var r = new Readable({ - highWaterMark: 6, - objectMode: true - }); - var calls = 0; - var list = ['1', '2', '3', '4', '5', '6', '7', '8']; - - r._read = function(n) { - calls++; - }; - - forEach(list, function(c) { - r.push(c); - }); - - var v = r.read(); - - t.equal(calls, 0); - t.equal(v, '1'); - - var v2 = r.read(); - t.equal(v2, '2'); - - var v3 = r.read(); - t.equal(v3, '3'); - - t.equal(calls, 1); - - t.end(); - }); - - t.test('high watermark push', function(t) { - var r = new Readable({ - highWaterMark: 6, - objectMode: true - }); - r._read = function(n) {}; - for (var i = 0; i < 6; i++) { - var bool = r.push(i); - t.equal(bool, i === 5 ? false : true); - } - - t.end(); - }); - - t.test('can write objects to stream', function(t) { - var w = new Writable({ objectMode: true }); - - w._write = function(chunk, encoding, cb) { - t.deepEqual(chunk, { foo: 'bar' }); - cb(); - }; - - w.on('finish', function() { - t.end(); - }); - - w.write({ foo: 'bar' }); - w.end(); - }); - - t.test('can write multiple objects to stream', function(t) { - var w = new Writable({ objectMode: true }); - var list = []; - - w._write = function(chunk, encoding, cb) { - list.push(chunk); - cb(); - }; - - w.on('finish', function() { - t.deepEqual(list, [0, 1, 2, 3, 4]); - - t.end(); - }); - - w.write(0); - w.write(1); - w.write(2); - w.write(3); - w.write(4); - w.end(); - }); - - t.test('can write strings as objects', function(t) { - var w = new Writable({ - objectMode: true - }); - var list = []; - - w._write = function(chunk, encoding, cb) { - list.push(chunk); - process.nextTick(cb); - }; - - w.on('finish', function() { - t.deepEqual(list, ['0', '1', '2', '3', '4']); - - t.end(); - }); - - w.write('0'); - w.write('1'); - w.write('2'); - w.write('3'); - w.write('4'); - w.end(); - }); - - t.test('buffers finish until cb is called', function(t) { - var w = new Writable({ - objectMode: true - }); - var called = false; - - w._write = function(chunk, encoding, cb) { - t.equal(chunk, 'foo'); - - process.nextTick(function() { - called = true; - cb(); - }); - }; - - w.on('finish', function() { - t.equal(called, true); - - t.end(); - }); - - w.write('foo'); - w.end(); - }); - - function forEach (xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } - } -}; diff --git a/test/browser/test-stream2-pipe-error-handling.js b/test/browser/test-stream2-pipe-error-handling.js deleted file mode 100644 index ba212f7dcb..0000000000 --- a/test/browser/test-stream2-pipe-error-handling.js +++ /dev/null @@ -1,88 +0,0 @@ -'use strict'; -var common = require('../common'); -var assert = require('assert'); -var stream = require('../../'); -module.exports = function (t) { - t.test('Error Listener Catches', function (t) { - var count = 1000; - - 
var source = new stream.Readable(); - source._read = function(n) { - n = Math.min(count, n); - count -= n; - source.push(Buffer.alloc(n)); - }; - - var unpipedDest; - source.unpipe = function(dest) { - unpipedDest = dest; - stream.Readable.prototype.unpipe.call(this, dest); - }; - - var dest = new stream.Writable(); - dest._write = function(chunk, encoding, cb) { - cb(); - }; - - source.pipe(dest); - - var gotErr = null; - dest.on('error', function(err) { - gotErr = err; - }); - - var unpipedSource; - dest.on('unpipe', function(src) { - unpipedSource = src; - }); - - var err = new Error('This stream turned into bacon.'); - dest.emit('error', err); - t.strictEqual(gotErr, err); - t.strictEqual(unpipedSource, source); - t.strictEqual(unpipedDest, dest); - t.end(); - }); - - t.test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) { - var count = 1000; - - var source = new stream.Readable(); - source._read = function(n) { - n = Math.min(count, n); - count -= n; - source.push(Buffer.alloc(n)); - }; - - var unpipedDest; - source.unpipe = function(dest) { - unpipedDest = dest; - stream.Readable.prototype.unpipe.call(this, dest); - }; - - var dest = new stream.Writable(); - dest._write = function(chunk, encoding, cb) { - cb(); - }; - - source.pipe(dest); - - var unpipedSource; - dest.on('unpipe', function(src) { - unpipedSource = src; - }); - - var err = new Error('This stream turned into bacon.'); - - var gotErr = null; - try { - dest.emit('error', err); - } catch (e) { - gotErr = e; - } - t.strictEqual(gotErr, err); - t.strictEqual(unpipedSource, source); - t.strictEqual(unpipedDest, dest); - t.end(); - }); -} diff --git a/test/browser/test-stream2-pipe-error-once-listener.js b/test/browser/test-stream2-pipe-error-once-listener.js deleted file mode 100644 index 5f4a4e2686..0000000000 --- a/test/browser/test-stream2-pipe-error-once-listener.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict'; -var common = require('../common'); - -var inherits = require('inherits'); -var stream = require('../../'); - -module.exports = function (t) { - t.test('pipe error once listener', function (t){ - t.plan(1); - var Read = function() { - stream.Readable.call(this); - }; - inherits(Read, stream.Readable); - - Read.prototype._read = function(size) { - this.push('x'); - this.push(null); - }; - - - var Write = function() { - stream.Writable.call(this); - }; - inherits(Write, stream.Writable); - - Write.prototype._write = function(buffer, encoding, cb) { - this.emit('error', new Error('boom')); - this.emit('alldone'); - }; - - var read = new Read(); - var write = new Write(); - - write.once('error', function(err) {}); - write.once('alldone', function(err) { - t.ok(true); - }); - - read.pipe(write); - }); -} diff --git a/test/browser/test-stream2-push.js b/test/browser/test-stream2-push.js deleted file mode 100644 index 7ca5f39ef3..0000000000 --- a/test/browser/test-stream2-push.js +++ /dev/null @@ -1,120 +0,0 @@ -'use strict'; -var common = require('../common'); -var stream = require('../../'); -var Readable = stream.Readable; -var Writable = stream.Writable; - - -var inherits = require('inherits'); -var EE = require('events').EventEmitter; -module.exports = function (t) { - -// a mock thing a bit like the net.Socket/tcp_wrap.handle interaction - t.test('push', function (t) { - var stream = new Readable({ - highWaterMark: 16, - encoding: 'utf8' - }); - - var source = new EE(); - - stream._read = function() { - //console.error('stream._read'); - readStart(); - }; - - var ended = false; - 
stream.on('end', function() { - ended = true; - }); - - source.on('data', function(chunk) { - var ret = stream.push(chunk); - //console.error('data', stream._readableState.length); - if (!ret) - readStop(); - }); - - source.on('end', function() { - stream.push(null); - }); - - var reading = false; - - function readStart() { - //console.error('readStart'); - reading = true; - } - - function readStop() { - //console.error('readStop'); - reading = false; - process.nextTick(function() { - var r = stream.read(); - if (r !== null) - writer.write(r); - }); - } - - var writer = new Writable({ - decodeStrings: false - }); - - var written = []; - - var expectWritten = - [ 'asdfgasdfgasdfgasdfg', - 'asdfgasdfgasdfgasdfg', - 'asdfgasdfgasdfgasdfg', - 'asdfgasdfgasdfgasdfg', - 'asdfgasdfgasdfgasdfg', - 'asdfgasdfgasdfgasdfg' ]; - - writer._write = function(chunk, encoding, cb) { - //console.error('WRITE %s', chunk); - written.push(chunk); - process.nextTick(cb); - }; - - writer.on('finish', finish); - - - // now emit some chunks. - - var chunk = 'asdfg'; - - var set = 0; - readStart(); - data(); - function data() { - t.ok(reading); - source.emit('data', chunk); - t.ok(reading); - source.emit('data', chunk); - t.ok(reading); - source.emit('data', chunk); - t.ok(reading); - source.emit('data', chunk); - t.notOk(reading); - if (set++ < 5) - setTimeout(data, 10); - else - end(); - } - - function finish() { - //console.error('finish'); - t.deepEqual(written, expectWritten); - t.end(); - } - - function end() { - source.emit('end'); - t.notOk(reading); - writer.end(stream.read()); - setTimeout(function() { - t.ok(ended); - }); - } - }); -}; diff --git a/test/browser/test-stream2-readable-empty-buffer-no-eof.js b/test/browser/test-stream2-readable-empty-buffer-no-eof.js deleted file mode 100644 index fd86d67bc5..0000000000 --- a/test/browser/test-stream2-readable-empty-buffer-no-eof.js +++ /dev/null @@ -1,91 +0,0 @@ -'use strict'; -var common = require('../common'); - -var Readable = require('../../').Readable; - -module.exports = function (t) { - t.test('readable empty buffer no eof 1', function (t) { - t.plan(1); - var r = new Readable(); - - // should not end when we get a Buffer(0) or '' as the _read result - // that just means that there is *temporarily* no data, but to go - // ahead and try again later. - // - // note that this is very unusual. it only works for crypto streams - // because the other side of the stream will call read(0) to cycle - // data through openssl. that's why we set the timeouts to call - // r.read(0) again later, otherwise there is no more work being done - // and the process just exits. - - var buf = Buffer.alloc(5); - buf.fill('x'); - var reads = 5; - r._read = function(n) { - switch (reads--) { - case 0: - return r.push(null); // EOF - case 1: - return r.push(buf); - case 2: - setTimeout(r.read.bind(r, 0), 50); - return r.push(Buffer.alloc(0)); // Not-EOF! 
- case 3: - setTimeout(r.read.bind(r, 0), 50); - return process.nextTick(function() { - return r.push(Buffer.alloc(0)); - }); - case 4: - setTimeout(r.read.bind(r, 0), 50); - return setTimeout(function() { - return r.push(Buffer.alloc(0)); - }); - case 5: - return setTimeout(function() { - return r.push(buf); - }); - default: - throw new Error('unreachable'); - } - }; - - var results = []; - function flow() { - var chunk; - while (null !== (chunk = r.read())) - results.push(chunk + ''); - } - r.on('readable', flow); - r.on('end', function() { - results.push('EOF'); - t.deepEqual(results, [ 'xxxxx', 'xxxxx', 'EOF' ]); - }); - flow(); - - }); - - t.test('readable empty buffer no eof 2', function (t) { - t.plan(1); - var r = new Readable({ encoding: 'base64' }); - var reads = 5; - r._read = function(n) { - if (!reads--) - return r.push(null); // EOF - else - return r.push(Buffer.from('x')); - }; - - var results = []; - function flow() { - var chunk; - while (null !== (chunk = r.read())) - results.push(chunk + ''); - } - r.on('readable', flow); - r.on('end', function() { - results.push('EOF'); - t.deepEqual(results, [ 'eHh4', 'eHg=', 'EOF' ]); - }); - flow(); - }); -} diff --git a/test/browser/test-stream2-readable-from-list.js b/test/browser/test-stream2-readable-from-list.js deleted file mode 100644 index 7522b05692..0000000000 --- a/test/browser/test-stream2-readable-from-list.js +++ /dev/null @@ -1,68 +0,0 @@ -// Flags: --expose_internals -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ -require('../common'); -var fromList = require('../../lib/_stream_readable')._fromList; -var BufferList = require('../../lib/internal/streams/BufferList'); - -function bufferListFromArray(arr) { - var bl = new BufferList(); - for (var i = 0; i < arr.length; ++i) { - bl.push(arr[i]); - }return bl; -} - -module.exports = function (t) { - t.test('buffers', function (t) { - var list = [bufferShim.from('foog'), bufferShim.from('bark'), bufferShim.from('bazy'), bufferShim.from('kuel')]; - list = bufferListFromArray(list); - - // read more than the first element. - var ret = fromList(6, { buffer: list, length: 16 }); - t.equal(ret.toString(), 'foogba'); - - // read exactly the first element. - ret = fromList(2, { buffer: list, length: 10 }); - t.equal(ret.toString(), 'rk'); - - // read less than the first element. - ret = fromList(2, { buffer: list, length: 8 }); - t.equal(ret.toString(), 'ba'); - - // read more than we have. - ret = fromList(100, { buffer: list, length: 6 }); - t.equal(ret.toString(), 'zykuel'); - - // all consumed. - t.same(list, new BufferList()); - - t.end(); - }); - - t.test('strings', function (t) { - var list = ['foog', 'bark', 'bazy', 'kuel']; - list = bufferListFromArray(list); - - // read more than the first element. - var ret = fromList(6, { buffer: list, length: 16, decoder: true }); - t.equal(ret, 'foogba'); - - // read exactly the first element. - ret = fromList(2, { buffer: list, length: 10, decoder: true }); - t.equal(ret, 'rk'); - - // read less than the first element. - ret = fromList(2, { buffer: list, length: 8, decoder: true }); - t.equal(ret, 'ba'); - - // read more than we have. - ret = fromList(100, { buffer: list, length: 6, decoder: true }); - t.equal(ret, 'zykuel'); - - // all consumed. 
- t.same(list, new BufferList()); - - t.end(); - }); -} diff --git a/test/browser/test-stream2-readable-legacy-drain.js b/test/browser/test-stream2-readable-legacy-drain.js deleted file mode 100644 index 30fd350f97..0000000000 --- a/test/browser/test-stream2-readable-legacy-drain.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict'; -var common = require('../common'); - -var Stream = require('../../'); -var Readable = require('../../').Readable; -module.exports = function (t) { - t.test('readable legacy drain', function (t) { - var r = new Readable(); - var N = 256; - var reads = 0; - r._read = function(n) { - return r.push(++reads === N ? null : Buffer.alloc(1)); - }; - t.plan(2); - r.on('end', function() { - t.ok(true, 'rended'); - }); - - var w = new Stream(); - w.writable = true; - var writes = 0; - var buffered = 0; - w.write = function(c) { - writes += c.length; - buffered += c.length; - process.nextTick(drain); - return false; - }; - - function drain() { - if(buffered > 3) { - t.ok(false, 'to much buffer'); - } - buffered = 0; - w.emit('drain'); - } - - - w.end = function() { - t.ok(true, 'wended'); - }; - - // Just for kicks, let's mess with the drain count. - // This verifies that even if it gets negative in the - // pipe() cleanup function, we'll still function properly. - r.on('readable', function() { - w.emit('drain'); - }); - - r.pipe(w); -}); -} diff --git a/test/browser/test-stream2-readable-non-empty-end.js b/test/browser/test-stream2-readable-non-empty-end.js deleted file mode 100644 index 2a6d4f0f32..0000000000 --- a/test/browser/test-stream2-readable-non-empty-end.js +++ /dev/null @@ -1,57 +0,0 @@ -'use strict'; -var common = require('../common'); -var Readable = require('../../lib/_stream_readable'); -module.exports = function (t) { - t.test('non empty end', function (t) { - t.plan(4); - var len = 0; - var chunks = new Array(10); - for (var i = 1; i <= 10; i++) { - chunks[i - 1] = Buffer.alloc(i); - len += i; - } - - var test = new Readable(); - var n = 0; - test._read = function(size) { - var chunk = chunks[n++]; - setTimeout(function() { - test.push(chunk === undefined ? 
null : chunk); - }); - }; - - test.on('end', thrower); - function thrower() { - throw new Error('this should not happen!'); - } - - var bytesread = 0; - test.on('readable', function() { - var b = len - bytesread - 1; - var res = test.read(b); - if (res) { - bytesread += res.length; - //console.error('br=%d len=%d', bytesread, len); - setTimeout(next); - } - test.read(0); - }); - test.read(0); - - function next() { - // now let's make 'end' happen - test.removeListener('end', thrower); - - test.on('end', function() { - t.ok(true, 'end emitted'); - }); - - // one to get the last byte - var r = test.read(); - t.ok(r); - t.equal(r.length, 1); - r = test.read(); - t.equal(r, null); - } - }); -} diff --git a/test/browser/test-stream2-readable-wrap-empty.js b/test/browser/test-stream2-readable-wrap-empty.js deleted file mode 100644 index d13bbbadbe..0000000000 --- a/test/browser/test-stream2-readable-wrap-empty.js +++ /dev/null @@ -1,24 +0,0 @@ -'use strict'; -var common = require('../common'); - -var Readable = require('../../lib/_stream_readable'); -var EE = require('events').EventEmitter; -module.exports = function (t) { - t.test('wrap empty', function (t) { - t.plan(1); - var oldStream = new EE(); - oldStream.pause = function() {}; - oldStream.resume = function() {}; - - var newStream = new Readable().wrap(oldStream); - - newStream - .on('readable', function() {}) - .on('end', function() { - t.ok(true, 'ended'); - }); - - oldStream.emit('end'); - - }) -} diff --git a/test/browser/test-stream2-readable-wrap.js b/test/browser/test-stream2-readable-wrap.js deleted file mode 100644 index b50a786e5b..0000000000 --- a/test/browser/test-stream2-readable-wrap.js +++ /dev/null @@ -1,86 +0,0 @@ -'use strict'; -var common = require('../common'); - -var Readable = require('../../lib/_stream_readable'); -var Writable = require('../../lib/_stream_writable'); -var EE = require('events').EventEmitter; -var run = 0; -function runTest(t, highWaterMark, objectMode, produce) { - t.test('run #' + (++run), function (t) { - var old = new EE(); - var r = new Readable({ highWaterMark: highWaterMark, - objectMode: objectMode }); - t.equal(r, r.wrap(old)); - - var ended = false; - r.on('end', function() { - ended = true; - }); - - old.pause = function() { - //console.error('old.pause()'); - old.emit('pause'); - flowing = false; - }; - - old.resume = function() { - //console.error('old.resume()'); - old.emit('resume'); - flow(); - }; - - var flowing; - var chunks = 10; - var oldEnded = false; - var expected = []; - function flow() { - flowing = true; - while (flowing && chunks-- > 0) { - var item = produce(); - expected.push(item); - //console.log('old.emit', chunks, flowing); - old.emit('data', item); - //console.log('after emit', chunks, flowing); - } - if (chunks <= 0) { - oldEnded = true; - //console.log('old end', chunks, flowing); - old.emit('end'); - } - } - - var w = new Writable({ highWaterMark: highWaterMark * 2, - objectMode: objectMode }); - var written = []; - w._write = function(chunk, encoding, cb) { - //console.log('_write', chunk); - written.push(chunk); - setTimeout(cb); - }; - - w.on('finish', function() { - performAsserts(); - }); - - r.pipe(w); - - flow(); - - function performAsserts() { - t.ok(ended); - t.ok(oldEnded); - t.deepEqual(written, expected); - t.end(); - } - }); -} -module.exports = function (t) { - t.test('readable wrap', function (t) { - runTest(t, 100, false, function() { return Buffer.alloc(100); }); - runTest(t, 10, false, function() { return Buffer.from('xxxxxxxxxx'); }); - 
runTest(t, 1, true, function() { return { foo: 'bar' }; }); - - var objectChunks = [ 5, 'a', false, 0, '', 'xyz', { x: 4 }, 7, [], 555 ]; - runTest(t, 1, true, function() { return objectChunks.shift(); }); - }); -} diff --git a/test/browser/test-stream2-set-encoding.js b/test/browser/test-stream2-set-encoding.js deleted file mode 100644 index ba70748fa5..0000000000 --- a/test/browser/test-stream2-set-encoding.js +++ /dev/null @@ -1,317 +0,0 @@ -'use strict'; -var common = require('../common'); -var R = require('../../lib/_stream_readable'); -var util = { - inherits: require('inherits') -}; - -// tiny node-tap lookalike. -module.exports = function (t) { - var test = t.test; - ///// - - util.inherits(TestReader, R); - - function TestReader(n, opts) { - R.call(this, opts); - - this.pos = 0; - this.len = n || 100; - } - - TestReader.prototype._read = function(n) { - setTimeout(function() { - - if (this.pos >= this.len) { - // double push(null) to test eos handling - this.push(null); - return this.push(null); - } - - n = Math.min(n, this.len - this.pos); - if (n <= 0) { - // double push(null) to test eos handling - this.push(null); - return this.push(null); - } - - this.pos += n; - var ret = Buffer.alloc(n); - ret.fill('a'); - - //console.log('this.push(ret)', ret); - - return this.push(ret); - }.bind(this), 1); - }; - - test('setEncoding utf8', function(t) { - var tr = new TestReader(100); - tr.setEncoding('utf8'); - var out = []; - var expect = - [ 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa' ]; - - tr.on('readable', function flow() { - var chunk; - while (null !== (chunk = tr.read(10))) - out.push(chunk); - }); - - tr.on('end', function() { - t.same(out, expect); - t.end(); - }); - }); - - - test('setEncoding hex', function(t) { - var tr = new TestReader(100); - tr.setEncoding('hex'); - var out = []; - var expect = - [ '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161' ]; - - tr.on('readable', function flow() { - var chunk; - while (null !== (chunk = tr.read(10))) - out.push(chunk); - }); - - tr.on('end', function() { - t.same(out, expect); - t.end(); - }); - }); - - test('setEncoding hex with read(13)', function(t) { - var tr = new TestReader(100); - tr.setEncoding('hex'); - var out = []; - var expect = - [ '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '16161' ]; - - tr.on('readable', function flow() { - //console.log('readable once'); - var chunk; - while (null !== (chunk = tr.read(13))) - out.push(chunk); - }); - - tr.on('end', function() { - //console.log('END'); - t.same(out, expect); - t.end(); - }); - }); - - test('setEncoding base64', function(t) { - var tr = new TestReader(100); - tr.setEncoding('base64'); - var out = []; - var expect = - [ 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYQ==' ]; - - tr.on('readable', 
function flow() { - var chunk; - while (null !== (chunk = tr.read(10))) - out.push(chunk); - }); - - tr.on('end', function() { - t.same(out, expect); - t.end(); - }); - }); - - test('encoding: utf8', function(t) { - var tr = new TestReader(100, { encoding: 'utf8' }); - var out = []; - var expect = - [ 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa' ]; - - tr.on('readable', function flow() { - var chunk; - while (null !== (chunk = tr.read(10))) - out.push(chunk); - }); - - tr.on('end', function() { - t.same(out, expect); - t.end(); - }); - }); - - - test('encoding: hex', function(t) { - var tr = new TestReader(100, { encoding: 'hex' }); - var out = []; - var expect = - [ '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161' ]; - - tr.on('readable', function flow() { - var chunk; - while (null !== (chunk = tr.read(10))) - out.push(chunk); - }); - - tr.on('end', function() { - t.same(out, expect); - t.end(); - }); - }); - - test('encoding: hex with read(13)', function(t) { - var tr = new TestReader(100, { encoding: 'hex' }); - var out = []; - var expect = - [ '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '16161' ]; - - tr.on('readable', function flow() { - var chunk; - while (null !== (chunk = tr.read(13))) - out.push(chunk); - }); - - tr.on('end', function() { - t.same(out, expect); - t.end(); - }); - }); - - test('encoding: base64', function(t) { - var tr = new TestReader(100, { encoding: 'base64' }); - var out = []; - var expect = - [ 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYQ==' ]; - - tr.on('readable', function flow() { - var chunk; - while (null !== (chunk = tr.read(10))) - out.push(chunk); - }); - - tr.on('end', function() { - t.same(out, expect); - t.end(); - }); - }); - - test('chainable', function(t) { - var tr = new TestReader(100); - t.equal(tr.setEncoding('utf8'), tr); - t.end(); - }); -} diff --git a/test/browser/test-stream2-transform.js b/test/browser/test-stream2-transform.js deleted file mode 100644 index 338fcdc5c0..0000000000 --- a/test/browser/test-stream2-transform.js +++ /dev/null @@ -1,473 +0,0 @@ -'use strict'; -var common = require('../common'); -var PassThrough = require('../../lib/_stream_passthrough'); -var Transform = require('../../lib/_stream_transform'); - -///// -module.exports = function (t) { - t.test('writable side consumption', function(t) { - var tx = new Transform({ - highWaterMark: 10 - }); - - var transformed = 0; - tx._transform = function(chunk, encoding, cb) { - transformed += chunk.length; - tx.push(chunk); - cb(); - }; - - for (var i = 1; i <= 10; i++) { - tx.write(Buffer.alloc(i)); - } - tx.end(); - - t.equal(tx._readableState.length, 10); - t.equal(transformed, 10); - t.equal(tx._transformState.writechunk.length, 5); - t.same(tx._writableState.getBuffer().map(function(c) { - 
return c.chunk.length; - }), [6, 7, 8, 9, 10]); - - t.end(); - }); - - t.test('passthrough', function(t) { - var pt = new PassThrough(); - - pt.write(Buffer.from('foog')); - pt.write(Buffer.from('bark')); - pt.write(Buffer.from('bazy')); - pt.write(Buffer.from('kuel')); - pt.end(); - - t.equal(pt.read(5).toString(), 'foogb'); - t.equal(pt.read(5).toString(), 'arkba'); - t.equal(pt.read(5).toString(), 'zykue'); - t.equal(pt.read(5).toString(), 'l'); - t.end(); - }); - - t.test('object passthrough', function(t) { - var pt = new PassThrough({ objectMode: true }); - - pt.write(1); - pt.write(true); - pt.write(false); - pt.write(0); - pt.write('foo'); - pt.write(''); - pt.write({ a: 'b'}); - pt.end(); - - t.equal(pt.read(), 1); - t.equal(pt.read(), true); - t.equal(pt.read(), false); - t.equal(pt.read(), 0); - t.equal(pt.read(), 'foo'); - t.equal(pt.read(), ''); - t.same(pt.read(), { a: 'b'}); - t.end(); - }); - - t.test('simple transform', function(t) { - var pt = new Transform(); - pt._transform = function(c, e, cb) { - var ret = Buffer.alloc(c.length); - ret.fill('x'); - pt.push(ret); - cb(); - }; - - pt.write(Buffer.from('foog')); - pt.write(Buffer.from('bark')); - pt.write(Buffer.from('bazy')); - pt.write(Buffer.from('kuel')); - pt.end(); - - t.equal(pt.read(5).toString(), 'xxxxx'); - t.equal(pt.read(5).toString(), 'xxxxx'); - t.equal(pt.read(5).toString(), 'xxxxx'); - t.equal(pt.read(5).toString(), 'x'); - t.end(); - }); - - t.test('simple object transform', function(t) { - var pt = new Transform({ objectMode: true }); - pt._transform = function(c, e, cb) { - pt.push(JSON.stringify(c)); - cb(); - }; - - pt.write(1); - pt.write(true); - pt.write(false); - pt.write(0); - pt.write('foo'); - pt.write(''); - pt.write({ a: 'b'}); - pt.end(); - - t.equal(pt.read(), '1'); - t.equal(pt.read(), 'true'); - t.equal(pt.read(), 'false'); - t.equal(pt.read(), '0'); - t.equal(pt.read(), '"foo"'); - t.equal(pt.read(), '""'); - t.equal(pt.read(), '{"a":"b"}'); - t.end(); - }); - - t.test('async passthrough', function(t) { - var pt = new Transform(); - pt._transform = function(chunk, encoding, cb) { - setTimeout(function() { - pt.push(chunk); - cb(); - }, 10); - }; - - pt.write(Buffer.from('foog')); - pt.write(Buffer.from('bark')); - pt.write(Buffer.from('bazy')); - pt.write(Buffer.from('kuel')); - pt.end(); - - pt.on('finish', function() { - t.equal(pt.read(5).toString(), 'foogb'); - t.equal(pt.read(5).toString(), 'arkba'); - t.equal(pt.read(5).toString(), 'zykue'); - t.equal(pt.read(5).toString(), 'l'); - t.end(); - }); - }); - - t.test('assymetric transform (expand)', function(t) { - var pt = new Transform(); - - // emit each chunk 2 times. - pt._transform = function(chunk, encoding, cb) { - setTimeout(function() { - pt.push(chunk); - setTimeout(function() { - pt.push(chunk); - cb(); - }, 10); - }, 10); - }; - - pt.write(Buffer.from('foog')); - pt.write(Buffer.from('bark')); - pt.write(Buffer.from('bazy')); - pt.write(Buffer.from('kuel')); - pt.end(); - - pt.on('finish', function() { - t.equal(pt.read(5).toString(), 'foogf'); - t.equal(pt.read(5).toString(), 'oogba'); - t.equal(pt.read(5).toString(), 'rkbar'); - t.equal(pt.read(5).toString(), 'kbazy'); - t.equal(pt.read(5).toString(), 'bazyk'); - t.equal(pt.read(5).toString(), 'uelku'); - t.equal(pt.read(5).toString(), 'el'); - t.end(); - }); - }); - - t.test('assymetric transform (compress)', function(t) { - var pt = new Transform(); - - // each output is the first char of 3 consecutive chunks, - // or whatever's left. 
- pt.state = ''; - - pt._transform = function(chunk, encoding, cb) { - if (!chunk) - chunk = ''; - var s = chunk.toString(); - setTimeout(function() { - this.state += s.charAt(0); - if (this.state.length === 3) { - pt.push(Buffer.from(this.state)); - this.state = ''; - } - cb(); - }.bind(this), 10); - }; - - pt._flush = function(cb) { - // just output whatever we have. - pt.push(Buffer.from(this.state)); - this.state = ''; - cb(); - }; - - pt.write(Buffer.from('aaaa')); - pt.write(Buffer.from('bbbb')); - pt.write(Buffer.from('cccc')); - pt.write(Buffer.from('dddd')); - pt.write(Buffer.from('eeee')); - pt.write(Buffer.from('aaaa')); - pt.write(Buffer.from('bbbb')); - pt.write(Buffer.from('cccc')); - pt.write(Buffer.from('dddd')); - pt.write(Buffer.from('eeee')); - pt.write(Buffer.from('aaaa')); - pt.write(Buffer.from('bbbb')); - pt.write(Buffer.from('cccc')); - pt.write(Buffer.from('dddd')); - pt.end(); - - // 'abcdeabcdeabcd' - pt.on('finish', function() { - t.equal(pt.read(5).toString(), 'abcde'); - t.equal(pt.read(5).toString(), 'abcde'); - t.equal(pt.read(5).toString(), 'abcd'); - t.end(); - }); - }); - - // this tests for a stall when data is written to a full stream - // that has empty transforms. - t.test('complex transform', function(t) { - var count = 0; - var saved = null; - var pt = new Transform({highWaterMark:3}); - pt._transform = function(c, e, cb) { - if (count++ === 1) - saved = c; - else { - if (saved) { - pt.push(saved); - saved = null; - } - pt.push(c); - } - - cb(); - }; - - pt.once('readable', function() { - process.nextTick(function() { - pt.write(Buffer.from('d')); - pt.write(Buffer.from('ef'), function() { - pt.end(); - t.end(); - }); - t.equal(pt.read().toString(), 'abcdef'); - t.equal(pt.read(), null); - }); - }); - - pt.write(Buffer.from('abc')); - }); - - - t.test('passthrough event emission', function(t) { - var pt = new PassThrough(); - var emits = 0; - pt.on('readable', function() { - var state = pt._readableState; - //console.error('>>> emit readable %d', emits); - emits++; - }); - - var i = 0; - - pt.write(Buffer.from('foog')); - - //console.error('need emit 0'); - pt.write(Buffer.from('bark')); - - //console.error('should have emitted readable now 1 === %d', emits); - t.equal(emits, 1); - - t.equal(pt.read(5).toString(), 'foogb'); - t.equal(pt.read(5) + '', 'null'); - - //console.error('need emit 1'); - - pt.write(Buffer.from('bazy')); - //console.error('should have emitted, but not again'); - pt.write(Buffer.from('kuel')); - - //console.error('should have emitted readable now 2 === %d', emits); - t.equal(emits, 2); - - t.equal(pt.read(5).toString(), 'arkba'); - t.equal(pt.read(5).toString(), 'zykue'); - t.equal(pt.read(5), null); - - //console.error('need emit 2'); - - pt.end(); - - t.equal(emits, 3); - - t.equal(pt.read(5).toString(), 'l'); - t.equal(pt.read(5), null); - - //console.error('should not have emitted again'); - t.equal(emits, 3); - t.end(); - }); - - t.test('passthrough event emission reordered', function(t) { - var pt = new PassThrough(); - var emits = 0; - pt.on('readable', function() { - //console.error('emit readable', emits); - emits++; - }); - - pt.write(Buffer.from('foog')); - //console.error('need emit 0'); - pt.write(Buffer.from('bark')); - //console.error('should have emitted readable now 1 === %d', emits); - t.equal(emits, 1); - - t.equal(pt.read(5).toString(), 'foogb'); - t.equal(pt.read(5), null); - - //console.error('need emit 1'); - pt.once('readable', function() { - t.equal(pt.read(5).toString(), 'arkba'); - - 
t.equal(pt.read(5), null); - - //console.error('need emit 2'); - pt.once('readable', function() { - t.equal(pt.read(5).toString(), 'zykue'); - t.equal(pt.read(5), null); - pt.once('readable', function() { - t.equal(pt.read(5).toString(), 'l'); - t.equal(pt.read(5), null); - t.equal(emits, 4); - t.end(); - }); - pt.end(); - }); - pt.write(Buffer.from('kuel')); - }); - - pt.write(Buffer.from('bazy')); - }); - - t.test('passthrough facaded', function(t) { - //console.error('passthrough facaded'); - var pt = new PassThrough(); - var datas = []; - pt.on('data', function(chunk) { - datas.push(chunk.toString()); - }); - - pt.on('end', function() { - t.same(datas, ['foog', 'bark', 'bazy', 'kuel']); - t.end(); - }); - - pt.write(Buffer.from('foog')); - setTimeout(function() { - pt.write(Buffer.from('bark')); - setTimeout(function() { - pt.write(Buffer.from('bazy')); - setTimeout(function() { - pt.write(Buffer.from('kuel')); - setTimeout(function() { - pt.end(); - }, 10); - }, 10); - }, 10); - }, 10); - }); - - t.test('object transform (json parse)', function(t) { - //console.error('json parse stream'); - var jp = new Transform({ objectMode: true }); - jp._transform = function(data, encoding, cb) { - try { - jp.push(JSON.parse(data)); - cb(); - } catch (er) { - cb(er); - } - }; - - // anything except null/undefined is fine. - // those are "magic" in the stream API, because they signal EOF. - var objects = [ - { foo: 'bar' }, - 100, - 'string', - { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } } - ]; - - var ended = false; - jp.on('end', function() { - ended = true; - }); - - forEach(objects, function(obj) { - jp.write(JSON.stringify(obj)); - var res = jp.read(); - t.same(res, obj); - }); - - jp.end(); - // read one more time to get the 'end' event - jp.read(); - - process.nextTick(function() { - t.ok(ended); - t.end(); - }); - }); - - t.test('object transform (json stringify)', function(t) { - //console.error('json parse stream'); - var js = new Transform({ objectMode: true }); - js._transform = function(data, encoding, cb) { - try { - js.push(JSON.stringify(data)); - cb(); - } catch (er) { - cb(er); - } - }; - - // anything except null/undefined is fine. - // those are "magic" in the stream API, because they signal EOF. 
- var objects = [ - { foo: 'bar' }, - 100, - 'string', - { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } } - ]; - - var ended = false; - js.on('end', function() { - ended = true; - }); - - forEach(objects, function(obj) { - js.write(obj); - var res = js.read(); - t.equal(res, JSON.stringify(obj)); - }); - - js.end(); - // read one more time to get the 'end' event - js.read(); - - process.nextTick(function() { - t.ok(ended); - t.end(); - }); - }); - - function forEach (xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } - } -}; diff --git a/test/browser/test-stream2-unpipe-drain.js b/test/browser/test-stream2-unpipe-drain.js deleted file mode 100644 index d3ebbe86c0..0000000000 --- a/test/browser/test-stream2-unpipe-drain.js +++ /dev/null @@ -1,65 +0,0 @@ -'use strict'; -var common = require('../common'); -var stream = require('../../'); - -var crypto = require('crypto'); - -var inherits = require('inherits'); -module.exports = function (t) { - t.test('unpipe drain', function (t) { - try { - crypto.randomBytes(9); - } catch(_) { - t.ok(true, 'does not suport random, skipping'); - return t.end(); - } - function TestWriter() { - stream.Writable.call(this); - } - inherits(TestWriter, stream.Writable); - - TestWriter.prototype._write = function(buffer, encoding, callback) { - //console.log('write called'); - // super slow write stream (callback never called) - }; - - var dest = new TestWriter(); - - function TestReader(id) { - stream.Readable.call(this); - this.reads = 0; - } - inherits(TestReader, stream.Readable); - - TestReader.prototype._read = function(size) { - this.reads += 1; - this.push(crypto.randomBytes(size)); - }; - - var src1 = new TestReader(); - var src2 = new TestReader(); - - src1.pipe(dest); - - src1.once('readable', function() { - process.nextTick(function() { - - src2.pipe(dest); - - src2.once('readable', function() { - process.nextTick(function() { - - src1.unpipe(dest); - }); - }); - }); - }); - - - dest.on('unpipe', function() { - t.equal(src1.reads, 2); - t.equal(src2.reads, 1); - t.end(); - }); - }); -} diff --git a/test/browser/test-stream2-writable.js b/test/browser/test-stream2-writable.js deleted file mode 100644 index ca08fdb1b9..0000000000 --- a/test/browser/test-stream2-writable.js +++ /dev/null @@ -1,375 +0,0 @@ -'use strict'; -var common = require('../common'); -var W = require('../../lib/_stream_writable'); -var D = require('../../lib/_stream_duplex'); - -var inherits = require('inherits'); -inherits(TestWriter, W); - -function TestWriter() { - W.apply(this, arguments); - this.buffer = []; - this.written = 0; -} - -TestWriter.prototype._write = function(chunk, encoding, cb) { - // simulate a small unpredictable latency - setTimeout(function() { - this.buffer.push(chunk.toString()); - this.written += chunk.length; - cb(); - }.bind(this), Math.floor(Math.random() * 10)); -}; -inherits(Processstdout, W); - -function Processstdout() { - W.apply(this, arguments); - this.buffer = []; - this.written = 0; -} - -Processstdout.prototype._write = function(chunk, encoding, cb) { - //console.log(chunk.toString()); - cb(); -}; -var chunks = new Array(50); -for (var i = 0; i < chunks.length; i++) { - chunks[i] = new Array(i + 1).join('x'); -} - -module.exports = function (t) { - var test = t.test; - - if (!process.stdout) { - process.stdout = new Processstdout(); - } - - test('write fast', function(t) { - var tw = new TestWriter({ - highWaterMark: 100 - }); - - tw.on('finish', function() { - t.same(tw.buffer, chunks, 'got chunks in the right 
order'); - t.end(); - }); - - forEach(chunks, function(chunk) { - // screw backpressure. Just buffer it all up. - tw.write(chunk); - }); - tw.end(); - }); - - test('write slow', function(t) { - var tw = new TestWriter({ - highWaterMark: 100 - }); - - tw.on('finish', function() { - t.same(tw.buffer, chunks, 'got chunks in the right order'); - t.end(); - }); - - var i = 0; - (function W() { - tw.write(chunks[i++]); - if (i < chunks.length) - setTimeout(W, 10); - else - tw.end(); - })(); - }); - - test('write backpressure', function(t) { - var tw = new TestWriter({ - highWaterMark: 50 - }); - - var drains = 0; - - tw.on('finish', function() { - t.same(tw.buffer, chunks, 'got chunks in the right order'); - t.equal(drains, 17); - t.end(); - }); - - tw.on('drain', function() { - drains++; - }); - - var i = 0; - (function W() { - do { - var ret = tw.write(chunks[i++]); - } while (ret !== false && i < chunks.length); - - if (i < chunks.length) { - t.ok(tw._writableState.length >= 50); - tw.once('drain', W); - } else { - tw.end(); - } - })(); - }); - - test('write bufferize', function(t) { - var tw = new TestWriter({ - highWaterMark: 100 - }); - - var encodings = - [ 'hex', - 'utf8', - 'utf-8', - 'ascii', - 'binary', - 'base64', - 'ucs2', - 'ucs-2', - 'utf16le', - 'utf-16le', - undefined ]; - - tw.on('finish', function() { - t.same(tw.buffer, chunks, 'got the expected chunks'); - }); - - forEach(chunks, function(chunk, i) { - var enc = encodings[ i % encodings.length ]; - chunk = Buffer.from(chunk); - tw.write(chunk.toString(enc), enc); - }); - t.end(); - }); - - test('write no bufferize', function(t) { - var tw = new TestWriter({ - highWaterMark: 100, - decodeStrings: false - }); - - tw._write = function(chunk, encoding, cb) { - t.equals(typeof chunk, 'string'); - chunk = Buffer.from(chunk, encoding); - return TestWriter.prototype._write.call(this, chunk, encoding, cb); - }; - - var encodings = - [ 'hex', - 'utf8', - 'utf-8', - 'ascii', - 'binary', - 'base64', - 'ucs2', - 'ucs-2', - 'utf16le', - 'utf-16le', - undefined ]; - - tw.on('finish', function() { - t.same(tw.buffer, chunks, 'got the expected chunks'); - }); - - forEach(chunks, function(chunk, i) { - var enc = encodings[ i % encodings.length ]; - chunk = Buffer.from(chunk); - tw.write(chunk.toString(enc), enc); - }); - t.end(); - }); - - test('write callbacks', function(t) { - var callbacks = chunks.map(function(chunk, i) { - return [i, function(er) { - callbacks._called[i] = chunk; - }]; - }).reduce(function(set, x) { - set['callback-' + x[0]] = x[1]; - return set; - }, {}); - callbacks._called = []; - - var tw = new TestWriter({ - highWaterMark: 100 - }); - - tw.on('finish', function() { - process.nextTick(function() { - t.same(tw.buffer, chunks, 'got chunks in the right order'); - t.same(callbacks._called, chunks, 'called all callbacks'); - t.end(); - }); - }); - - forEach(chunks, function(chunk, i) { - tw.write(chunk, callbacks['callback-' + i]); - }); - tw.end(); - }); - - test('end callback', function(t) { - var tw = new TestWriter(); - tw.end(function() { - t.end(); - }); - }); - - test('end callback with chunk', function(t) { - var tw = new TestWriter(); - tw.end(Buffer.from('hello world'), function() { - t.end(); - }); - }); - - test('end callback with chunk and encoding', function(t) { - var tw = new TestWriter(); - tw.end('hello world', 'ascii', function() { - t.end(); - }); - }); - - test('end callback after .write() call', function(t) { - var tw = new TestWriter(); - tw.write(Buffer.from('hello world')); - tw.end(function() { - 
t.end(); - }); - }); - - test('end callback called after write callback', function(t) { - var tw = new TestWriter(); - var writeCalledback = false; - tw.write(Buffer.from('hello world'), function() { - writeCalledback = true; - }); - tw.end(function() { - t.equal(writeCalledback, true); - t.end(); - }); - }); - - test('encoding should be ignored for buffers', function(t) { - var tw = new W(); - var hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb'; - tw._write = function(chunk, encoding, cb) { - t.equal(chunk.toString('hex'), hex); - t.end(); - }; - var buf = Buffer.from(hex, 'hex'); - tw.write(buf, 'binary'); - }); - - test('writables are not pipable', function(t) { - var w = new W(); - w._write = function() {}; - var gotError = false; - w.on('error', function(er) { - gotError = true; - }); - w.pipe(process.stdout); - t.ok(gotError); - t.end(); - }); - - test('duplexes are pipable', function(t) { - var d = new D(); - d._read = function() {}; - d._write = function() {}; - var gotError = false; - d.on('error', function(er) { - gotError = true; - }); - d.pipe(process.stdout); - t.ok(!gotError); - t.end(); - }); - - test('end(chunk) two times is an error', function(t) { - var w = new W(); - w._write = function() {}; - var gotError = false; - w.on('error', function(er) { - gotError = true; - t.equal(er.message, 'write after end'); - }); - w.end('this is the end'); - w.end('and so is this'); - process.nextTick(function() { - t.ok(gotError); - t.end(); - }); - }); - - test('dont end while writing', function(t) { - var w = new W(); - var wrote = false; - w._write = function(chunk, e, cb) { - t.ok(!this.writing); - wrote = true; - this.writing = true; - setTimeout(function() { - this.writing = false; - cb(); - }); - }; - w.on('finish', function() { - t.ok(wrote); - t.end(); - }); - w.write(Buffer(0)); - w.end(); - }); - - test('finish does not come before write cb', function(t) { - var w = new W(); - var writeCb = false; - w._write = function(chunk, e, cb) { - setTimeout(function() { - writeCb = true; - cb(); - }, 10); - }; - w.on('finish', function() { - t.ok(writeCb); - t.end(); - }); - w.write(Buffer(0)); - w.end(); - }); - - test('finish does not come before sync _write cb', function(t) { - var w = new W(); - var writeCb = false; - w._write = function(chunk, e, cb) { - cb(); - }; - w.on('finish', function() { - t.ok(writeCb); - t.end(); - }); - w.write(Buffer(0), function(er) { - writeCb = true; - }); - w.end(); - }); - - test('finish is emitted if last chunk is empty', function(t) { - var w = new W(); - w._write = function(chunk, e, cb) { - process.nextTick(cb); - }; - w.on('finish', function() { - t.end(); - }); - w.write(Buffer(1)); - w.end(Buffer(0)); - }); - - function forEach (xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } - } -} diff --git a/test/browser/test-stream3-pause-then-read.js b/test/browser/test-stream3-pause-then-read.js deleted file mode 100644 index 9684ea7774..0000000000 --- a/test/browser/test-stream3-pause-then-read.js +++ /dev/null @@ -1,150 +0,0 @@ -'use strict'; -var common = require('../common'); - -var stream = require('../../'); -var Readable = stream.Readable; -var Writable = stream.Writable; - -module.exports = function (t){ - t.test('pause then read', function (t) { - var totalChunks = 100; - var chunkSize = 99; - var expectTotalData = totalChunks * chunkSize; - var expectEndingData = expectTotalData; - - var r = new Readable({ highWaterMark: 1000 }); - var chunks = totalChunks; - r._read = function(n) { - if (!(chunks % 2)) - 
setImmediate(push); - else if (!(chunks % 3)) - process.nextTick(push); - else - push(); - }; - - var totalPushed = 0; - function push() { - var chunk = chunks-- > 0 ? Buffer.alloc(chunkSize) : null; - if (chunk) { - totalPushed += chunk.length; - chunk.fill('x'); - } - r.push(chunk); - } - - read100(); - - // first we read 100 bytes - function read100() { - readn(100, onData); - } - - function readn(n, then) { - //console.error('read %d', n); - expectEndingData -= n; - ;(function read() { - var c = r.read(n); - if (!c) - r.once('readable', read); - else { - t.equal(c.length, n); - t.notOk(r._readableState.flowing); - then(); - } - })(); - } - - // then we listen to some data events - function onData() { - expectEndingData -= 100; - //console.error('onData'); - var seen = 0; - r.on('data', function od(c) { - seen += c.length; - if (seen >= 100) { - // seen enough - r.removeListener('data', od); - r.pause(); - if (seen > 100) { - // oh no, seen too much! - // put the extra back. - var diff = seen - 100; - r.unshift(c.slice(c.length - diff)); - console.error('seen too much', seen, diff); - } - - // Nothing should be lost in between - setImmediate(pipeLittle); - } - }); - } - - // Just pipe 200 bytes, then unshift the extra and unpipe - function pipeLittle() { - expectEndingData -= 200; - //console.error('pipe a little'); - var w = new Writable(); - var written = 0; - w.on('finish', function() { - t.equal(written, 200); - setImmediate(read1234); - }); - w._write = function(chunk, encoding, cb) { - written += chunk.length; - if (written >= 200) { - r.unpipe(w); - w.end(); - cb(); - if (written > 200) { - var diff = written - 200; - written -= diff; - r.unshift(chunk.slice(chunk.length - diff)); - } - } else { - setImmediate(cb); - } - }; - r.pipe(w); - } - - // now read 1234 more bytes - function read1234() { - readn(1234, resumePause); - } - - function resumePause() { - //console.error('resumePause'); - // don't read anything, just resume and re-pause a whole bunch - r.resume(); - r.pause(); - r.resume(); - r.pause(); - r.resume(); - r.pause(); - r.resume(); - r.pause(); - r.resume(); - r.pause(); - setImmediate(pipe); - } - - - function pipe() { - //console.error('pipe the rest'); - var w = new Writable(); - var written = 0; - w._write = function(chunk, encoding, cb) { - written += chunk.length; - cb(); - }; - w.on('finish', function() { - //console.error('written', written, totalPushed); - t.equal(written, expectEndingData); - t.equal(totalPushed, expectTotalData); - t.end(); - }); - r.pipe(w); - } - }); -} diff --git a/test/common/README.md b/test/common/README.md deleted file mode 100644 index 0e773debfc..0000000000 --- a/test/common/README.md +++ /dev/null @@ -1,802 +0,0 @@ -/**/ - require('@babel/polyfill'); - var util = require('util'); - for (var i in util) exports[i] = util[i]; - /**/# Node.js Core Test Common Modules - -This directory contains modules used to test the Node.js implementation. - -## Table of Contents - -* [Benchmark module](#benchmark-module) -* [Common module API](#common-module-api) -* [Countdown module](#countdown-module) -* [DNS module](#dns-module) -* [Duplex pair helper](#duplex-pair-helper) -* [Fixtures module](#fixtures-module) -* [Heap dump checker module](#heap-dump-checker-module) -* [HTTP2 module](#http2-module) -* [Internet module](#internet-module) -* [tick module](#tick-module) -* [tmpdir module](#tmpdir-module) -* [WPT module](#wpt-module) - -## Benchmark Module - -The `benchmark` module is used by tests to run benchmarks. 
- -### runBenchmark(name, args, env) - -* `name` [<string>] Name of benchmark suite to be run. -* `args` [<Array>] Array of environment variable key/value pairs (ex: - `n=1`) to be applied via `--set`. -* `env` [<Object>] Environment variables to be applied during the run. - -## Common Module API - -The `common` module is used by tests for consistency across repeated -tasks. - -### allowGlobals(...whitelist) -* `whitelist` [<Array>] Array of Globals -* return [<Array>] - -Takes `whitelist` and concats that with predefined `knownGlobals`. - -### busyLoop(time) -* `time` [<number>] - -Blocks for `time` amount of time. - -### canCreateSymLink() -* return [<boolean>] - -Checks whether the current running process can create symlinks. On Windows, this -returns `false` if the process running doesn't have privileges to create -symlinks -([SeCreateSymbolicLinkPrivilege](https://msdn.microsoft.com/en-us/library/windows/desktop/bb530716(v=vs.85).aspx)). -On non-Windows platforms, this always returns `true`. - -### createZeroFilledFile(filename) - -Creates a 10 MB file of all null characters. - -### disableCrashOnUnhandledRejection() - -Removes the `process.on('unhandledRejection')` handler that crashes the process -after a tick. The handler is useful for tests that use Promises and need to make -sure no unexpected rejections occur, because currently they result in silent -failures. However, it is useful in some rare cases to disable it, for example if -the `unhandledRejection` hook is directly used by the test. - -### enoughTestMem -* [<boolean>] - -Indicates if there is more than 1gb of total memory. - -### expectsError([fn, ]settings[, exact]) -* `fn` [<Function>] a function that should throw. -* `settings` [<Object>] - that must contain the `code` property plus any of the other following - properties (some properties only apply for `AssertionError`): - * `code` [<string>] - expected error must have this value for its `code` property. - * `type` [<Function>] - expected error must be an instance of `type` and must be an Error subclass. - * `message` [<string>] or [<RegExp>] - if a string is provided for `message`, expected error must have it for its - `message` property; if a regular expression is provided for `message`, the - regular expression must match the `message` property of the expected error. - * `name` [<string>] - expected error must have this value for its `name` property. - * `info` <Object> expected error must have the same `info` property - that is deeply equal to this value. - * `generatedMessage` [<string>] - (`AssertionError` only) expected error must have this value for its - `generatedMessage` property. - * `actual` <any> - (`AssertionError` only) expected error must have this value for its - `actual` property. - * `expected` <any> - (`AssertionError` only) expected error must have this value for its - `expected` property. - * `operator` <any> - (`AssertionError` only) expected error must have this value for its - `operator` property. -* `exact` [<number>] default = 1 -* return [<Function>] - - If `fn` is provided, it will be passed to `assert.throws` as first argument - and `undefined` will be returned. - Otherwise a function suitable as callback or for use as a validation function - passed as the second argument to `assert.throws()` will be returned. If the - returned function has not been called exactly `exact` number of times when the - test is complete, then the test will fail. 
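
The validation-function form of `expectsError()` (no `fn` argument) is the easiest part of the contract above to misread, so here is a minimal sketch based only on that description; the thrower and its `ERR_EXAMPLE` code are hypothetical, and `common` stands for the `test/common` module documented in this file.

```js
'use strict';
// A minimal sketch of expectsError() used as the validation function passed
// to assert.throws(). The thrown error and the 'ERR_EXAMPLE' code are made
// up purely for illustration.
const assert = require('assert');
const common = require('../common');

function thrower() {
  const err = new TypeError('value must be a string');
  err.code = 'ERR_EXAMPLE';
  throw err;
}

assert.throws(
  thrower,
  // Returns a validator that checks code, constructor and message, and that
  // must itself be invoked exactly once (the default `exact`) by test end.
  common.expectsError({
    code: 'ERR_EXAMPLE',
    type: TypeError,
    message: 'value must be a string'
  })
);
```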
- -### expectWarning(name, expected, code) -* `name` [<string>] -* `expected` [<string>] | [<Array>] -* `code` [<string>] - -Tests whether `name`, `expected`, and `code` are part of a raised warning. If -an expected warning does not have a code then `common.noWarnCode` can be used -to indicate this. - -### getArrayBufferViews(buf) -* `buf` [<Buffer>] -* return [<ArrayBufferView[]>] - -Returns an instance of all possible `ArrayBufferView`s of the provided Buffer. - -### getBufferSources(buf) -* `buf` [<Buffer>] -* return [<BufferSource[]>] - -Returns an instance of all possible `BufferSource`s of the provided Buffer, -consisting of all `ArrayBufferView` and an `ArrayBuffer`. - -### getCallSite(func) -* `func` [<Function>] -* return [<string>] - -Returns the file name and line number for the provided Function. - -### getTTYfd() - -Attempts to get a valid TTY file descriptor. Returns `-1` if it fails. - -The TTY file descriptor is assumed to be capable of being writable. - -### hasCrypto -* [<boolean>] - -Indicates whether OpenSSL is available. - -### hasFipsCrypto -* [<boolean>] - -Indicates `hasCrypto` and `crypto` with fips. - -### hasIntl -* [<boolean>] - -Indicates if [internationalization] is supported. - -### hasIPv6 -* [<boolean>] - -Indicates whether `IPv6` is supported on this platform. - -### hasMultiLocalhost -* [<boolean>] - -Indicates if there are multiple localhosts available. - -### inFreeBSDJail -* [<boolean>] - -Checks whether free BSD Jail is true or false. - -### isAIX -* [<boolean>] - -Platform check for Advanced Interactive eXecutive (AIX). - -### isAlive(pid) -* `pid` [<number>] -* return [<boolean>] - -Attempts to 'kill' `pid` - -### isFreeBSD -* [<boolean>] - -Platform check for Free BSD. - -### isLinux -* [<boolean>] - -Platform check for Linux. - -### isLinuxPPCBE -* [<boolean>] - -Platform check for Linux on PowerPC. - -### isOSX -* [<boolean>] - -Platform check for macOS. - -### isSunOS -* [<boolean>] - -Platform check for SunOS. - -### isWindows -* [<boolean>] - -Platform check for Windows. - -### localhostIPv4 -* [<string>] - -IP of `localhost`. - -### localIPv6Hosts -* [<Array>] - -Array of IPV6 representations for `localhost`. - -### mustCall([fn][, exact]) -* `fn` [<Function>] default = () => {} -* `exact` [<number>] default = 1 -* return [<Function>] - -Returns a function that calls `fn`. If the returned function has not been called -exactly `exact` number of times when the test is complete, then the test will -fail. - -If `fn` is not provided, an empty function will be used. - -### mustCallAtLeast([fn][, minimum]) -* `fn` [<Function>] default = () => {} -* `minimum` [<number>] default = 1 -* return [<Function>] - -Returns a function that calls `fn`. If the returned function has not been called -at least `minimum` number of times when the test is complete, then the test will -fail. - -If `fn` is not provided, an empty function will be used. - -### mustNotCall([msg]) -* `msg` [<string>] default = 'function should not have been called' -* return [<Function>] - -Returns a function that triggers an `AssertionError` if it is invoked. `msg` is -used as the error message for the `AssertionError`. - -### nodeProcessAborted(exitCode, signal) -* `exitCode` [<number>] -* `signal` [<string>] -* return [<boolean>] - -Returns `true` if the exit code `exitCode` and/or signal name `signal` represent -the exit code and/or signal name of a node process that aborted, `false` -otherwise. - -### noWarnCode -See `common.expectWarning()` for usage. 
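For illustration, a minimal sketch of `mustCall()` and `mustNotCall()` (documented above) in a stream test, assuming this package is required as `../../` the way the tests in this directory do; the pushed data is illustrative.

```js
const common = require('../common');
const { Readable } = require('../../');

const r = new Readable({ read() {} });

// The test fails unless 'end' fires exactly once...
r.on('end', common.mustCall());
// ...and fails immediately if 'error' ever fires.
r.on('error', common.mustNotCall('stream should not emit an error'));

r.push('hello');
r.push(null);
r.resume();
```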
- -### opensslCli -* [<boolean>] - -Indicates whether 'opensslCli' is supported. - -### platformTimeout(ms) -* `ms` [<number>] -* return [<number>] - -Platform normalizes timeout. - -### PIPE -* [<string>] - -Path to the test socket. - -### PORT -* [<number>] - -A port number for tests to use if one is needed. - -### printSkipMessage(msg) -* `msg` [<string>] - -Logs '1..0 # Skipped: ' + `msg` - -### pwdCommand -* [<array>] First two argument for the `spawn`/`exec` functions. - -Platform normalized `pwd` command options. Usage example: -```js -const common = require('../common'); -const { spawn } = require('child_process'); - -spawn(...common.pwdCommand, { stdio: ['pipe'] }); -``` - -### rootDir -* [<string>] - -Path to the 'root' directory. either `/` or `c:\\` (windows) - -### runWithInvalidFD(func) -* `func` [<Function>] - -Runs `func` with an invalid file descriptor that is an unsigned integer and -can be used to trigger `EBADF` as the first argument. If no such file -descriptor could be generated, a skip message will be printed and the `func` -will not be run. - -### skip(msg) -* `msg` [<string>] - -Logs '1..0 # Skipped: ' + `msg` and exits with exit code `0`. - -### skipIfEslintMissing() - -Skip the rest of the tests in the current file when `ESLint` is not available -at `tools/node_modules/eslint` - -### skipIfInspectorDisabled() - -Skip the rest of the tests in the current file when the Inspector -was disabled at compile time. - -### skipIf32Bits() - -Skip the rest of the tests in the current file when the Node.js executable -was compiled with a pointer size smaller than 64 bits. - -### skipIfWorker() - -Skip the rest of the tests in the current file when not running on a main -thread. - -## ArrayStream Module - -The `ArrayStream` module provides a simple `Stream` that pushes elements from -a given array. - - -```js -const ArrayStream = require('../common/arraystream'); -const stream = new ArrayStream(); -stream.run(['a', 'b', 'c']); -``` - -It can be used within tests as a simple mock stream. - -## Countdown Module - -The `Countdown` module provides a simple countdown mechanism for tests that -require a particular action to be taken after a given number of completed -tasks (for instance, shutting down an HTTP server after a specific number of -requests). The Countdown will fail the test if the remainder did not reach 0. - - -```js -const Countdown = require('../common/countdown'); - -function doSomething() { - console.log('.'); -} - -const countdown = new Countdown(2, doSomething); -countdown.dec(); -countdown.dec(); -``` - -### new Countdown(limit, callback) - -* `limit` {number} -* `callback` {function} - -Creates a new `Countdown` instance. - -### Countdown.prototype.dec() - -Decrements the `Countdown` counter. - -### Countdown.prototype.remaining - -Specifies the remaining number of times `Countdown.prototype.dec()` must be -called before the callback is invoked. - -## DNS Module - -The `DNS` module provides utilities related to the `dns` built-in module. - -### errorLookupMock(code, syscall) - -* `code` [<string>] Defaults to `dns.mockedErrorCode`. -* `syscall` [<string>] Defaults to `dns.mockedSysCall`. -* return [<Function>] - -A mock for the `lookup` option of `net.connect()` that would result in an error -with the `code` and the `syscall` specified. Returns a function that has the -same signature as `dns.lookup()`. - -### mockedErrorCode - -The default `code` of errors generated by `errorLookupMock`. 
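For illustration, a minimal sketch of wiring `errorLookupMock()` into the `lookup` option of `net.connect()` as described above; the host and port are illustrative, and no real DNS query is made because the mock replaces the lookup entirely.

```js
const common = require('../common');
const { errorLookupMock, mockedErrorCode } = require('../common/dns');
const assert = require('assert');
const net = require('net');

// The mocked lookup fails with the default code (ENOTFOUND), so the socket
// emits 'error' instead of connecting.
const socket = net.connect({
  host: 'example.org',
  port: 80,
  lookup: errorLookupMock()
});

socket.on('error', common.mustCall((err) => {
  assert.strictEqual(err.code, mockedErrorCode);
}));
```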
- -### mockedSysCall - -The default `syscall` of errors generated by `errorLookupMock`. - -### readDomainFromPacket(buffer, offset) - -* `buffer` [<Buffer>] -* `offset` [<number>] -* return [<Object>] - -Reads the domain string from a packet and returns an object containing the -number of bytes read and the domain. - -### parseDNSPacket(buffer) - -* `buffer` [<Buffer>] -* return [<Object>] - -Parses a DNS packet. Returns an object with the values of the various flags of -the packet depending on the type of packet. - -### writeIPv6(ip) - -* `ip` [<string>] -* return [<Buffer>] - -Reads an IPv6 String and returns a Buffer containing the parts. - -### writeDomainName(domain) - -* `domain` [<string>] -* return [<Buffer>] - -Reads a Domain String and returns a Buffer containing the domain. - -### writeDNSPacket(parsed) - -* `parsed` [<Object>] -* return [<Buffer>] - -Takes in a parsed Object and writes its fields to a DNS packet as a Buffer -object. - -## Duplex pair helper - -The `common/duplexpair` module exports a single function `makeDuplexPair`, -which returns an object `{ clientSide, serverSide }` where each side is a -`Duplex` stream connected to the other side. - -There is no difference between client or server side beyond their names. - -## Fixtures Module - -The `common/fixtures` module provides convenience methods for working with -files in the `test/fixtures` directory. - -### fixtures.fixturesDir - -* [<string>] - -The absolute path to the `test/fixtures/` directory. - -### fixtures.path(...args) - -* `...args` [<string>] - -Returns the result of `path.join(fixtures.fixturesDir, ...args)`. - -### fixtures.readSync(args[, enc]) - -* `args` [<string>] | [<Array>] - -Returns the result of -`fs.readFileSync(path.join(fixtures.fixturesDir, ...args), 'enc')`. - -### fixtures.readKey(arg[, enc]) - -* `arg` [<string>] - -Returns the result of -`fs.readFileSync(path.join(fixtures.fixturesDir, 'keys', arg), 'enc')`. - -## Heap dump checker module - -This provides utilities for checking the validity of heap dumps. -This requires the usage of `--expose-internals`. - -### heap.recordState() - -Create a heap dump and an embedder graph copy for inspection. -The returned object has a `validateSnapshotNodes` function similar to the -one listed below. (`heap.validateSnapshotNodes(...)` is a shortcut for -`heap.recordState().validateSnapshotNodes(...)`.) - -### heap.validateSnapshotNodes(name, expected, options) - -* `name` [<string>] Look for this string as the name of heap dump nodes. -* `expected` [<Array>] A list of objects, possibly with an `children` - property that points to expected other adjacent nodes. -* `options` [<Array>] - * `loose` [<boolean>] Do not expect an exact listing of occurrences - of nodes with name `name` in `expected`. - -Create a heap dump and an embedder graph copy and validate occurrences. - - -```js -validateSnapshotNodes('TLSWRAP', [ - { - children: [ - { name: 'enc_out' }, - { name: 'enc_in' }, - { name: 'TLSWrap' } - ] - } -]); -``` - -## hijackstdio Module - -The `hijackstdio` module provides utility functions for temporarily redirecting -`stdout` and `stderr` output. - - -```js -const { hijackStdout, restoreStdout } = require('../common/hijackstdio'); - -hijackStdout((data) => { - /* Do something with data */ - restoreStdout(); -}); - -console.log('this is sent to the hijacked listener'); -``` - -### hijackStderr(listener) -* `listener` [<Function>]: a listener with a single parameter - called `data`. - -Eavesdrop to `process.stderr.write()` calls. 
Once `process.stderr.write()` is -called, `listener` will also be called and the `data` of `write` function will -be passed to `listener`. What's more, `process.stderr.writeTimes` is a count of -the number of calls. - -### hijackStdout(listener) -* `listener` [<Function>]: a listener with a single parameter - called `data`. - -Eavesdrop to `process.stdout.write()` calls. Once `process.stdout.write()` is -called, `listener` will also be called and the `data` of `write` function will -be passed to `listener`. What's more, `process.stdout.writeTimes` is a count of -the number of calls. - -### restoreStderr() - -Restore the original `process.stderr.write()`. Used to restore `stderr` to its -original state after calling [`hijackstdio.hijackStdErr()`][]. - -### restoreStdout() - -Restore the original `process.stdout.write()`. Used to restore `stdout` to its -original state after calling [`hijackstdio.hijackStdOut()`][]. - - -## HTTP/2 Module - -The http2.js module provides a handful of utilities for creating mock HTTP/2 -frames for testing of HTTP/2 endpoints - - -```js -const http2 = require('../common/http2'); -``` - -### Class: Frame - -The `http2.Frame` is a base class that creates a `Buffer` containing a -serialized HTTP/2 frame header. - - -```js -// length is a 24-bit unsigned integer -// type is an 8-bit unsigned integer identifying the frame type -// flags is an 8-bit unsigned integer containing the flag bits -// id is the 32-bit stream identifier, if any. -const frame = new http2.Frame(length, type, flags, id); - -// Write the frame data to a socket -socket.write(frame.data); -``` - -The serialized `Buffer` may be retrieved using the `frame.data` property. - -### Class: DataFrame extends Frame - -The `http2.DataFrame` is a subclass of `http2.Frame` that serializes a `DATA` -frame. - - -```js -// id is the 32-bit stream identifier -// payload is a Buffer containing the DATA payload -// padlen is an 8-bit integer giving the number of padding bytes to include -// final is a boolean indicating whether the End-of-stream flag should be set, -// defaults to false. -const frame = new http2.DataFrame(id, payload, padlen, final); - -socket.write(frame.data); -``` - -### Class: HeadersFrame - -The `http2.HeadersFrame` is a subclass of `http2.Frame` that serializes a -`HEADERS` frame. - - -```js -// id is the 32-bit stream identifier -// payload is a Buffer containing the HEADERS payload (see either -// http2.kFakeRequestHeaders or http2.kFakeResponseHeaders). -// padlen is an 8-bit integer giving the number of padding bytes to include -// final is a boolean indicating whether the End-of-stream flag should be set, -// defaults to false. -const frame = new http2.HeadersFrame(id, payload, padlen, final); - -socket.write(frame.data); -``` - -### Class: SettingsFrame - -The `http2.SettingsFrame` is a subclass of `http2.Frame` that serializes an -empty `SETTINGS` frame. - - -```js -// ack is a boolean indicating whether or not to set the ACK flag. -const frame = new http2.SettingsFrame(ack); - -socket.write(frame.data); -``` - -### http2.kFakeRequestHeaders - -Set to a `Buffer` instance that contains a minimal set of serialized HTTP/2 -request headers to be used as the payload of a `http2.HeadersFrame`. - - -```js -const frame = new http2.HeadersFrame(1, http2.kFakeRequestHeaders, 0, true); - -socket.write(frame.data); -``` - -### http2.kFakeResponseHeaders - -Set to a `Buffer` instance that contains a minimal set of serialized HTTP/2 -response headers to be used as the payload a `http2.HeadersFrame`. 
- - -```js -const frame = new http2.HeadersFrame(1, http2.kFakeResponseHeaders, 0, true); - -socket.write(frame.data); -``` - -### http2.kClientMagic - -Set to a `Buffer` containing the preamble bytes an HTTP/2 client must send -upon initial establishment of a connection. - - -```js -socket.write(http2.kClientMagic); -``` - -## Internet Module - -The `common/internet` module provides utilities for working with -internet-related tests. - -### internet.addresses - -* [<Object>] - * `INET_HOST` [<string>] A generic host that has registered common - DNS records, supports both IPv4 and IPv6, and provides basic HTTP/HTTPS - services - * `INET4_HOST` [<string>] A host that provides IPv4 services - * `INET6_HOST` [<string>] A host that provides IPv6 services - * `INET4_IP` [<string>] An accessible IPv4 IP, defaults to the - Google Public DNS IPv4 address - * `INET6_IP` [<string>] An accessible IPv6 IP, defaults to the - Google Public DNS IPv6 address - * `INVALID_HOST` [<string>] An invalid host that cannot be resolved - * `MX_HOST` [<string>] A host with MX records registered - * `SRV_HOST` [<string>] A host with SRV records registered - * `PTR_HOST` [<string>] A host with PTR records registered - * `NAPTR_HOST` [<string>] A host with NAPTR records registered - * `SOA_HOST` [<string>] A host with SOA records registered - * `CNAME_HOST` [<string>] A host with CNAME records registered - * `NS_HOST` [<string>] A host with NS records registered - * `TXT_HOST` [<string>] A host with TXT records registered - * `DNS4_SERVER` [<string>] An accessible IPv4 DNS server - * `DNS6_SERVER` [<string>] An accessible IPv6 DNS server - -A set of addresses for internet-related tests. All properties are configurable -via `NODE_TEST_*` environment variables. For example, to configure -`internet.addresses.INET_HOST`, set the environment -variable `NODE_TEST_INET_HOST` to a specified host. - -## ongc Module - -The `ongc` module allows a garbage collection listener to be installed. The -module exports a single `onGC()` function. - -```js -require('../common'); -const onGC = require('../common/ongc'); - -onGC({}, { ongc() { console.log('collected'); } }); -``` - -### onGC(target, listener) -* `target` [<Object>] -* `listener` [<Object>] - * `ongc` [<Function>] - -Installs a GC listener for the collection of `target`. - -This uses `async_hooks` for GC tracking. This means that it enables -`async_hooks` tracking, which may affect the test functionality. It also -means that between a `global.gc()` call and the listener being invoked -a full `setImmediate()` invocation passes. - -`listener` is an object to make it easier to use a closure; the target object -should not be in scope when `listener.ongc()` is created. - - -## tick Module - -The `tick` module provides a helper function that can be used to call a callback -after a given number of event loop "ticks". - -### tick(x, cb) - -* `x` [<number>] Number of event loop "ticks". -* `cb` [<Function>] A callback function. - -## tmpdir Module - -The `tmpdir` module supports the use of a temporary directory for testing. - -### path -* [<string>] - -The realpath of the testing temporary directory. - -### refresh() - -Deletes and recreates the testing temporary directory. 
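For illustration, a minimal sketch of the tmpdir helper described above; the file name written into the temporary directory is illustrative.

```js
const tmpdir = require('../common/tmpdir');
const fs = require('fs');
const path = require('path');

// Start from a clean, empty temporary directory.
tmpdir.refresh();

// Any scratch files the test needs go under tmpdir.path.
const file = path.join(tmpdir.path, 'output.txt');
fs.writeFileSync(file, 'hello');
console.log(fs.readFileSync(file, 'utf8')); // 'hello'
```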
- -## WPT Module - -The wpt.js module is a port of parts of -[W3C testharness.js](https://github.com/w3c/testharness.js) for testing the -Node.js -[WHATWG URL API](https://nodejs.org/api/url.html#url_the_whatwg_url_api) -implementation with tests from -[W3C Web Platform Tests](https://github.com/w3c/web-platform-tests). - - -[<Array>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array -[<ArrayBufferView[]>]: https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView -[<Buffer>]: https://nodejs.org/api/buffer.html#buffer_class_buffer -[<Function>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function -[<Object>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object -[<RegExp>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp -[<boolean>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type -[<number>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type -[<string>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type -[`hijackstdio.hijackStdErr()`]: #hijackstderrlistener -[`hijackstdio.hijackStdOut()`]: #hijackstdoutlistener -[internationalization]: https://github.com/nodejs/node/wiki/Intl - -function forEach (xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} diff --git a/test/common/arraystream.js b/test/common/arraystream.js deleted file mode 100644 index 167f927dff..0000000000 --- a/test/common/arraystream.js +++ /dev/null @@ -1,61 +0,0 @@ -"use strict"; - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - -/* eslint-disable node-core/required-modules */ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var _require = require('../../'), - Stream = _require.Stream; - -function noop() {} // A stream to push an array into a REPL - - -function ArrayStream() { - this.run = function (data) { - var _this = this; - - forEach(data, function (line) { - _this.emit('data', "".concat(line, "\n")); - }); - }; -} - -Object.setPrototypeOf(ArrayStream.prototype, Stream.prototype); -Object.setPrototypeOf(ArrayStream, Stream); -ArrayStream.prototype.readable = true; -ArrayStream.prototype.writable = true; -ArrayStream.prototype.pause = noop; -ArrayStream.prototype.resume = noop; -ArrayStream.prototype.write = noop; -module.exports = ArrayStream; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/benchmark.js b/test/common/benchmark.js deleted file mode 100644 index 1b368bac58..0000000000 --- a/test/common/benchmark.js +++ /dev/null @@ -1,78 +0,0 @@ -"use strict"; - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - -/* eslint-disable node-core/required-modules */ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var assert = require('assert'); - -var fork = require('child_process').fork; - -var path = require('path'); - -var runjs = path.join(__dirname, '..', '..', 'benchmark', 'run.js'); - -function runBenchmark(name, args, env) { - var 
argv = []; - - for (var _i = 0; _i < args.length; _i++) { - argv.push('--set'); - argv.push(args[_i]); - } - - argv.push(name); - var mergedEnv = Object.assign({}, process.env, env); - var child = fork(runjs, argv, { - env: mergedEnv, - stdio: ['inherit', 'pipe', 'inherit', 'ipc'] - }); - child.stdout.setEncoding('utf8'); - var stdout = ''; - child.stdout.on('data', function (line) { - stdout += line; - }); - child.on('exit', function (code, signal) { - assert.strictEqual(code, 0); - assert.strictEqual(signal, null); // This bit makes sure that each benchmark file is being sent settings such - // that the benchmark file runs just one set of options. This helps keep the - // benchmark tests from taking a long time to run. Therefore, each benchmark - // file should result in three lines of output: a blank line, a line with - // the name of the benchmark file, and a line with the only results that we - // get from testing the benchmark file. - - assert.ok(/^(?:\n.+?\n.+?\n)+$/.test(stdout), "benchmark file not running exactly one configuration in test: ".concat(stdout)); - }); -} - -module.exports = runBenchmark; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/countdown.js b/test/common/countdown.js deleted file mode 100644 index 39193672b5..0000000000 --- a/test/common/countdown.js +++ /dev/null @@ -1,80 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - -/* eslint-disable node-core/required-modules */ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var assert = require('assert'); - -var kLimit = Symbol('limit'); -var kCallback = Symbol('callback'); - -var common = require('./'); - -var Countdown = -/*#__PURE__*/ -function () { - function Countdown(limit, cb) { - _classCallCheck(this, Countdown); - - assert.strictEqual(typeof limit, 'number'); - assert.strictEqual(typeof cb, 'function'); - this[kLimit] = limit; - this[kCallback] = common.mustCall(cb); - } - - _createClass(Countdown, [{ - key: "dec", - value: function dec() { - assert(this[kLimit] > 0, 'Countdown expired'); - if (--this[kLimit] === 0) this[kCallback](); - return this[kLimit]; - } - }, { - key: "remaining", - get: function get() { - return this[kLimit]; - } - }]); - - return Countdown; -}(); - -module.exports = Countdown; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/dns.js b/test/common/dns.js deleted file mode 100644 index f63b686fd2..0000000000 --- a/test/common/dns.js +++ /dev/null @@ -1,436 +0,0 @@ -"use 
strict"; - -function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); } - -function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } - -function _iterableToArrayLimit(arr, i) { if (!(Symbol.iterator in Object(arr) || Object.prototype.toString.call(arr) === "[object Arguments]")) { return; } var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } - -function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; } - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - -/* eslint-disable node-core/required-modules */ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var assert = require('assert'); - -var os = require('os'); - -var types = { - A: 1, - AAAA: 28, - NS: 2, - CNAME: 5, - SOA: 6, - PTR: 12, - MX: 15, - TXT: 16, - ANY: 255 -}; -var classes = { - IN: 1 -}; // Naïve DNS parser/serializer. - -function readDomainFromPacket(buffer, offset) { - assert.ok(offset < buffer.length); - var length = buffer[offset]; - - if (length === 0) { - return { - nread: 1, - domain: '' - }; - } else if ((length & 0xC0) === 0) { - offset += 1; - var chunk = buffer.toString('ascii', offset, offset + length); // Read the rest of the domain. - - var _readDomainFromPacket = readDomainFromPacket(buffer, offset + length), - nread = _readDomainFromPacket.nread, - domain = _readDomainFromPacket.domain; - - return { - nread: 1 + length + nread, - domain: domain ? "".concat(chunk, ".").concat(domain) : chunk - }; - } else { - // Pointer to another part of the packet. 
- assert.strictEqual(length & 0xC0, 0xC0); // eslint-disable-next-line space-infix-ops, space-unary-ops - - var pointeeOffset = buffer.readUInt16BE(offset) & ~0xC000; - return { - nread: 2, - domain: readDomainFromPacket(buffer, pointeeOffset) - }; - } -} - -function parseDNSPacket(buffer) { - assert.ok(buffer.length > 12); - var parsed = { - id: buffer.readUInt16BE(0), - flags: buffer.readUInt16BE(2) - }; - var counts = [['questions', buffer.readUInt16BE(4)], ['answers', buffer.readUInt16BE(6)], ['authorityAnswers', buffer.readUInt16BE(8)], ['additionalRecords', buffer.readUInt16BE(10)]]; - var offset = 12; - - for (var _i = 0, _counts = counts; _i < _counts.length; _i++) { - var _counts$_i = _slicedToArray(_counts[_i], 2), - sectionName = _counts$_i[0], - count = _counts$_i[1]; - - parsed[sectionName] = []; - - for (var _i2 = 0; _i2 < count; ++_i2) { - var _readDomainFromPacket2 = readDomainFromPacket(buffer, offset), - nread = _readDomainFromPacket2.nread, - domain = _readDomainFromPacket2.domain; - - offset += nread; - var type = buffer.readUInt16BE(offset); - var rr = { - domain: domain, - cls: buffer.readUInt16BE(offset + 2) - }; - offset += 4; - - for (var name in types) { - if (types[name] === type) rr.type = name; - } - - if (sectionName !== 'questions') { - rr.ttl = buffer.readInt32BE(offset); - var dataLength = buffer.readUInt16BE(offset); - offset += 6; - - switch (type) { - case types.A: - assert.strictEqual(dataLength, 4); - rr.address = "".concat(buffer[offset + 0], ".").concat(buffer[offset + 1], ".") + "".concat(buffer[offset + 2], ".").concat(buffer[offset + 3]); - break; - - case types.AAAA: - assert.strictEqual(dataLength, 16); - rr.address = buffer.toString('hex', offset, offset + 16).replace(/(.{4}(?!$))/g, '$1:'); - break; - - case types.TXT: - { - var position = offset; - rr.entries = []; - - while (position < offset + dataLength) { - var txtLength = buffer[offset]; - rr.entries.push(buffer.toString('utf8', position + 1, position + 1 + txtLength)); - position += 1 + txtLength; - } - - assert.strictEqual(position, offset + dataLength); - break; - } - - case types.MX: - { - rr.priority = buffer.readInt16BE(buffer, offset); - offset += 2; - - var _readDomainFromPacket3 = readDomainFromPacket(buffer, offset), - _nread = _readDomainFromPacket3.nread, - _domain = _readDomainFromPacket3.domain; - - rr.exchange = _domain; - assert.strictEqual(_nread, dataLength); - break; - } - - case types.NS: - case types.CNAME: - case types.PTR: - { - var _readDomainFromPacket4 = readDomainFromPacket(buffer, offset), - _nread2 = _readDomainFromPacket4.nread, - _domain2 = _readDomainFromPacket4.domain; - - rr.value = _domain2; - assert.strictEqual(_nread2, dataLength); - break; - } - - case types.SOA: - { - var mname = readDomainFromPacket(buffer, offset); - var rname = readDomainFromPacket(buffer, offset + mname.nread); - rr.nsname = mname.domain; - rr.hostmaster = rname.domain; - var trailerOffset = offset + mname.nread + rname.nread; - rr.serial = buffer.readUInt32BE(trailerOffset); - rr.refresh = buffer.readUInt32BE(trailerOffset + 4); - rr.retry = buffer.readUInt32BE(trailerOffset + 8); - rr.expire = buffer.readUInt32BE(trailerOffset + 12); - rr.minttl = buffer.readUInt32BE(trailerOffset + 16); - assert.strictEqual(trailerOffset + 20, dataLength); - break; - } - - default: - throw new Error("Unknown RR type ".concat(rr.type)); - } - - offset += dataLength; - } - - parsed[sectionName].push(rr); - assert.ok(offset <= buffer.length); - } - } - - assert.strictEqual(offset, 
buffer.length); - return parsed; -} - -function writeIPv6(ip) { - var parts = ip.replace(/^:|:$/g, '').split(':'); - var buf = Buffer.alloc(16); - var offset = 0; - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; - - try { - for (var _iterator = parts[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var part = _step.value; - - if (part === '') { - offset += 16 - 2 * (parts.length - 1); - } else { - buf.writeUInt16BE(parseInt(part, 16), offset); - offset += 2; - } - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator.return != null) { - _iterator.return(); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } - } - - return buf; -} - -function writeDomainName(domain) { - return Buffer.concat(domain.split('.').map(function (label) { - assert(label.length < 64); - return Buffer.concat([Buffer.from([label.length]), Buffer.from(label, 'ascii')]); - }).concat([Buffer.alloc(1)])); -} - -function writeDNSPacket(parsed) { - var buffers = []; - var kStandardResponseFlags = 0x8180; - buffers.push(new Uint16Array([parsed.id, parsed.flags === undefined ? kStandardResponseFlags : parsed.flags, parsed.questions && parsed.questions.length, parsed.answers && parsed.answers.length, parsed.authorityAnswers && parsed.authorityAnswers.length, parsed.additionalRecords && parsed.additionalRecords.length])); - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - var _iteratorError2 = undefined; - - try { - for (var _iterator2 = parsed.questions[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - var q = _step2.value; - assert(types[q.type]); - buffers.push(writeDomainName(q.domain)); - buffers.push(new Uint16Array([types[q.type], q.cls === undefined ? classes.IN : q.cls])); - } - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2.return != null) { - _iterator2.return(); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } - } - - var _iteratorNormalCompletion3 = true; - var _didIteratorError3 = false; - var _iteratorError3 = undefined; - - try { - for (var _iterator3 = [].concat(parsed.answers, parsed.authorityAnswers, parsed.additionalRecords)[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) { - var rr = _step3.value; - if (!rr) continue; - assert(types[rr.type]); - buffers.push(writeDomainName(rr.domain)); - buffers.push(new Uint16Array([types[rr.type], rr.cls === undefined ? classes.IN : rr.cls])); - buffers.push(new Int32Array([rr.ttl])); - var rdLengthBuf = new Uint16Array(1); - buffers.push(rdLengthBuf); - - switch (rr.type) { - case 'A': - rdLengthBuf[0] = 4; - buffers.push(new Uint8Array(rr.address.split('.'))); - break; - - case 'AAAA': - rdLengthBuf[0] = 16; - buffers.push(writeIPv6(rr.address)); - break; - - case 'TXT': - var total = rr.entries.map(function (s) { - return s.length; - }).reduce(function (a, b) { - return a + b; - }); // Total length of all strings + 1 byte each for their lengths. 
- - rdLengthBuf[0] = rr.entries.length + total; - var _iteratorNormalCompletion4 = true; - var _didIteratorError4 = false; - var _iteratorError4 = undefined; - - try { - for (var _iterator4 = rr.entries[Symbol.iterator](), _step4; !(_iteratorNormalCompletion4 = (_step4 = _iterator4.next()).done); _iteratorNormalCompletion4 = true) { - var txt = _step4.value; - buffers.push(new Uint8Array([Buffer.byteLength(txt)])); - buffers.push(Buffer.from(txt)); - } - } catch (err) { - _didIteratorError4 = true; - _iteratorError4 = err; - } finally { - try { - if (!_iteratorNormalCompletion4 && _iterator4.return != null) { - _iterator4.return(); - } - } finally { - if (_didIteratorError4) { - throw _iteratorError4; - } - } - } - - break; - - case 'MX': - rdLengthBuf[0] = 2; - buffers.push(new Uint16Array([rr.priority])); - // fall through - - case 'NS': - case 'CNAME': - case 'PTR': - { - var domain = writeDomainName(rr.exchange || rr.value); - rdLengthBuf[0] += domain.length; - buffers.push(domain); - break; - } - - case 'SOA': - { - var mname = writeDomainName(rr.nsname); - var rname = writeDomainName(rr.hostmaster); - rdLengthBuf[0] = mname.length + rname.length + 20; - buffers.push(mname, rname); - buffers.push(new Uint32Array([rr.serial, rr.refresh, rr.retry, rr.expire, rr.minttl])); - break; - } - - default: - throw new Error("Unknown RR type ".concat(rr.type)); - } - } - } catch (err) { - _didIteratorError3 = true; - _iteratorError3 = err; - } finally { - try { - if (!_iteratorNormalCompletion3 && _iterator3.return != null) { - _iterator3.return(); - } - } finally { - if (_didIteratorError3) { - throw _iteratorError3; - } - } - } - - return Buffer.concat(buffers.map(function (typedArray) { - var buf = Buffer.from(typedArray.buffer, typedArray.byteOffset, typedArray.byteLength); - - if (os.endianness() === 'LE') { - if (typedArray.BYTES_PER_ELEMENT === 2) buf.swap16(); - if (typedArray.BYTES_PER_ELEMENT === 4) buf.swap32(); - } - - return buf; - })); -} - -var mockedErrorCode = 'ENOTFOUND'; -var mockedSysCall = 'getaddrinfo'; - -function errorLookupMock() { - var code = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : mockedErrorCode; - var syscall = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : mockedSysCall; - return function lookupWithError(hostname, dnsopts, cb) { - var err = new Error("".concat(syscall, " ").concat(code, " ").concat(hostname)); - err.code = code; - err.errno = code; - err.syscall = syscall; - err.hostname = hostname; - cb(err); - }; -} - -module.exports = { - types: types, - classes: classes, - writeDNSPacket: writeDNSPacket, - parseDNSPacket: parseDNSPacket, - errorLookupMock: errorLookupMock, - mockedErrorCode: mockedErrorCode, - mockedSysCall: mockedSysCall -}; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/duplexpair.js b/test/common/duplexpair.js deleted file mode 100644 index d4277740aa..0000000000 --- a/test/common/duplexpair.js +++ /dev/null @@ -1,118 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - -/* eslint-disable node-core/required-modules */ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var _require = require('../../'), - Duplex = _require.Duplex; - -var assert = require('assert'); - -var kCallback = Symbol('Callback'); -var kOtherSide = Symbol('Other'); - -var DuplexSocket = -/*#__PURE__*/ -function (_Duplex) { - _inherits(DuplexSocket, _Duplex); - - function DuplexSocket() { - var _this; - - _classCallCheck(this, DuplexSocket); - - _this = _possibleConstructorReturn(this, _getPrototypeOf(DuplexSocket).call(this)); - _this[kCallback] = null; - _this[kOtherSide] = null; - return _this; - } - - _createClass(DuplexSocket, [{ - key: "_read", - value: function _read() { - var callback = this[kCallback]; - - if (callback) { - this[kCallback] = null; - callback(); - } - } - }, { - key: "_write", - value: function _write(chunk, encoding, callback) { - assert.notStrictEqual(this[kOtherSide], null); - assert.strictEqual(this[kOtherSide][kCallback], null); - this[kOtherSide][kCallback] = callback; - this[kOtherSide].push(chunk); - } - }, { - key: "_final", - value: function _final(callback) { - this[kOtherSide].on('end', callback); - this[kOtherSide].push(null); - } - }]); - - return DuplexSocket; -}(Duplex); - -function makeDuplexPair() { - var clientSide = new DuplexSocket(); - var serverSide = new DuplexSocket(); - clientSide[kOtherSide] = serverSide; - serverSide[kOtherSide] = clientSide; - return { - clientSide: clientSide, - serverSide: serverSide - }; -} - -module.exports = makeDuplexPair; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/fixtures.js b/test/common/fixtures.js deleted file mode 100644 index 1ddc225e71..0000000000 --- a/test/common/fixtures.js +++ /dev/null @@ -1,74 +0,0 @@ -"use strict"; - -function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread(); } - -function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance"); } - -function _iterableToArray(iter) { if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter); } - -function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } } - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - -/* eslint-disable node-core/required-modules */ - - -'use 
strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var path = require('path'); - -var fs = require('fs'); - -var fixturesDir = path.join(__dirname, '..', 'fixtures'); - -function fixturesPath() { - for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { - args[_key] = arguments[_key]; - } - - return path.join.apply(path, [fixturesDir].concat(args)); -} - -function readFixtureSync(args, enc) { - if (Array.isArray(args)) return fs.readFileSync(fixturesPath.apply(void 0, _toConsumableArray(args)), enc); - return fs.readFileSync(fixturesPath(args), enc); -} - -function readFixtureKey(name, enc) { - return fs.readFileSync(fixturesPath('keys', name), enc); -} - -module.exports = { - fixturesDir: fixturesDir, - path: fixturesPath, - readSync: readFixtureSync, - readKey: readFixtureKey -}; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/heap.js b/test/common/heap.js deleted file mode 100644 index 0675fdf453..0000000000 --- a/test/common/heap.js +++ /dev/null @@ -1,301 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - -/* eslint-disable node-core/required-modules */ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var assert = require('assert'); -/**/ - - -var util = require('core-util-is'); - -util.inherits = require('inherits'); -/**/ - -var internalTestHeap; - -try { - internalTestHeap = require('internal/test/heap'); -} catch (e) { - console.log('using `test/common/heap.js` requires `--expose-internals`'); - throw e; -} - -var _internalTestHeap = internalTestHeap, - createJSHeapDump = _internalTestHeap.createJSHeapDump, - buildEmbedderGraph = _internalTestHeap.buildEmbedderGraph; - -function inspectNode(snapshot) { - return util.inspect(snapshot, { - depth: 4 - }); -} - -function isEdge(edge, _ref) { - var node_name = _ref.node_name, - edge_name = _ref.edge_name; - - // For ABI compatibility, we did not backport the virtual function - // AddEdge() with a name as last argument back to v10.x, so edge_name. - // is ignored. 
- // if (edge.name !== edge_name) { - // return false; - // } - // From our internal embedded graph - if (edge.to.value) { - if (edge.to.value.constructor.name !== node_name) { - return false; - } - } else if (edge.to.name !== node_name) { - return false; - } - - return true; -} - -var State = -/*#__PURE__*/ -function () { - function State() { - _classCallCheck(this, State); - - this.snapshot = createJSHeapDump(); - this.embedderGraph = buildEmbedderGraph(); - } // Validate the v8 heap snapshot - - - _createClass(State, [{ - key: "validateSnapshot", - value: function validateSnapshot(rootName, expected) { - var _ref2 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, - _ref2$loose = _ref2.loose, - loose = _ref2$loose === void 0 ? false : _ref2$loose; - - var rootNodes = this.snapshot.filter(function (node) { - return node.name === rootName && node.type !== 'string'; - }); - - if (loose) { - assert(rootNodes.length >= expected.length, "Expect to find at least ".concat(expected.length, " '").concat(rootName, "', ") + "found ".concat(rootNodes.length)); - } else { - assert.strictEqual(rootNodes.length, expected.length, "Expect to find ".concat(expected.length, " '").concat(rootName, "', ") + "found ".concat(rootNodes.length)); - } - - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; - - try { - for (var _iterator = expected[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var expectation = _step.value; - - if (expectation.children) { - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - var _iteratorError2 = undefined; - - try { - var _loop = function _loop() { - var expectedEdge = _step2.value; - var check = typeof expectedEdge === 'function' ? expectedEdge : function (edge) { - return isEdge(edge, expectedEdge); - }; - var hasChild = rootNodes.some(function (node) { - return node.outgoingEdges.some(check); - }); // Don't use assert with a custom message here. Otherwise the - // inspection in the message is done eagerly and wastes a lot of CPU - // time. - - if (!hasChild) { - throw new Error('expected to find child ' + "".concat(util.inspect(expectedEdge), " in ").concat(inspectNode(rootNodes))); - } - }; - - for (var _iterator2 = expectation.children[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - _loop(); - } - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2.return != null) { - _iterator2.return(); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } - } - } - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator.return != null) { - _iterator.return(); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } - } - } // Validate our internal embedded graph representation - - }, { - key: "validateGraph", - value: function validateGraph(rootName, expected) { - var _ref3 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, - _ref3$loose = _ref3.loose, - loose = _ref3$loose === void 0 ? 
false : _ref3$loose; - - var rootNodes = this.embedderGraph.filter(function (node) { - return node.name === rootName; - }); - - if (loose) { - assert(rootNodes.length >= expected.length, "Expect to find at least ".concat(expected.length, " '").concat(rootName, "', ") + "found ".concat(rootNodes.length)); - } else { - assert.strictEqual(rootNodes.length, expected.length, "Expect to find ".concat(expected.length, " '").concat(rootName, "', ") + "found ".concat(rootNodes.length)); - } - - var _iteratorNormalCompletion3 = true; - var _didIteratorError3 = false; - var _iteratorError3 = undefined; - - try { - for (var _iterator3 = expected[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) { - var expectation = _step3.value; - - if (expectation.children) { - var _iteratorNormalCompletion4 = true; - var _didIteratorError4 = false; - var _iteratorError4 = undefined; - - try { - var _loop2 = function _loop2() { - var expectedEdge = _step4.value; - var check = typeof expectedEdge === 'function' ? expectedEdge : function (edge) { - return isEdge(edge, expectedEdge); - }; // Don't use assert with a custom message here. Otherwise the - // inspection in the message is done eagerly and wastes a lot of CPU - // time. - - var hasChild = rootNodes.some(function (node) { - return node.edges.some(check); - }); - - if (!hasChild) { - throw new Error('expected to find child ' + "".concat(util.inspect(expectedEdge), " in ").concat(inspectNode(rootNodes))); - } - }; - - for (var _iterator4 = expectation.children[Symbol.iterator](), _step4; !(_iteratorNormalCompletion4 = (_step4 = _iterator4.next()).done); _iteratorNormalCompletion4 = true) { - _loop2(); - } - } catch (err) { - _didIteratorError4 = true; - _iteratorError4 = err; - } finally { - try { - if (!_iteratorNormalCompletion4 && _iterator4.return != null) { - _iterator4.return(); - } - } finally { - if (_didIteratorError4) { - throw _iteratorError4; - } - } - } - } - } - } catch (err) { - _didIteratorError3 = true; - _iteratorError3 = err; - } finally { - try { - if (!_iteratorNormalCompletion3 && _iterator3.return != null) { - _iterator3.return(); - } - } finally { - if (_didIteratorError3) { - throw _iteratorError3; - } - } - } - } - }, { - key: "validateSnapshotNodes", - value: function validateSnapshotNodes(rootName, expected) { - var _ref4 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, - _ref4$loose = _ref4.loose, - loose = _ref4$loose === void 0 ? 
false : _ref4$loose; - - this.validateSnapshot(rootName, expected, { - loose: loose - }); - this.validateGraph(rootName, expected, { - loose: loose - }); - } - }]); - - return State; -}(); - -function recordState() { - return new State(); -} - -function validateSnapshotNodes() { - var _recordState; - - return (_recordState = recordState()).validateSnapshotNodes.apply(_recordState, arguments); -} - -module.exports = { - recordState: recordState, - validateSnapshotNodes: validateSnapshotNodes -}; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/hijackstdio.js b/test/common/hijackstdio.js deleted file mode 100644 index b3003f468a..0000000000 --- a/test/common/hijackstdio.js +++ /dev/null @@ -1,73 +0,0 @@ -"use strict"; - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - -/* eslint-disable node-core/required-modules */ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ -// Hijack stdout and stderr - - -var stdWrite = {}; - -function hijackStdWritable(name, listener) { - var stream = process[name]; - - var _write = stdWrite[name] = stream.write; - - stream.writeTimes = 0; - - stream.write = function (data, callback) { - try { - listener(data); - } catch (e) { - process.nextTick(function () { - throw e; - }); - } - - _write.call(stream, data, callback); - - stream.writeTimes++; - }; -} - -function restoreWritable(name) { - process[name].write = stdWrite[name]; - delete process[name].writeTimes; -} - -module.exports = { - hijackStdout: hijackStdWritable.bind(null, 'stdout'), - hijackStderr: hijackStdWritable.bind(null, 'stderr'), - restoreStdout: restoreWritable.bind(null, 'stdout'), - restoreStderr: restoreWritable.bind(null, 'stderr') -}; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/http2.js b/test/common/http2.js deleted file mode 100644 index 73ea152027..0000000000 --- a/test/common/http2.js +++ /dev/null @@ -1,259 +0,0 @@ -"use strict"; - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - -/* eslint-disable node-core/required-modules */ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ -// An HTTP/2 testing tool used to create mock frames for direct testing -// of HTTP/2 endpoints. 
- - -var kFrameData = Symbol('frame-data'); -var FLAG_EOS = 0x1; -var FLAG_ACK = 0x1; -var FLAG_EOH = 0x4; -var FLAG_PADDED = 0x8; -var PADDING = Buffer.alloc(255); -var kClientMagic = Buffer.from('505249202a20485454502f322' + 'e300d0a0d0a534d0d0a0d0a', 'hex'); -var kFakeRequestHeaders = Buffer.from('828684410f7777772e65' + '78616d706c652e636f6d', 'hex'); -var kFakeResponseHeaders = Buffer.from('4803333032580770726976617465611d' + '4d6f6e2c203231204f63742032303133' + '2032303a31333a323120474d546e1768' + '747470733a2f2f7777772e6578616d70' + '6c652e636f6d', 'hex'); - -function isUint32(val) { - return val >>> 0 === val; -} - -function isUint24(val) { - return val >>> 0 === val && val <= 0xFFFFFF; -} - -function isUint8(val) { - return val >>> 0 === val && val <= 0xFF; -} - -function write32BE(array, pos, val) { - if (!isUint32(val)) throw new RangeError('val is not a 32-bit number'); - array[pos++] = val >> 24 & 0xff; - array[pos++] = val >> 16 & 0xff; - array[pos++] = val >> 8 & 0xff; - array[pos++] = val & 0xff; -} - -function write24BE(array, pos, val) { - if (!isUint24(val)) throw new RangeError('val is not a 24-bit number'); - array[pos++] = val >> 16 & 0xff; - array[pos++] = val >> 8 & 0xff; - array[pos++] = val & 0xff; -} - -function write8(array, pos, val) { - if (!isUint8(val)) throw new RangeError('val is not an 8-bit number'); - array[pos] = val; -} - -var Frame = -/*#__PURE__*/ -function () { - function Frame(length, type, flags, id) { - _classCallCheck(this, Frame); - - this[kFrameData] = Buffer.alloc(9); - write24BE(this[kFrameData], 0, length); - write8(this[kFrameData], 3, type); - write8(this[kFrameData], 4, flags); - write32BE(this[kFrameData], 5, id); - } - - _createClass(Frame, [{ - key: "data", - get: function get() { - return this[kFrameData]; - } - }]); - - return Frame; -}(); - -var SettingsFrame = -/*#__PURE__*/ -function (_Frame) { - _inherits(SettingsFrame, _Frame); - - function SettingsFrame() { - var ack = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; - - _classCallCheck(this, SettingsFrame); - - var flags = 0; - if (ack) flags |= FLAG_ACK; - return _possibleConstructorReturn(this, _getPrototypeOf(SettingsFrame).call(this, 0, 4, flags, 0)); - } - - return SettingsFrame; -}(Frame); - -var DataFrame = -/*#__PURE__*/ -function (_Frame2) { - _inherits(DataFrame, _Frame2); - - function DataFrame(id, payload) { - var _this; - - var padlen = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0; - var final = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false; - - _classCallCheck(this, DataFrame); - - var len = payload.length; - var flags = 0; - if (final) flags |= FLAG_EOS; - var buffers = [payload]; - - if (padlen > 0) { - buffers.unshift(Buffer.from([padlen])); - buffers.push(PADDING.slice(0, padlen)); - len += padlen + 1; - flags |= FLAG_PADDED; - } - - _this = _possibleConstructorReturn(this, _getPrototypeOf(DataFrame).call(this, len, 0, flags, id)); - buffers.unshift(_this[kFrameData]); - _this[kFrameData] = Buffer.concat(buffers); - return _this; - } - - return DataFrame; -}(Frame); - -var HeadersFrame = -/*#__PURE__*/ -function (_Frame3) { - _inherits(HeadersFrame, _Frame3); - - function HeadersFrame(id, payload) { - var _this2; - - var padlen = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0; - var final = arguments.length > 3 && arguments[3] !== undefined ? 
arguments[3] : false; - - _classCallCheck(this, HeadersFrame); - - var len = payload.length; - var flags = FLAG_EOH; - if (final) flags |= FLAG_EOS; - var buffers = [payload]; - - if (padlen > 0) { - buffers.unshift(Buffer.from([padlen])); - buffers.push(PADDING.slice(0, padlen)); - len += padlen + 1; - flags |= FLAG_PADDED; - } - - _this2 = _possibleConstructorReturn(this, _getPrototypeOf(HeadersFrame).call(this, len, 1, flags, id)); - buffers.unshift(_this2[kFrameData]); - _this2[kFrameData] = Buffer.concat(buffers); - return _this2; - } - - return HeadersFrame; -}(Frame); - -var PingFrame = -/*#__PURE__*/ -function (_Frame4) { - _inherits(PingFrame, _Frame4); - - function PingFrame() { - var _this3; - - var ack = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; - - _classCallCheck(this, PingFrame); - - var buffers = [Buffer.alloc(8)]; - _this3 = _possibleConstructorReturn(this, _getPrototypeOf(PingFrame).call(this, 8, 6, ack ? 1 : 0, 0)); - buffers.unshift(_this3[kFrameData]); - _this3[kFrameData] = Buffer.concat(buffers); - return _this3; - } - - return PingFrame; -}(Frame); - -var AltSvcFrame = -/*#__PURE__*/ -function (_Frame5) { - _inherits(AltSvcFrame, _Frame5); - - function AltSvcFrame(size) { - var _this4; - - _classCallCheck(this, AltSvcFrame); - - var buffers = [Buffer.alloc(size)]; - _this4 = _possibleConstructorReturn(this, _getPrototypeOf(AltSvcFrame).call(this, size, 10, 0, 0)); - buffers.unshift(_this4[kFrameData]); - _this4[kFrameData] = Buffer.concat(buffers); - return _this4; - } - - return AltSvcFrame; -}(Frame); - -module.exports = { - Frame: Frame, - AltSvcFrame: AltSvcFrame, - DataFrame: DataFrame, - HeadersFrame: HeadersFrame, - SettingsFrame: SettingsFrame, - PingFrame: PingFrame, - kFakeRequestHeaders: kFakeRequestHeaders, - kFakeResponseHeaders: kFakeResponseHeaders, - kClientMagic: kClientMagic -}; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/index.js b/test/common/index.js deleted file mode 100644 index 8c7f9d030a..0000000000 --- a/test/common/index.js +++ /dev/null @@ -1,950 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread(); } - -function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance"); } - -function _iterableToArray(iter) { if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter); } - -function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } } - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/* eslint-disable node-core/required-modules, node-core/crypto-check */ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var process = global.process; // Some tests tamper with the process global. - -var path = require('path'); - -var fs = require('fs'); - -var assert = require('assert'); - -var os = require('os'); - -var _require = require('child_process'), - exec = _require.exec, - execSync = _require.execSync, - spawnSync = _require.spawnSync; -/**/ - - -var util = require('core-util-is'); - -util.inherits = require('inherits'); -/**/ - -var Timer = { - now: function now() {} -}; - -var tmpdir = require('./tmpdir'); - -var _process$binding = process.binding('config'), - bits = _process$binding.bits, - hasIntl = _process$binding.hasIntl; - -var noop = function noop() {}; - -var hasCrypto = true; - -var isMainThread = function () { - if (false) { - return require('worker_threads').isMainThread; - } // Worker module not enabled → only a single main thread exists. - - - return true; -}(); // Check for flags. Skip this for workers (both, the `cluster` module and -// `worker_threads`) and child processes. - - -if (false && isMainThread && module.parent && require('cluster').isMaster) { - // The copyright notice is relatively big and the flags could come afterwards. - var bytesToRead = 1500; - var buffer = Buffer.allocUnsafe(bytesToRead); - var fd = fs.openSync(module.parent.filename, 'r'); - var bytesRead = fs.readSync(fd, buffer, 0, bytesToRead); - fs.closeSync(fd); - var source = buffer.toString('utf8', 0, bytesRead); - var flagStart = source.indexOf('// Flags: --') + 10; - - if (flagStart !== 9) { - var flagEnd = source.indexOf('\n', flagStart); // Normalize different EOL. 
- - if (source[flagEnd - 1] === '\r') { - flagEnd--; - } - - var flags = source.substring(flagStart, flagEnd).replace(/_/g, '-').split(' '); - var args = process.execArgv.map(function (arg) { - return arg.replace(/_/g, '-'); - }); - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; - - try { - for (var _iterator = flags[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var flag = _step.value; - - if (!args.includes(flag) && // If the binary was built without-ssl then the crypto flags are - // invalid (bad option). The test itself should handle this case. - hasCrypto && ( // If the binary is build without `intl` the inspect option is - // invalid. The test itself should handle this case. - process.config.variables.v8_enable_inspector !== 0 || !flag.startsWith('--inspect'))) { - throw new Error("Test has to be started with the flag: '".concat(flag, "'")); - } - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator.return != null) { - _iterator.return(); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } - } - } -} - -var isWindows = process.platform === 'win32'; -var isAIX = process.platform === 'aix'; -var isLinuxPPCBE = process.platform === 'linux' && process.arch === 'ppc64' && os.endianness() === 'BE'; -var isSunOS = process.platform === 'sunos'; -var isFreeBSD = process.platform === 'freebsd'; -var isOpenBSD = process.platform === 'openbsd'; -var isLinux = process.platform === 'linux'; -var isOSX = process.platform === 'darwin'; -var enoughTestMem = os.totalmem() > 0x70000000; -/* 1.75 Gb */ - -var cpus = os.cpus().length === 0 ? [{ - speed: 1000 -}] : os.cpus(); -var enoughTestCpu = Array.isArray(cpus) && (cpus.length > 1 || cpus[0].speed > 999); -var rootDir = isWindows ? 'c:\\' : '/'; -var buildType = 'readable-stream'; // If env var is set then enable async_hook hooks for all tests. 
- -if (process.env.NODE_TEST_WITH_ASYNC_HOOKS) { - var destroydIdsList = {}; - var destroyListList = {}; - var initHandles = {}; - var async_wrap = process.binding('async_wrap'); - process.on('exit', function () { - // iterate through handles to make sure nothing crashes - for (var k in initHandles) { - util.inspect(initHandles[k]); - } - }); - var _queueDestroyAsyncId = async_wrap.queueDestroyAsyncId; - - async_wrap.queueDestroyAsyncId = function queueDestroyAsyncId(id) { - if (destroyListList[id] !== undefined) { - process._rawDebug(destroyListList[id]); - - process._rawDebug(); - - throw new Error("same id added to destroy list twice (".concat(id, ")")); - } - - destroyListList[id] = new Error().stack; - - _queueDestroyAsyncId(id); - }; - /*require('async_hooks').createHook({ - init(id, ty, tr, r) { - if (initHandles[id]) { - process._rawDebug( - `Is same resource: ${r === initHandles[id].resource}`); - process._rawDebug(`Previous stack:\n${initHandles[id].stack}\n`); - throw new Error(`init called twice for same id (${id})`); - } - initHandles[id] = { resource: r, stack: new Error().stack.substr(6) }; - }, - before() { }, - after() { }, - destroy(id) { - if (destroydIdsList[id] !== undefined) { - process._rawDebug(destroydIdsList[id]); - process._rawDebug(); - throw new Error(`destroy called for same id (${id})`); - } - destroydIdsList[id] = new Error().stack; - }, - }).enable();*/ - -} - -var opensslCli = null; -var inFreeBSDJail = null; -var localhostIPv4 = null; -var localIPv6Hosts = isLinux ? [// Debian/Ubuntu -'ip6-localhost', 'ip6-loopback', // SUSE -'ipv6-localhost', 'ipv6-loopback', // Typically universal -'localhost'] : ['localhost']; - -var PIPE = function () { - var localRelative = path.relative(process.cwd(), "".concat(tmpdir.path, "/")); - var pipePrefix = isWindows ? '\\\\.\\pipe\\' : localRelative; - var pipeName = "node-test.".concat(process.pid, ".sock"); - return path.join(pipePrefix, pipeName); -}(); - -var hasIPv6 = function () { - var iFaces = os.networkInterfaces(); - var re = isWindows ? /Loopback Pseudo-Interface/ : /lo/; - return objectKeys(iFaces).some(function (name) { - return re.test(name) && iFaces[name].some(function (_ref) { - var family = _ref.family; - return family === 'IPv6'; - }); - }); -}(); -/* - * Check that when running a test with - * `$node --abort-on-uncaught-exception $file child` - * the process aborts. - */ - - -function childShouldThrowAndAbort() { - var testCmd = ''; - - if (!isWindows) { - // Do not create core files, as it can take a lot of disk space on - // continuous testing and developers' machines - testCmd += 'ulimit -c 0 && '; - } - - testCmd += "\"".concat(process.argv[0], "\" --abort-on-uncaught-exception "); - testCmd += "\"".concat(process.argv[1], "\" child"); - var child = exec(testCmd); - child.on('exit', function onExit(exitCode, signal) { - var errMsg = 'Test should have aborted ' + "but instead exited with exit code ".concat(exitCode) + " and signal ".concat(signal); - assert(nodeProcessAborted(exitCode, signal), errMsg); - }); -} - -function createZeroFilledFile(filename) { - var fd = fs.openSync(filename, 'w'); - fs.ftruncateSync(fd, 10 * 1024 * 1024); - fs.closeSync(fd); -} - -var pwdCommand = isWindows ? 
['cmd.exe', ['/d', '/c', 'cd']] : ['pwd', []]; - -function platformTimeout(ms) { - if (process.features.debug) ms = 2 * ms; - if (global.__coverage__) ms = 4 * ms; - if (isAIX) return 2 * ms; // default localhost speed is slower on AIX - - if (process.arch !== 'arm') return ms; - var armv = process.config.variables.arm_version; - if (armv === '6') return 7 * ms; // ARMv6 - - if (armv === '7') return 2 * ms; // ARMv7 - - return ms; // ARMv8+ -} - -var knownGlobals = [Buffer, clearImmediate, clearInterval, clearTimeout, global, process, setImmediate, setInterval, setTimeout]; - -if (global.gc) { - knownGlobals.push(global.gc); -} - -if (global.DTRACE_HTTP_SERVER_RESPONSE) { - knownGlobals.push(DTRACE_HTTP_SERVER_RESPONSE); - knownGlobals.push(DTRACE_HTTP_SERVER_REQUEST); - knownGlobals.push(DTRACE_HTTP_CLIENT_RESPONSE); - knownGlobals.push(DTRACE_HTTP_CLIENT_REQUEST); - knownGlobals.push(DTRACE_NET_STREAM_END); - knownGlobals.push(DTRACE_NET_SERVER_CONNECTION); -} - -if (global.COUNTER_NET_SERVER_CONNECTION) { - knownGlobals.push(COUNTER_NET_SERVER_CONNECTION); - knownGlobals.push(COUNTER_NET_SERVER_CONNECTION_CLOSE); - knownGlobals.push(COUNTER_HTTP_SERVER_REQUEST); - knownGlobals.push(COUNTER_HTTP_SERVER_RESPONSE); - knownGlobals.push(COUNTER_HTTP_CLIENT_REQUEST); - knownGlobals.push(COUNTER_HTTP_CLIENT_RESPONSE); -} - -if (process.env.NODE_TEST_KNOWN_GLOBALS) { - var knownFromEnv = process.env.NODE_TEST_KNOWN_GLOBALS.split(','); - allowGlobals.apply(void 0, _toConsumableArray(knownFromEnv)); -} - -function allowGlobals() { - for (var _len = arguments.length, whitelist = new Array(_len), _key = 0; _key < _len; _key++) { - whitelist[_key] = arguments[_key]; - } - - knownGlobals = knownGlobals.concat(whitelist); -} -/**/ - - -if (typeof constructor == 'function') knownGlobals.push(constructor); -if (typeof DTRACE_NET_SOCKET_READ == 'function') knownGlobals.push(DTRACE_NET_SOCKET_READ); -if (typeof DTRACE_NET_SOCKET_WRITE == 'function') knownGlobals.push(DTRACE_NET_SOCKET_WRITE); -if (global.__coverage__) knownGlobals.push(__coverage__); -'console,clearImmediate,setImmediate,core,__core-js_shared__,Promise,Map,Set,WeakMap,WeakSet,Reflect,System,queueMicrotask,asap,Observable,regeneratorRuntime,_babelPolyfill'.split(',').filter(function (item) { - return typeof global[item] !== undefined; -}).forEach(function (item) { - knownGlobals.push(global[item]); -}); -/**/ - -function leakedGlobals() { - var leaked = []; - - for (var val in global) { - if (!knownGlobals.includes(global[val])) { - leaked.push(val); - } - } - - if (global.__coverage__) { - return leaked.filter(function (varname) { - return !/^(?:cov_|__cov)/.test(varname); - }); - } else { - return leaked; - } -} - -process.on('exit', function () { - var leaked = leakedGlobals(); - - if (leaked.length > 0) { - assert.fail("Unexpected global(s) found: ".concat(leaked.join(', '))); - } -}); -var mustCallChecks = []; - -function runCallChecks(exitCode) { - if (exitCode !== 0) return; - var failed = mustCallChecks.filter(function (context) { - if ('minimum' in context) { - context.messageSegment = "at least ".concat(context.minimum); - return context.actual < context.minimum; - } else { - context.messageSegment = "exactly ".concat(context.exact); - return context.actual !== context.exact; - } - }); - forEach(failed, function (context) { - console.log('Mismatched %s function calls. 
Expected %s, actual %d.', context.name, context.messageSegment, context.actual); - console.log(context.stack.split('\n').slice(2).join('\n')); - }); - if (failed.length) process.exit(1); -} - -function mustCall(fn, exact) { - return _mustCallInner(fn, exact, 'exact'); -} - -function mustCallAtLeast(fn, minimum) { - return _mustCallInner(fn, minimum, 'minimum'); -} - -function _mustCallInner(fn) { - var _context; - - var criteria = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 1; - var field = arguments.length > 2 ? arguments[2] : undefined; - if (process._exiting) throw new Error('Cannot use common.mustCall*() in process exit handler'); - - if (typeof fn === 'number') { - criteria = fn; - fn = noop; - } else if (fn === undefined) { - fn = noop; - } - - if (typeof criteria !== 'number') throw new TypeError("Invalid ".concat(field, " value: ").concat(criteria)); - var context = (_context = {}, _defineProperty(_context, field, criteria), _defineProperty(_context, "actual", 0), _defineProperty(_context, "stack", new Error().stack), _defineProperty(_context, "name", fn.name || ''), _context); // add the exit listener only once to avoid listener leak warnings - - if (mustCallChecks.length === 0) process.on('exit', runCallChecks); - mustCallChecks.push(context); - return function () { - context.actual++; - return fn.apply(this, arguments); - }; -} - -function hasMultiLocalhost() { - var _process$binding2 = process.binding('tcp_wrap'), - TCP = _process$binding2.TCP, - TCPConstants = _process$binding2.constants; - - var t = new TCP(TCPConstants.SOCKET); - var ret = t.bind('127.0.0.2', 0); - t.close(); - return ret === 0; -} - -function skipIfEslintMissing() { - if (!fs.existsSync(path.join(__dirname, '..', '..', 'tools', 'node_modules', 'eslint'))) { - skip('missing ESLint'); - } -} - -function canCreateSymLink() { - // On Windows, creating symlinks requires admin privileges. - // We'll only try to run symlink test if we have enough privileges. 
- // On other platforms, creating symlinks shouldn't need admin privileges - if (isWindows) { - // whoami.exe needs to be the one from System32 - // If unix tools are in the path, they can shadow the one we want, - // so use the full path while executing whoami - var whoamiPath = path.join(process.env.SystemRoot, 'System32', 'whoami.exe'); - - try { - var output = execSync("".concat(whoamiPath, " /priv"), { - timeout: 1000 - }); - return output.includes('SeCreateSymbolicLinkPrivilege'); - } catch (_e) { - return false; - } - } // On non-Windows platforms, this always returns `true` - - - return true; -} - -function getCallSite(top) { - var originalStackFormatter = Error.prepareStackTrace; - - Error.prepareStackTrace = function (err, stack) { - return "".concat(stack[0].getFileName(), ":").concat(stack[0].getLineNumber()); - }; - - var err = new Error(); - Error.captureStackTrace(err, top); // with the V8 Error API, the stack is not formatted until it is accessed - - err.stack; - Error.prepareStackTrace = originalStackFormatter; - return err.stack; -} - -function mustNotCall(msg) { - var callSite = getCallSite(mustNotCall); - return function mustNotCall() { - assert.fail("".concat(msg || 'function should not have been called', " at ").concat(callSite)); - }; -} - -function printSkipMessage(msg) { - console.log("1..0 # Skipped: ".concat(msg)); -} - -function skip(msg) { - printSkipMessage(msg); - process.exit(0); -} // Returns true if the exit code "exitCode" and/or signal name "signal" -// represent the exit code and/or signal name of a node process that aborted, -// false otherwise. - - -function nodeProcessAborted(exitCode, signal) { - // Depending on the compiler used, node will exit with either - // exit code 132 (SIGILL), 133 (SIGTRAP) or 134 (SIGABRT). - var expectedExitCodes = [132, 133, 134]; // On platforms using KSH as the default shell (like SmartOS), - // when a process aborts, KSH exits with an exit code that is - // greater than 256, and thus the exit code emitted with the 'exit' - // event is null and the signal is set to either SIGILL, SIGTRAP, - // or SIGABRT (depending on the compiler). - - var expectedSignals = ['SIGILL', 'SIGTRAP', 'SIGABRT']; // On Windows, 'aborts' are of 2 types, depending on the context: - // (i) Forced access violation, if --abort-on-uncaught-exception is on - // which corresponds to exit code 3221225477 (0xC0000005) - // (ii) Otherwise, _exit(134) which is called in place of abort() due to - // raising SIGABRT exiting with ambiguous exit code '3' by default - - if (isWindows) expectedExitCodes = [0xC0000005, 134]; // When using --abort-on-uncaught-exception, V8 will use - // base::OS::Abort to terminate the process. - // Depending on the compiler used, the shell or other aspects of - // the platform used to build the node binary, this will actually - // make V8 exit by aborting or by raising a signal. In any case, - // one of them (exit code or signal) needs to be set to one of - // the expected exit codes or signals.
- - if (signal !== null) { - return expectedSignals.includes(signal); - } else { - return expectedExitCodes.includes(exitCode); - } -} - -function busyLoop(time) { - var startTime = Timer.now(); - var stopTime = startTime + time; - - while (Timer.now() < stopTime) {} -} - -function isAlive(pid) { - try { - process.kill(pid, 'SIGCONT'); - return true; - } catch (_unused) { - return false; - } -} - -function _expectWarning(name, expected) { - var map = new Map(expected); - return mustCall(function (warning) { - assert.strictEqual(warning.name, name); - assert.ok(map.has(warning.message), "unexpected error message: \"".concat(warning.message, "\"")); - var code = map.get(warning.message); - assert.strictEqual(warning.code, code); // Remove a warning message after it is seen so that we guarantee that we - // get each message only once. - - map.delete(expected); - }, expected.length); -} - -function expectWarningByName(name, expected, code) { - if (typeof expected === 'string') { - expected = [[expected, code]]; - } - - process.on('warning', _expectWarning(name, expected)); -} - -function expectWarningByMap(warningMap) { - var catchWarning = {}; - forEach(objectKeys(warningMap), function (name) { - var expected = warningMap[name]; - - if (!Array.isArray(expected)) { - throw new Error('warningMap entries must be arrays consisting of two ' + 'entries: [message, warningCode]'); - } - - if (!Array.isArray(expected[0])) { - if (expected.length === 0) { - return; - } - - expected = [[expected[0], expected[1]]]; - } - - catchWarning[name] = _expectWarning(name, expected); - }); - process.on('warning', function (warning) { - return catchWarning[warning.name](warning); - }); -} // accepts a warning name and description or array of descriptions or a map -// of warning names to description(s) -// ensures a warning is generated for each name/description pair - - -function expectWarning(nameOrMap, expected, code) { - if (typeof nameOrMap === 'string') { - expectWarningByName(nameOrMap, expected, code); - } else { - expectWarningByMap(nameOrMap); - } -} - -var Comparison = function Comparison(obj, keys) { - _classCallCheck(this, Comparison); - - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - var _iteratorError2 = undefined; - - try { - for (var _iterator2 = keys[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - var key = _step2.value; - if (key in obj) this[key] = obj[key]; - } - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2.return != null) { - _iterator2.return(); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } - } -}; // Useful for testing expected internal/error objects - - -function expectsError(fn, settings, exact) { - if (typeof fn !== 'function') { - exact = settings; - settings = fn; - fn = undefined; - } - - function innerFn(error) { - if (arguments.length !== 1) { - // Do not use `assert.strictEqual()` to prevent `util.inspect` from - // always being called. 
- assert.fail("Expected one argument, got ".concat(util.inspect(arguments))); - } - - var descriptor = Object.getOwnPropertyDescriptor(error, 'message'); // The error message should be non-enumerable - - assert.strictEqual(descriptor.enumerable, false); - var innerSettings = settings; - - if ('type' in settings) { - var type = settings.type; - - if (type !== Error && !Error.isPrototypeOf(type)) { - throw new TypeError('`settings.type` must inherit from `Error`'); - } - - var _constructor = error.constructor; - - if (_constructor.name === 'NodeError' && type.name !== 'NodeError') { - _constructor = Object.getPrototypeOf(error.constructor); - } // Add the `type` to the error to properly compare and visualize it. - - - if (!('type' in error)) error.type = _constructor; - } - - if ('message' in settings && typeof settings.message === 'object' && settings.message.test(error.message)) { - // Make a copy so we are able to modify the settings. - innerSettings = Object.create(settings, Object.getOwnPropertyDescriptors(settings)); // Visualize the message as identical in case of other errors. - - innerSettings.message = error.message; - } // Check all error properties. - - - var keys = objectKeys(settings); - var _iteratorNormalCompletion3 = true; - var _didIteratorError3 = false; - var _iteratorError3 = undefined; - - try { - for (var _iterator3 = keys[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) { - var key = _step3.value; - - if (!require('deep-strict-equal')(error[key], innerSettings[key])) { - // Create placeholder objects to create a nice output. - var a = new Comparison(error, keys); - var b = new Comparison(innerSettings, keys); - var tmpLimit = Error.stackTraceLimit; - Error.stackTraceLimit = 0; - var err = new assert.AssertionError({ - actual: a, - expected: b, - operator: 'strictEqual', - stackStartFn: assert.throws - }); - Error.stackTraceLimit = tmpLimit; - throw new assert.AssertionError({ - actual: error, - expected: settings, - operator: 'common.expectsError', - message: err.message - }); - } - } - } catch (err) { - _didIteratorError3 = true; - _iteratorError3 = err; - } finally { - try { - if (!_iteratorNormalCompletion3 && _iterator3.return != null) { - _iterator3.return(); - } - } finally { - if (_didIteratorError3) { - throw _iteratorError3; - } - } - } - - return true; - } - - if (fn) { - assert.throws(fn, innerFn); - return; - } - - return mustCall(innerFn, exact); -} - -function skipIfInspectorDisabled() { - if (process.config.variables.v8_enable_inspector === 0) { - skip('V8 inspector is disabled'); - } -} - -function skipIf32Bits() { - if (bits < 64) { - skip('The tested feature is not available in 32bit builds'); - } -} - -function skipIfWorker() { - if (!isMainThread) { - skip('This test only works on a main thread'); - } -} - -function getArrayBufferViews(buf) { - var buffer = buf.buffer, - byteOffset = buf.byteOffset, - byteLength = buf.byteLength; - var out = []; - var arrayBufferViews = [Int8Array, Uint8Array, Uint8ClampedArray, Int16Array, Uint16Array, Int32Array, Uint32Array, Float32Array, Float64Array, DataView]; - - for (var _i = 0, _arrayBufferViews = arrayBufferViews; _i < _arrayBufferViews.length; _i++) { - var type = _arrayBufferViews[_i]; - var _type$BYTES_PER_ELEME = type.BYTES_PER_ELEMENT, - BYTES_PER_ELEMENT = _type$BYTES_PER_ELEME === void 0 ? 
1 : _type$BYTES_PER_ELEME; - - if (byteLength % BYTES_PER_ELEMENT === 0) { - out.push(new type(buffer, byteOffset, byteLength / BYTES_PER_ELEMENT)); - } - } - - return out; -} - -function getBufferSources(buf) { - return [].concat(_toConsumableArray(getArrayBufferViews(buf)), [new Uint8Array(buf).buffer]); -} // Crash the process on unhandled rejections. - - -var crashOnUnhandledRejection = function crashOnUnhandledRejection(err) { - throw err; -}; - -process.on('unhandledRejection', crashOnUnhandledRejection); - -function disableCrashOnUnhandledRejection() { - process.removeListener('unhandledRejection', crashOnUnhandledRejection); -} - -function getTTYfd() { - // Do our best to grab a tty fd. - var tty = require('tty'); // Don't attempt fd 0 as it is not writable on Windows. - // Ref: ef2861961c3d9e9ed6972e1e84d969683b25cf95 - - - var ttyFd = [1, 2, 4, 5].find(tty.isatty); - - if (ttyFd === undefined) { - try { - return fs.openSync('/dev/tty'); - } catch (_unused2) { - // There aren't any tty fd's available to use. - return -1; - } - } - - return ttyFd; -} - -function runWithInvalidFD(func) { - var fd = 1 << 30; // Get first known bad file descriptor. 1 << 30 is usually unlikely to - // be an valid one. - - try { - while (fs.fstatSync(fd--) && fd > 0) { - ; - } - } catch (_unused3) { - return func(fd); - } - - printSkipMessage('Could not generate an invalid fd'); -} - -module.exports = { - allowGlobals: allowGlobals, - buildType: buildType, - busyLoop: busyLoop, - canCreateSymLink: canCreateSymLink, - childShouldThrowAndAbort: childShouldThrowAndAbort, - createZeroFilledFile: createZeroFilledFile, - disableCrashOnUnhandledRejection: disableCrashOnUnhandledRejection, - enoughTestCpu: enoughTestCpu, - enoughTestMem: enoughTestMem, - expectsError: expectsError, - expectWarning: expectWarning, - getArrayBufferViews: getArrayBufferViews, - getBufferSources: getBufferSources, - getCallSite: getCallSite, - getTTYfd: getTTYfd, - hasIntl: hasIntl, - hasCrypto: hasCrypto, - hasIPv6: hasIPv6, - hasMultiLocalhost: hasMultiLocalhost, - isAIX: isAIX, - isAlive: isAlive, - isFreeBSD: isFreeBSD, - isLinux: isLinux, - isLinuxPPCBE: isLinuxPPCBE, - isMainThread: isMainThread, - isOpenBSD: isOpenBSD, - isOSX: isOSX, - isSunOS: isSunOS, - isWindows: isWindows, - localIPv6Hosts: localIPv6Hosts, - mustCall: mustCall, - mustCallAtLeast: mustCallAtLeast, - mustNotCall: mustNotCall, - nodeProcessAborted: nodeProcessAborted, - noWarnCode: undefined, - PIPE: PIPE, - platformTimeout: platformTimeout, - printSkipMessage: printSkipMessage, - pwdCommand: pwdCommand, - rootDir: rootDir, - runWithInvalidFD: runWithInvalidFD, - skip: skip, - skipIf32Bits: skipIf32Bits, - skipIfEslintMissing: skipIfEslintMissing, - skipIfInspectorDisabled: skipIfInspectorDisabled, - skipIfWorker: skipIfWorker, - - get localhostIPv6() { - return '::1'; - }, - - get hasFipsCrypto() { - return hasCrypto && require('crypto').fips; - }, - - get inFreeBSDJail() { - if (inFreeBSDJail !== null) return inFreeBSDJail; - - if (exports.isFreeBSD && execSync('sysctl -n security.jail.jailed').toString() === '1\n') { - inFreeBSDJail = true; - } else { - inFreeBSDJail = false; - } - - return inFreeBSDJail; - }, - - get localhostIPv4() { - if (localhostIPv4 !== null) return localhostIPv4; - - if (this.inFreeBSDJail) { - // Jailed network interfaces are a bit special - since we need to jump - // through loops, as well as this being an exception case, assume the - // user will provide this instead. 
- if (process.env.LOCALHOST) { - localhostIPv4 = process.env.LOCALHOST; - } else { - console.error('Looks like we\'re in a FreeBSD Jail. ' + 'Please provide your default interface address ' + 'as LOCALHOST or expect some tests to fail.'); - } - } - - if (localhostIPv4 === null) localhostIPv4 = '127.0.0.1'; - return localhostIPv4; - }, - - // opensslCli defined lazily to reduce overhead of spawnSync - get opensslCli() { - if (opensslCli !== null) return opensslCli; - - if (process.config.variables.node_shared_openssl) { - // use external command - opensslCli = 'openssl'; - } else { - // use command built from sources included in Node.js repository - opensslCli = path.join(path.dirname(process.execPath), 'openssl-cli'); - } - - if (exports.isWindows) opensslCli += '.exe'; - var opensslCmd = spawnSync(opensslCli, ['version']); - - if (opensslCmd.status !== 0 || opensslCmd.error !== undefined) { - // openssl command cannot be executed - opensslCli = false; - } - - return opensslCli; - }, - - get PORT() { - if (+process.env.TEST_PARALLEL) { - throw new Error('common.PORT cannot be used in a parallelized test'); - } - - return +process.env.NODE_COMMON_PORT || 12346; - } - -}; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/index.mjs b/test/common/index.mjs deleted file mode 100644 index d320100604..0000000000 --- a/test/common/index.mjs +++ /dev/null @@ -1,107 +0,0 @@ -/**/ - require('@babel/polyfill'); - var util = require('util'); - for (var i in util) exports[i] = util[i]; - /**/// Flags: --experimental-modules -/* eslint-disable node-core/required-modules */ -import common from './index.js'; - -const { - isMainThread, - isWindows, - isAIX, - isLinuxPPCBE, - isSunOS, - isFreeBSD, - isOpenBSD, - isLinux, - isOSX, - enoughTestMem, - enoughTestCpu, - rootDir, - buildType, - localIPv6Hosts, - opensslCli, - PIPE, - hasIPv6, - childShouldThrowAndAbort, - createZeroFilledFile, - platformTimeout, - allowGlobals, - mustCall, - mustCallAtLeast, - hasMultiLocalhost, - skipIfEslintMissing, - canCreateSymLink, - getCallSite, - mustNotCall, - printSkipMessage, - skip, - ArrayStream, - nodeProcessAborted, - busyLoop, - isAlive, - noWarnCode, - expectWarning, - expectsError, - skipIfInspectorDisabled, - skipIf32Bits, - getArrayBufferViews, - getBufferSources, - disableCrashOnUnhandledRejection, - getTTYfd, - runWithInvalidFD -} = common; - -export { - isMainThread, - isWindows, - isAIX, - isLinuxPPCBE, - isSunOS, - isFreeBSD, - isOpenBSD, - isLinux, - isOSX, - enoughTestMem, - enoughTestCpu, - rootDir, - buildType, - localIPv6Hosts, - opensslCli, - PIPE, - hasIPv6, - childShouldThrowAndAbort, - createZeroFilledFile, - platformTimeout, - allowGlobals, - mustCall, - mustCallAtLeast, - hasMultiLocalhost, - skipIfEslintMissing, - canCreateSymLink, - getCallSite, - mustNotCall, - printSkipMessage, - skip, - ArrayStream, - nodeProcessAborted, - busyLoop, - isAlive, - noWarnCode, - expectWarning, - expectsError, - skipIfInspectorDisabled, - skipIf32Bits, - getArrayBufferViews, - getBufferSources, - disableCrashOnUnhandledRejection, - getTTYfd, - runWithInvalidFD -}; - -function forEach (xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} diff --git a/test/common/inspector-helper.js b/test/common/inspector-helper.js deleted file mode 100644 index f90d43220b..0000000000 --- a/test/common/inspector-helper.js +++ /dev/null @@ -1,789 +0,0 @@ -"use strict"; - -function 
_possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var common = require('../common'); - -var assert = require('assert'); - -var fs = require('fs'); - -var http = require('http'); - -var fixtures = require('../common/fixtures'); - -var _require = require('child_process'), - spawn = _require.spawn; - -var _require2 = require('url'), - parseURL = _require2.parse; - -var _require3 = require('internal/url'), - pathToFileURL = _require3.pathToFileURL; - -var _require4 = require('events'), - EventEmitter = _require4.EventEmitter; - -var _MAINSCRIPT = fixtures.path('loop.js'); - -var DEBUG = false; -var TIMEOUT = common.platformTimeout(15 * 1000); - -function spawnChildProcess(inspectorFlags, scriptContents, scriptFile) { - var args = [].concat(inspectorFlags); - - if (scriptContents) { - args.push('-e', scriptContents); - } else { 
- args.push(scriptFile); - } - - var child = spawn(process.execPath, args); - var handler = tearDown.bind(null, child); - process.on('exit', handler); - process.on('uncaughtException', handler); - common.disableCrashOnUnhandledRejection(); - process.on('unhandledRejection', handler); - process.on('SIGINT', handler); - return child; -} - -function makeBufferingDataCallback(dataCallback) { - var buffer = Buffer.alloc(0); - return function (data) { - var newData = Buffer.concat([buffer, data]); - var str = newData.toString('utf8'); - var lines = str.replace(/\r/g, '').split('\n'); - if (str.endsWith('\n')) buffer = Buffer.alloc(0);else buffer = Buffer.from(lines.pop(), 'utf8'); - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; - - try { - for (var _iterator = lines[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var line = _step.value; - dataCallback(line); - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator.return != null) { - _iterator.return(); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } - } - }; -} - -function tearDown(child, err) { - child.kill(); - - if (err) { - console.error(err); - process.exit(1); - } -} - -function parseWSFrame(buffer) { - // Protocol described in https://tools.ietf.org/html/rfc6455#section-5 - var message = null; - if (buffer.length < 2) return { - length: 0, - message: message - }; - - if (buffer[0] === 0x88 && buffer[1] === 0x00) { - return { - length: 2, - message: message, - closed: true - }; - } - - assert.strictEqual(buffer[0], 0x81); - var dataLen = 0x7F & buffer[1]; - var bodyOffset = 2; - if (buffer.length < bodyOffset + dataLen) return 0; - - if (dataLen === 126) { - dataLen = buffer.readUInt16BE(2); - bodyOffset = 4; - } else if (dataLen === 127) { - assert(buffer[2] === 0 && buffer[3] === 0, 'Inspector message too big'); - dataLen = buffer.readUIntBE(4, 6); - bodyOffset = 10; - } - - if (buffer.length < bodyOffset + dataLen) return { - length: 0, - message: message - }; - var jsonPayload = buffer.slice(bodyOffset, bodyOffset + dataLen).toString('utf8'); - - try { - message = JSON.parse(jsonPayload); - } catch (e) { - console.error("JSON.parse() failed for: ".concat(jsonPayload)); - throw e; - } - - if (DEBUG) console.log('[received]', JSON.stringify(message)); - return { - length: bodyOffset + dataLen, - message: message - }; -} - -function formatWSFrame(message) { - var messageBuf = Buffer.from(JSON.stringify(message)); - var wsHeaderBuf = Buffer.allocUnsafe(16); - wsHeaderBuf.writeUInt8(0x81, 0); - var byte2 = 0x80; - var bodyLen = messageBuf.length; - var maskOffset = 2; - - if (bodyLen < 126) { - byte2 = 0x80 + bodyLen; - } else if (bodyLen < 65536) { - byte2 = 0xFE; - wsHeaderBuf.writeUInt16BE(bodyLen, 2); - maskOffset = 4; - } else { - byte2 = 0xFF; - wsHeaderBuf.writeUInt32BE(bodyLen, 2); - wsHeaderBuf.writeUInt32BE(0, 6); - maskOffset = 10; - } - - wsHeaderBuf.writeUInt8(byte2, 1); - wsHeaderBuf.writeUInt32BE(0x01020408, maskOffset); - - for (var _i = 0; _i < messageBuf.length; _i++) { - messageBuf[_i] = messageBuf[_i] ^ 1 << _i % 4; - } - - return Buffer.concat([wsHeaderBuf.slice(0, maskOffset + 4), messageBuf]); -} - -var InspectorSession = -/*#__PURE__*/ -function () { - function InspectorSession(socket, instance) { - var _this = this; - - _classCallCheck(this, InspectorSession); - - 
this._instance = instance; - this._socket = socket; - this._nextId = 1; - this._commandResponsePromises = new Map(); - this._unprocessedNotifications = []; - this._notificationCallback = null; - this._scriptsIdsByUrl = new Map(); - var buffer = Buffer.alloc(0); - socket.on('data', function (data) { - buffer = Buffer.concat([buffer, data]); - - do { - var _parseWSFrame = parseWSFrame(buffer), - length = _parseWSFrame.length, - message = _parseWSFrame.message, - closed = _parseWSFrame.closed; - - if (!length) break; - - if (closed) { - socket.write(Buffer.from([0x88, 0x00])); // WS close frame - } - - buffer = buffer.slice(length); - if (message) _this._onMessage(message); - } while (true); - }); - this._terminationPromise = new Promise(function (resolve) { - socket.once('close', resolve); - }); - } - - _createClass(InspectorSession, [{ - key: "waitForServerDisconnect", - value: function waitForServerDisconnect() { - return this._terminationPromise; - } - }, { - key: "disconnect", - value: function () { - var _disconnect = _asyncToGenerator(function* () { - this._socket.destroy(); - - return this.waitForServerDisconnect(); - }); - - function disconnect() { - return _disconnect.apply(this, arguments); - } - - return disconnect; - }() - }, { - key: "_onMessage", - value: function _onMessage(message) { - if (message.id) { - var _this$_commandRespons = this._commandResponsePromises.get(message.id), - resolve = _this$_commandRespons.resolve, - reject = _this$_commandRespons.reject; - - this._commandResponsePromises.delete(message.id); - - if (message.result) resolve(message.result);else reject(message.error); - } else { - if (message.method === 'Debugger.scriptParsed') { - var _message$params = message.params, - scriptId = _message$params.scriptId, - url = _message$params.url; - - this._scriptsIdsByUrl.set(scriptId, url); - - var fileUrl = url.startsWith('file:') ? url : pathToFileURL(url).toString(); - - if (fileUrl === this.scriptURL().toString()) { - this.mainScriptId = scriptId; - } - } - - if (this._notificationCallback) { - // In case callback needs to install another - var callback = this._notificationCallback; - this._notificationCallback = null; - callback(message); - } else { - this._unprocessedNotifications.push(message); - } - } - } - }, { - key: "_sendMessage", - value: function _sendMessage(message) { - var _this2 = this; - - var msg = JSON.parse(JSON.stringify(message)); // Clone! - - msg.id = this._nextId++; - if (DEBUG) console.log('[sent]', JSON.stringify(msg)); - var responsePromise = new Promise(function (resolve, reject) { - _this2._commandResponsePromises.set(msg.id, { - resolve: resolve, - reject: reject - }); - }); - return new Promise(function (resolve) { - return _this2._socket.write(formatWSFrame(msg), resolve); - }).then(function () { - return responsePromise; - }); - } - }, { - key: "send", - value: function send(commands) { - var _this3 = this; - - if (Array.isArray(commands)) { - // Multiple commands means the response does not matter. There might even - // never be a response. 
- return Promise.all(commands.map(function (command) { - return _this3._sendMessage(command); - })).then(function () {}); - } else { - return this._sendMessage(commands); - } - } - }, { - key: "waitForNotification", - value: function waitForNotification(methodOrPredicate, description) { - var desc = description || methodOrPredicate; - var message = "Timed out waiting for matching notification (".concat(desc, "))"); - return fires(this._asyncWaitForNotification(methodOrPredicate), message, TIMEOUT); - } - }, { - key: "_asyncWaitForNotification", - value: function () { - var _asyncWaitForNotification2 = _asyncToGenerator(function* (methodOrPredicate) { - var _this4 = this; - - function matchMethod(notification) { - return notification.method === methodOrPredicate; - } - - var predicate = typeof methodOrPredicate === 'string' ? matchMethod : methodOrPredicate; - var notification = null; - - do { - if (this._unprocessedNotifications.length) { - notification = this._unprocessedNotifications.shift(); - } else { - notification = yield new Promise(function (resolve) { - return _this4._notificationCallback = resolve; - }); - } - } while (!predicate(notification)); - - return notification; - }); - - function _asyncWaitForNotification(_x) { - return _asyncWaitForNotification2.apply(this, arguments); - } - - return _asyncWaitForNotification; - }() - }, { - key: "_isBreakOnLineNotification", - value: function _isBreakOnLineNotification(message, line, expectedScriptPath) { - if (message.method === 'Debugger.paused') { - var callFrame = message.params.callFrames[0]; - var location = callFrame.location; - - var scriptPath = this._scriptsIdsByUrl.get(location.scriptId); - - assert.strictEqual(scriptPath.toString(), expectedScriptPath.toString(), "".concat(scriptPath, " !== ").concat(expectedScriptPath)); - assert.strictEqual(location.lineNumber, line); - return true; - } - } - }, { - key: "waitForBreakOnLine", - value: function waitForBreakOnLine(line, url) { - var _this5 = this; - - return this.waitForNotification(function (notification) { - return _this5._isBreakOnLineNotification(notification, line, url); - }, "break on ".concat(url, ":").concat(line)); - } - }, { - key: "_matchesConsoleOutputNotification", - value: function _matchesConsoleOutputNotification(notification, type, values) { - if (!Array.isArray(values)) values = [values]; - - if (notification.method === 'Runtime.consoleAPICalled') { - var params = notification.params; - - if (params.type === type) { - var _i2 = 0; - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - var _iteratorError2 = undefined; - - try { - for (var _iterator2 = params.args[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - var value = _step2.value; - if (value.value !== values[_i2++]) return false; - } - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2.return != null) { - _iterator2.return(); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } - } - - return _i2 === values.length; - } - } - } - }, { - key: "waitForConsoleOutput", - value: function waitForConsoleOutput(type, values) { - var _this6 = this; - - var desc = "Console output matching ".concat(JSON.stringify(values)); - return this.waitForNotification(function (notification) { - return _this6._matchesConsoleOutputNotification(notification, type, values); - }, desc); - } - }, { - key: 
"runToCompletion", - value: function () { - var _runToCompletion = _asyncToGenerator(function* () { - console.log('[test]', 'Verify node waits for the frontend to disconnect'); - yield this.send({ - 'method': 'Debugger.resume' - }); - yield this.waitForNotification(function (notification) { - return notification.method === 'Runtime.executionContextDestroyed' && notification.params.executionContextId === 1; - }); - - while ((yield this._instance.nextStderrString()) !== 'Waiting for the debugger to disconnect...') { - ; - } - - yield this.disconnect(); - }); - - function runToCompletion() { - return _runToCompletion.apply(this, arguments); - } - - return runToCompletion; - }() - }, { - key: "scriptPath", - value: function scriptPath() { - return this._instance.scriptPath(); - } - }, { - key: "script", - value: function script() { - return this._instance.script(); - } - }, { - key: "scriptURL", - value: function scriptURL() { - return pathToFileURL(this.scriptPath()); - } - }]); - - return InspectorSession; -}(); - -var NodeInstance = -/*#__PURE__*/ -function (_EventEmitter) { - _inherits(NodeInstance, _EventEmitter); - - function NodeInstance() { - var _this7; - - var inspectorFlags = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ['--inspect-brk=0']; - var scriptContents = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : ''; - var scriptFile = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : _MAINSCRIPT; - - _classCallCheck(this, NodeInstance); - - _this7 = _possibleConstructorReturn(this, _getPrototypeOf(NodeInstance).call(this)); - _this7._scriptPath = scriptFile; - _this7._script = scriptFile ? null : scriptContents; - _this7._portCallback = null; - _this7.portPromise = new Promise(function (resolve) { - return _this7._portCallback = resolve; - }); - _this7._process = spawnChildProcess(inspectorFlags, scriptContents, scriptFile); - _this7._running = true; - _this7._stderrLineCallback = null; - _this7._unprocessedStderrLines = []; - - _this7._process.stdout.on('data', makeBufferingDataCallback(function (line) { - _this7.emit('stdout', line); - - console.log('[out]', line); - })); - - _this7._process.stderr.on('data', makeBufferingDataCallback(function (message) { - return _this7.onStderrLine(message); - })); - - _this7._shutdownPromise = new Promise(function (resolve) { - _this7._process.once('exit', function (exitCode, signal) { - resolve({ - exitCode: exitCode, - signal: signal - }); - _this7._running = false; - }); - }); - return _this7; - } - - _createClass(NodeInstance, [{ - key: "onStderrLine", - value: function onStderrLine(line) { - console.log('[err]', line); - - if (this._portCallback) { - var matches = line.match(/Debugger listening on ws:\/\/.+:(\d+)\/.+/); - - if (matches) { - this._portCallback(matches[1]); - - this._portCallback = null; - } - } - - if (this._stderrLineCallback) { - this._stderrLineCallback(line); - - this._stderrLineCallback = null; - } else { - this._unprocessedStderrLines.push(line); - } - } - }, { - key: "httpGet", - value: function httpGet(host, path, hostHeaderValue) { - console.log('[test]', "Testing ".concat(path)); - var headers = hostHeaderValue ? 
{ - 'Host': hostHeaderValue - } : null; - return this.portPromise.then(function (port) { - return new Promise(function (resolve, reject) { - var req = http.get({ - host: host, - port: port, - path: path, - headers: headers - }, function (res) { - var response = ''; - res.setEncoding('utf8'); - res.on('data', function (data) { - return response += data.toString(); - }).on('end', function () { - resolve(response); - }); - }); - req.on('error', reject); - }); - }).then(function (response) { - try { - return JSON.parse(response); - } catch (e) { - e.body = response; - throw e; - } - }); - } - }, { - key: "sendUpgradeRequest", - value: function () { - var _sendUpgradeRequest = _asyncToGenerator(function* () { - var response = yield this.httpGet(null, '/json/list'); - var devtoolsUrl = response[0].webSocketDebuggerUrl; - var port = yield this.portPromise; - return http.get({ - port: port, - path: parseURL(devtoolsUrl).path, - headers: { - 'Connection': 'Upgrade', - 'Upgrade': 'websocket', - 'Sec-WebSocket-Version': 13, - 'Sec-WebSocket-Key': 'key==' - } - }); - }); - - function sendUpgradeRequest() { - return _sendUpgradeRequest.apply(this, arguments); - } - - return sendUpgradeRequest; - }() - }, { - key: "connectInspectorSession", - value: function () { - var _connectInspectorSession = _asyncToGenerator(function* () { - var _this8 = this; - - console.log('[test]', 'Connecting to a child Node process'); - var upgradeRequest = yield this.sendUpgradeRequest(); - return new Promise(function (resolve) { - upgradeRequest.on('upgrade', function (message, socket) { - return resolve(new InspectorSession(socket, _this8)); - }).on('response', common.mustNotCall('Upgrade was not received')); - }); - }); - - function connectInspectorSession() { - return _connectInspectorSession.apply(this, arguments); - } - - return connectInspectorSession; - }() - }, { - key: "expectConnectionDeclined", - value: function () { - var _expectConnectionDeclined = _asyncToGenerator(function* () { - console.log('[test]', 'Checking upgrade is not possible'); - var upgradeRequest = yield this.sendUpgradeRequest(); - return new Promise(function (resolve) { - upgradeRequest.on('upgrade', common.mustNotCall('Upgrade was received')).on('response', function (response) { - return response.on('data', function () {}).on('end', function () { - return resolve(response.statusCode); - }); - }); - }); - }); - - function expectConnectionDeclined() { - return _expectConnectionDeclined.apply(this, arguments); - } - - return expectConnectionDeclined; - }() - }, { - key: "expectShutdown", - value: function expectShutdown() { - return this._shutdownPromise; - } - }, { - key: "nextStderrString", - value: function nextStderrString() { - var _this9 = this; - - if (this._unprocessedStderrLines.length) return Promise.resolve(this._unprocessedStderrLines.shift()); - return new Promise(function (resolve) { - return _this9._stderrLineCallback = resolve; - }); - } - }, { - key: "write", - value: function write(message) { - this._process.stdin.write(message); - } - }, { - key: "kill", - value: function kill() { - this._process.kill(); - - return this.expectShutdown(); - } - }, { - key: "scriptPath", - value: function scriptPath() { - return this._scriptPath; - } - }, { - key: "script", - value: function script() { - if (this._script === null) this._script = fs.readFileSync(this.scriptPath(), 'utf8'); - return this._script; - } - }], [{ - key: "startViaSignal", - value: function () { - var _startViaSignal = _asyncToGenerator(function* (scriptContents) { - var 
instance = new NodeInstance([], "".concat(scriptContents, "\nprocess._rawDebug('started');"), undefined); - var msg = 'Timed out waiting for process to start'; - - while ((yield fires(instance.nextStderrString(), msg, TIMEOUT)) !== 'started') {} - - process._debugProcess(instance._process.pid); - - return instance; - }); - - function startViaSignal(_x2) { - return _startViaSignal.apply(this, arguments); - } - - return startViaSignal; - }() - }]); - - return NodeInstance; -}(EventEmitter); - -function onResolvedOrRejected(promise, callback) { - return promise.then(function (result) { - callback(); - return result; - }, function (error) { - callback(); - throw error; - }); -} - -function timeoutPromise(error, timeoutMs) { - var clearCallback = null; - var done = false; - var promise = onResolvedOrRejected(new Promise(function (resolve, reject) { - var timeout = setTimeout(function () { - return reject(error); - }, timeoutMs); - - clearCallback = function clearCallback() { - if (done) return; - clearTimeout(timeout); - resolve(); - }; - }), function () { - return done = true; - }); - promise.clear = clearCallback; - return promise; -} // Returns a new promise that will propagate `promise` resolution or rejection -// if that happens within the `timeoutMs` timespan, or rejects with `error` as -// a reason otherwise. - - -function fires(promise, error, timeoutMs) { - var timeout = timeoutPromise(error, timeoutMs); - return Promise.race([onResolvedOrRejected(promise, function () { - return timeout.clear(); - }), timeout]); -} - -module.exports = { - NodeInstance: NodeInstance -}; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/internet.js b/test/common/internet.js deleted file mode 100644 index e34dabbe21..0000000000 --- a/test/common/internet.js +++ /dev/null @@ -1,107 +0,0 @@ -"use strict"; - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - -/* eslint-disable node-core/required-modules */ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ -// Utilities for internet-related tests - - -var addresses = { - // A generic host that has registered common DNS records, - // supports both IPv4 and IPv6, and provides basic HTTP/HTTPS services - INET_HOST: 'nodejs.org', - // A host that provides IPv4 services - INET4_HOST: 'nodejs.org', - // A host that provides IPv6 services - INET6_HOST: 'nodejs.org', - // An accessible IPv4 IP, - // defaults to the Google Public DNS IPv4 address - INET4_IP: '8.8.8.8', - // An accessible IPv6 IP, - // defaults to the Google Public DNS IPv6 address - INET6_IP: '2001:4860:4860::8888', - // An invalid host that cannot be resolved - // See https://tools.ietf.org/html/rfc2606#section-2 - INVALID_HOST: 'something.invalid', - // A host with MX records registered - MX_HOST: 'nodejs.org', - // A host with SRV records registered - SRV_HOST: '_jabber._tcp.google.com', - // A host with PTR records registered - PTR_HOST: '8.8.8.8.in-addr.arpa', - // A host with NAPTR records registered - NAPTR_HOST: 'sip2sip.info', - // A host with SOA records registered - SOA_HOST: 'nodejs.org', - // A host with CNAME records registered - CNAME_HOST: 'blog.nodejs.org', - // A host with NS records registered - NS_HOST: 'nodejs.org', - // A host with TXT records registered - TXT_HOST: 'nodejs.org', - // An 
accessible IPv4 DNS server - DNS4_SERVER: '8.8.8.8', - // An accessible IPv4 DNS server - DNS6_SERVER: '2001:4860:4860::8888' -}; -var _iteratorNormalCompletion = true; -var _didIteratorError = false; -var _iteratorError = undefined; - -try { - for (var _iterator = objectKeys(addresses)[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var key = _step.value; - var envName = "NODE_TEST_".concat(key); - - if (process.env[envName]) { - addresses[key] = process.env[envName]; - } - } -} catch (err) { - _didIteratorError = true; - _iteratorError = err; -} finally { - try { - if (!_iteratorNormalCompletion && _iterator.return != null) { - _iterator.return(); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } -} - -module.exports = { - addresses: addresses -}; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/ongc.js b/test/common/ongc.js deleted file mode 100644 index c8ec0f07bc..0000000000 --- a/test/common/ongc.js +++ /dev/null @@ -1,66 +0,0 @@ -"use strict"; - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var common = require('../common'); - -var assert = require('assert'); - -var gcTrackerMap = new WeakMap(); -var gcTrackerTag = 'NODE_TEST_COMMON_GC_TRACKER'; - -function onGC(obj, gcListener) { - var async_hooks = - /*require('async_hooks'); - const onGcAsyncHook = async_hooks.createHook({ - init: common.mustCallAtLeast(function(id, type) { - if (this.trackedId === undefined) { - assert.strictEqual(type, gcTrackerTag); - this.trackedId = id; - } - }), - destroy(id) { - assert.notStrictEqual(this.trackedId, -1); - if (id === this.trackedId) { - this.gcListener.ongc(); - onGcAsyncHook.disable(); - } - } - }).enable();*/ - onGcAsyncHook.gcListener = gcListener; - gcTrackerMap.set(obj, new async_hooks.AsyncResource(gcTrackerTag)); - obj = null; -} - -module.exports = onGC; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/shared-lib-util.js b/test/common/shared-lib-util.js deleted file mode 100644 index fc96fc647a..0000000000 --- a/test/common/shared-lib-util.js +++ /dev/null @@ -1,80 +0,0 @@ -"use strict"; - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var common = require('../common'); - -var path = require('path'); - -var kNodeShared = Boolean(process.config.variables.node_shared); -var kShlibSuffix = process.config.variables.shlib_suffix; -var kExecPath = path.dirname(process.execPath); // If node executable is linked to shared lib, need to take care about the -// shared lib path. - -function addLibraryPath(env) { - if (!kNodeShared) { - return; - } - - env = env || process.env; - env.LD_LIBRARY_PATH = (env.LD_LIBRARY_PATH ? env.LD_LIBRARY_PATH + path.delimiter : '') + path.join(kExecPath, 'lib.target'); // For AIX. - - env.LIBPATH = (env.LIBPATH ? 
env.LIBPATH + path.delimiter : '') + path.join(kExecPath, 'lib.target'); // For Mac OSX. - - env.DYLD_LIBRARY_PATH = (env.DYLD_LIBRARY_PATH ? env.DYLD_LIBRARY_PATH + path.delimiter : '') + kExecPath; // For Windows. - - env.PATH = (env.PATH ? env.PATH + path.delimiter : '') + kExecPath; -} // Get the full path of shared lib. - - -function getSharedLibPath() { - if (common.isWindows) { - return path.join(kExecPath, 'node.dll'); - } else if (common.isOSX) { - return path.join(kExecPath, "libnode.".concat(kShlibSuffix)); - } else { - return path.join(kExecPath, 'lib.target', "libnode.".concat(kShlibSuffix)); - } -} // Get the binary path of stack frames. - - -function getBinaryPath() { - return kNodeShared ? getSharedLibPath() : process.execPath; -} - -module.exports = { - addLibraryPath: addLibraryPath, - getBinaryPath: getBinaryPath, - getSharedLibPath: getSharedLibPath -}; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/tick.js b/test/common/tick.js deleted file mode 100644 index f4f5fb772d..0000000000 --- a/test/common/tick.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -require('../common'); - -module.exports = function tick(x, cb) { - function ontick() { - if (--x === 0) { - if (typeof cb === 'function') cb(); - } else { - setImmediate(ontick); - } - } - - setImmediate(ontick); -}; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/tls.js b/test/common/tls.js deleted file mode 100644 index 98b00d0e09..0000000000 --- a/test/common/tls.js +++ /dev/null @@ -1,244 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - -/* eslint-disable node-core/required-modules, node-core/crypto-check */ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var crypto = require('crypto'); - -var net = require('net'); - -exports.ccs = Buffer.from('140303000101', 'hex'); - -var TestTLSSocket = -/*#__PURE__*/ -function (_net$Socket) { - _inherits(TestTLSSocket, _net$Socket); - - function TestTLSSocket(server_cert) { - var _this; - - _classCallCheck(this, TestTLSSocket); - - _this = _possibleConstructorReturn(this, _getPrototypeOf(TestTLSSocket).call(this)); - _this.server_cert = server_cert; - _this.version = Buffer.from('0303', 'hex'); - _this.handshake_list = []; // AES128-GCM-SHA256 - - _this.ciphers = Buffer.from('000002009c0', 'hex'); - _this.pre_master_secret = Buffer.concat([_this.version, crypto.randomBytes(46)]); - _this.master_secret = null; - _this.write_seq = 0; - _this.client_random = crypto.randomBytes(32); - - _this.on('handshake', function (msg) { - _this.handshake_list.push(msg); - }); - - _this.on('server_random', function (server_random) { - _this.master_secret = PRF12('sha256', _this.pre_master_secret, 'master secret', Buffer.concat([_this.client_random, server_random]), 48); - var key_block = PRF12('sha256', _this.master_secret, 'key expansion', Buffer.concat([server_random, _this.client_random]), 40); - _this.client_writeKey = key_block.slice(0, 16); - _this.client_writeIV = key_block.slice(32, 36); - }); - - return _this; - } - - _createClass(TestTLSSocket, [{ - key: "createClientHello", - value: function createClientHello() { - var compressions = Buffer.from('0100', 'hex'); // null - - var msg = addHandshakeHeader(0x01, Buffer.concat([this.version, this.client_random, this.ciphers, compressions])); - this.emit('handshake', msg); - return addRecordHeader(0x16, msg); - } - }, { - key: "createClientKeyExchange", - value: function createClientKeyExchange() { - var encrypted_pre_master_secret = crypto.publicEncrypt({ - key: this.server_cert, - padding: crypto.constants.RSA_PKCS1_PADDING - }, this.pre_master_secret); - var length = Buffer.alloc(2); - length.writeUIntBE(encrypted_pre_master_secret.length, 0, 2); - var msg = addHandshakeHeader(0x10, Buffer.concat([length, encrypted_pre_master_secret])); - this.emit('handshake', msg); - return addRecordHeader(0x16, msg); - } - }, { - key: "createFinished", - value: function createFinished() { - var shasum = crypto.createHash('sha256'); - shasum.update(Buffer.concat(this.handshake_list)); - var message_hash = shasum.digest(); - var r = PRF12('sha256', this.master_secret, 'client finished', message_hash, 12); - var msg = 
addHandshakeHeader(0x14, r); - this.emit('handshake', msg); - return addRecordHeader(0x16, msg); - } - }, { - key: "createIllegalHandshake", - value: function createIllegalHandshake() { - var illegal_handshake = Buffer.alloc(5); - return addRecordHeader(0x16, illegal_handshake); - } - }, { - key: "parseTLSFrame", - value: function parseTLSFrame(buf) { - var offset = 0; - var record = buf.slice(offset, 5); - var type = record[0]; - var length = record.slice(3, 5).readUInt16BE(0); - offset += 5; - var remaining = buf.slice(offset, offset + length); - - if (type === 0x16) { - do { - remaining = this.parseTLSHandshake(remaining); - } while (remaining.length > 0); - } - - offset += length; - return buf.slice(offset); - } - }, { - key: "parseTLSHandshake", - value: function parseTLSHandshake(buf) { - var offset = 0; - var handshake_type = buf[offset]; - - if (handshake_type === 0x02) { - var server_random = buf.slice(6, 6 + 32); - this.emit('server_random', server_random); - } - - offset += 1; - var length = buf.readUIntBE(offset, 3); - offset += 3; - var handshake = buf.slice(0, offset + length); - this.emit('handshake', handshake); - offset += length; - var remaining = buf.slice(offset); - return remaining; - } - }, { - key: "encrypt", - value: function encrypt(plain) { - var type = plain.slice(0, 1); - var version = plain.slice(1, 3); - var nonce = crypto.randomBytes(8); - var iv = Buffer.concat([this.client_writeIV.slice(0, 4), nonce]); - var bob = crypto.createCipheriv('aes-128-gcm', this.client_writeKey, iv); - var write_seq = Buffer.alloc(8); - write_seq.writeUInt32BE(this.write_seq++, 4); - var aad = Buffer.concat([write_seq, plain.slice(0, 5)]); - bob.setAAD(aad); - var encrypted1 = bob.update(plain.slice(5)); - var encrypted = Buffer.concat([encrypted1, bob.final()]); - var tag = bob.getAuthTag(); - var length = Buffer.alloc(2); - length.writeUInt16BE(nonce.length + encrypted.length + tag.length, 0); - return Buffer.concat([type, version, length, nonce, encrypted, tag]); - } - }]); - - return TestTLSSocket; -}(net.Socket); - -function addRecordHeader(type, frame) { - var record_layer = Buffer.from('0003030000', 'hex'); - record_layer[0] = type; - record_layer.writeUInt16BE(frame.length, 3); - return Buffer.concat([record_layer, frame]); -} - -function addHandshakeHeader(type, msg) { - var handshake_header = Buffer.alloc(4); - handshake_header[0] = type; - handshake_header.writeUIntBE(msg.length, 1, 3); - return Buffer.concat([handshake_header, msg]); -} - -function PRF12(algo, secret, label, seed, size) { - var newSeed = Buffer.concat([Buffer.from(label, 'utf8'), seed]); - return P_hash(algo, secret, newSeed, size); -} - -function P_hash(algo, secret, seed, size) { - var result = Buffer.alloc(size); - var hmac = crypto.createHmac(algo, secret); - hmac.update(seed); - var a = hmac.digest(); - var j = 0; - - while (j < size) { - hmac = crypto.createHmac(algo, secret); - hmac.update(a); - hmac.update(seed); - var b = hmac.digest(); - var todo = b.length; - - if (j + todo > size) { - todo = size - j; - } - - b.copy(result, j, 0, todo); - j += todo; - hmac = crypto.createHmac(algo, secret); - hmac.update(a); - a = hmac.digest(); - } - - return result; -} - -exports.TestTLSSocket = TestTLSSocket; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/tmpdir.js b/test/common/tmpdir.js deleted file mode 100644 index 4f3091de7c..0000000000 --- a/test/common/tmpdir.js +++ /dev/null @@ -1,101 +0,0 @@ 
-"use strict"; - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - -/* eslint-disable node-core/required-modules */ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var fs = require('fs'); - -var path = require('path'); - -function rimrafSync(p) { - var st; - - try { - st = fs.lstatSync(p); - } catch (e) { - if (e.code === 'ENOENT') return; - } - - try { - if (st && st.isDirectory()) rmdirSync(p, null);else fs.unlinkSync(p); - } catch (e) { - if (e.code === 'ENOENT') return; - if (e.code === 'EPERM') return rmdirSync(p, e); - if (e.code !== 'EISDIR') throw e; - rmdirSync(p, e); - } -} - -function rmdirSync(p, originalEr) { - try { - fs.rmdirSync(p); - } catch (e) { - if (e.code === 'ENOTDIR') throw originalEr; - - if (e.code === 'ENOTEMPTY' || e.code === 'EEXIST' || e.code === 'EPERM') { - var enc = process.platform === 'linux' ? 'buffer' : 'utf8'; - forEach(fs.readdirSync(p, enc), function (f) { - if (f instanceof Buffer) { - var buf = Buffer.concat([Buffer.from(p), Buffer.from(path.sep), f]); - rimrafSync(buf); - } else { - rimrafSync(path.join(p, f)); - } - }); - fs.rmdirSync(p); - } - } -} - -var testRoot = process.env.NODE_TEST_DIR ? fs.realpathSync(process.env.NODE_TEST_DIR) : path.resolve(__dirname, '..'); // Using a `.` prefixed name, which is the convention for "hidden" on POSIX, -// gets tools to ignore it by default or by simple rules, especially eslint. - -var tmpdirName = '.tmp'; - -if (process.env.TEST_THREAD_ID) { - tmpdirName += ".".concat(process.env.TEST_THREAD_ID); -} - -var tmpPath = path.join(testRoot, tmpdirName); - -function refresh() { - rimrafSync(this.path); - fs.mkdirSync(this.path); -} - -module.exports = { - path: tmpPath, - refresh: refresh -}; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/test/common/wpt.js b/test/common/wpt.js deleted file mode 100644 index 0d1b9c8008..0000000000 --- a/test/common/wpt.js +++ /dev/null @@ -1,66 +0,0 @@ -"use strict"; - -/**/ -require('@babel/polyfill'); - -var util = require('util'); - -for (var i in util) { - exports[i] = util[i]; -} -/**/ - -/* eslint-disable node-core/required-modules */ - - -'use strict'; -/**/ - - -var objectKeys = objectKeys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -var assert = require('assert'); // https://github.com/w3c/testharness.js/blob/master/testharness.js - - -module.exports = { - test: function test(fn, desc) { - try { - fn(); - } catch (err) { - console.error("In ".concat(desc, ":")); - throw err; - } - }, - assert_equals: assert.strictEqual, - assert_true: function assert_true(value, message) { - return assert.strictEqual(value, true, message); - }, - assert_false: function assert_false(value, message) { - return assert.strictEqual(value, false, message); - }, - assert_throws: function assert_throws(code, func, desc) { - assert.throws(func, function (err) { - return typeof err === 'object' && 'name' in err && err.name.startsWith(code.name); - }, desc); - }, - assert_array_equals: assert.deepStrictEqual, - assert_unreached: function assert_unreached(desc) { - assert.fail("Reached unreachable code: ".concat(desc)); - } -}; - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No 
newline at end of file diff --git a/test/fixtures/x1024.txt b/test/fixtures/x1024.txt deleted file mode 100644 index c6a9d2f1a5..0000000000 --- a/test/fixtures/x1024.txt +++ /dev/null @@ -1 +0,0 @@ -xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx \ No newline at end of file diff --git a/test/ours/lolex-fake-timers.js b/test/ours/lolex-fake-timers.js deleted file mode 100644 index 59af1328ea..0000000000 --- a/test/ours/lolex-fake-timers.js +++ /dev/null @@ -1,41 +0,0 @@ -require('../common'); -var tap = require('tap'); -var util = require('util'); -var assert = require('assert'); -var lolex = require('lolex'); -var stream = require('../../'); -var Transform = stream.Transform; - -function MyTransform() { - Transform.call(this); -} - -util.inherits(MyTransform, Transform); - -var clock = lolex.install({toFake: [ 'setImmediate', 'nextTick' ]}); -var stream2DataCalled = false; - -var stream = new MyTransform(); -stream.on('data', function() { - stream.on('end', function() { - - var stream2 = new MyTransform(); - stream2.on('data', function() { - stream2.on('end', function() { - stream2DataCalled = true - }); - setImmediate(function() { - stream2.end() - }); - }); - stream2.emit('data') - - }); - stream.end(); -}); -stream.emit('data'); - -clock.runAll() -clock.uninstall(); -assert(stream2DataCalled); -tap.pass('ok'); diff --git a/test/ours/test-stream-sync-write.js b/test/ours/test-stream-sync-write.js deleted file mode 100644 index bfa7be9410..0000000000 --- a/test/ours/test-stream-sync-write.js +++ /dev/null @@ -1,38 +0,0 @@ -require('../common'); -var util = require('util'); -var stream = require('../../'); -var WritableStream = stream.Writable; - - -var InternalStream = function() { - WritableStream.call(this); -}; -util.inherits(InternalStream, WritableStream); - -InternalStream.prototype._write = function(chunk, encoding, callback) { - callback(); -}; - -var internalStream = new InternalStream(); - - - -var ExternalStream = function(writable) { - this._writable = writable; - WritableStream.call(this); -}; -util.inherits(ExternalStream, WritableStream); - -ExternalStream.prototype._write = function(chunk, encoding, callback) { - this._writable.write(chunk, encoding, callback); -}; - - - -var externalStream = new ExternalStream(internalStream); - -for (var i = 0; i < 2000; i++) { - externalStream.write(i.toString()); -} - -require('tap').pass('sync done'); diff --git a/test/parallel/test-readable-from.js b/test/parallel/test-readable-from.js deleted file mode 100644 index 83e91f1615..0000000000 --- 
a/test/parallel/test-readable-from.js +++ /dev/null @@ -1,398 +0,0 @@ -"use strict"; - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -function _awaitAsyncGenerator(value) { return new _AwaitValue(value); } - -function _wrapAsyncGenerator(fn) { return function () { return new _AsyncGenerator(fn.apply(this, arguments)); }; } - -function _AsyncGenerator(gen) { var front, back; function send(key, arg) { return new Promise(function (resolve, reject) { var request = { key: key, arg: arg, resolve: resolve, reject: reject, next: null }; if (back) { back = back.next = request; } else { front = back = request; resume(key, arg); } }); } function resume(key, arg) { try { var result = gen[key](arg); var value = result.value; var wrappedAwait = value instanceof _AwaitValue; Promise.resolve(wrappedAwait ? value.wrapped : value).then(function (arg) { if (wrappedAwait) { resume(key === "return" ? "return" : "next", arg); return; } settle(result.done ? "return" : "normal", arg); }, function (err) { resume("throw", err); }); } catch (err) { settle("throw", err); } } function settle(type, value) { switch (type) { case "return": front.resolve({ value: value, done: true }); break; case "throw": front.reject(value); break; default: front.resolve({ value: value, done: false }); break; } front = front.next; if (front) { resume(front.key, front.arg); } else { back = null; } } this._invoke = send; if (typeof gen.return !== "function") { this.return = undefined; } } - -if (typeof Symbol === "function" && Symbol.asyncIterator) { _AsyncGenerator.prototype[Symbol.asyncIterator] = function () { return this; }; } - -_AsyncGenerator.prototype.next = function (arg) { return this._invoke("next", arg); }; - -_AsyncGenerator.prototype.throw = function (arg) { return this._invoke("throw", arg); }; - -_AsyncGenerator.prototype.return = function (arg) { return this._invoke("return", arg); }; - -function _AwaitValue(value) { this.wrapped = value; } - -function _asyncIterator(iterable) { var method; if (typeof Symbol !== "undefined") { if (Symbol.asyncIterator) { method = iterable[Symbol.asyncIterator]; if (method != null) return method.call(iterable); } if (Symbol.iterator) { method = iterable[Symbol.iterator]; if (method != null) return method.call(iterable); } } throw new TypeError("Object is not async iterable"); } - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var _require = require('../common'), - mustCall = _require.mustCall; - -var once = require('events.once'); - -var _require2 = require('../../'), - Readable = _require2.Readable; - -var _require3 = require('assert/'), - strictEqual = _require3.strictEqual; - -function toReadableBasicSupport() { - return _toReadableBasicSupport.apply(this, arguments); -} - -function _toReadableBasicSupport() { - _toReadableBasicSupport = _asyncToGenerator(function* () { - function generate() { - return _generate.apply(this, arguments); - } - - function _generate() 
{ - _generate = _wrapAsyncGenerator(function* () { - yield 'a'; - yield 'b'; - yield 'c'; - }); - return _generate.apply(this, arguments); - } - - var stream = Readable.from(generate()); - var expected = ['a', 'b', 'c']; - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - - var _iteratorError; - - try { - for (var _iterator = _asyncIterator(stream), _step, _value; _step = yield _iterator.next(), _iteratorNormalCompletion = _step.done, _value = yield _step.value, !_iteratorNormalCompletion; _iteratorNormalCompletion = true) { - var chunk = _value; - strictEqual(chunk, expected.shift()); - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator.return != null) { - yield _iterator.return(); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } - } - }); - return _toReadableBasicSupport.apply(this, arguments); -} - -function toReadableSyncIterator() { - return _toReadableSyncIterator.apply(this, arguments); -} - -function _toReadableSyncIterator() { - _toReadableSyncIterator = _asyncToGenerator(function* () { - function* generate() { - yield 'a'; - yield 'b'; - yield 'c'; - } - - var stream = Readable.from(generate()); - var expected = ['a', 'b', 'c']; - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - - var _iteratorError2; - - try { - for (var _iterator2 = _asyncIterator(stream), _step2, _value2; _step2 = yield _iterator2.next(), _iteratorNormalCompletion2 = _step2.done, _value2 = yield _step2.value, !_iteratorNormalCompletion2; _iteratorNormalCompletion2 = true) { - var chunk = _value2; - strictEqual(chunk, expected.shift()); - } - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2.return != null) { - yield _iterator2.return(); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } - } - }); - return _toReadableSyncIterator.apply(this, arguments); -} - -function toReadablePromises() { - return _toReadablePromises.apply(this, arguments); -} - -function _toReadablePromises() { - _toReadablePromises = _asyncToGenerator(function* () { - var promises = [Promise.resolve('a'), Promise.resolve('b'), Promise.resolve('c')]; - var stream = Readable.from(promises); - var expected = ['a', 'b', 'c']; - var _iteratorNormalCompletion3 = true; - var _didIteratorError3 = false; - - var _iteratorError3; - - try { - for (var _iterator3 = _asyncIterator(stream), _step3, _value3; _step3 = yield _iterator3.next(), _iteratorNormalCompletion3 = _step3.done, _value3 = yield _step3.value, !_iteratorNormalCompletion3; _iteratorNormalCompletion3 = true) { - var chunk = _value3; - strictEqual(chunk, expected.shift()); - } - } catch (err) { - _didIteratorError3 = true; - _iteratorError3 = err; - } finally { - try { - if (!_iteratorNormalCompletion3 && _iterator3.return != null) { - yield _iterator3.return(); - } - } finally { - if (_didIteratorError3) { - throw _iteratorError3; - } - } - } - }); - return _toReadablePromises.apply(this, arguments); -} - -function toReadableString() { - return _toReadableString.apply(this, arguments); -} - -function _toReadableString() { - _toReadableString = _asyncToGenerator(function* () { - var stream = Readable.from('abc'); - var expected = ['a', 'b', 'c']; - var _iteratorNormalCompletion4 = true; - var _didIteratorError4 = false; - - var _iteratorError4; - - try { - for (var _iterator4 = _asyncIterator(stream), _step4, 
_value4; _step4 = yield _iterator4.next(), _iteratorNormalCompletion4 = _step4.done, _value4 = yield _step4.value, !_iteratorNormalCompletion4; _iteratorNormalCompletion4 = true) { - var chunk = _value4; - strictEqual(chunk, expected.shift()); - } - } catch (err) { - _didIteratorError4 = true; - _iteratorError4 = err; - } finally { - try { - if (!_iteratorNormalCompletion4 && _iterator4.return != null) { - yield _iterator4.return(); - } - } finally { - if (_didIteratorError4) { - throw _iteratorError4; - } - } - } - }); - return _toReadableString.apply(this, arguments); -} - -function toReadableOnData() { - return _toReadableOnData.apply(this, arguments); -} - -function _toReadableOnData() { - _toReadableOnData = _asyncToGenerator(function* () { - function generate() { - return _generate2.apply(this, arguments); - } - - function _generate2() { - _generate2 = _wrapAsyncGenerator(function* () { - yield 'a'; - yield 'b'; - yield 'c'; - }); - return _generate2.apply(this, arguments); - } - - var stream = Readable.from(generate()); - var iterations = 0; - var expected = ['a', 'b', 'c']; - stream.on('data', function (chunk) { - iterations++; - strictEqual(chunk, expected.shift()); - }); - yield once(stream, 'end'); - strictEqual(iterations, 3); - }); - return _toReadableOnData.apply(this, arguments); -} - -function toReadableOnDataNonObject() { - return _toReadableOnDataNonObject.apply(this, arguments); -} - -function _toReadableOnDataNonObject() { - _toReadableOnDataNonObject = _asyncToGenerator(function* () { - function generate() { - return _generate3.apply(this, arguments); - } - - function _generate3() { - _generate3 = _wrapAsyncGenerator(function* () { - yield 'a'; - yield 'b'; - yield 'c'; - }); - return _generate3.apply(this, arguments); - } - - var stream = Readable.from(generate(), { - objectMode: false - }); - var iterations = 0; - var expected = ['a', 'b', 'c']; - stream.on('data', function (chunk) { - iterations++; - strictEqual(chunk instanceof Buffer, true); - strictEqual(chunk.toString(), expected.shift()); - }); - yield once(stream, 'end'); - strictEqual(iterations, 3); - }); - return _toReadableOnDataNonObject.apply(this, arguments); -} - -function destroysTheStreamWhenThrowing() { - return _destroysTheStreamWhenThrowing.apply(this, arguments); -} - -function _destroysTheStreamWhenThrowing() { - _destroysTheStreamWhenThrowing = _asyncToGenerator(function* () { - function generate() { - return _generate4.apply(this, arguments); - } - - function _generate4() { - _generate4 = _wrapAsyncGenerator(function* () { - throw new Error('kaboom'); - }); - return _generate4.apply(this, arguments); - } - - var stream = Readable.from(generate()); - stream.read(); - - try { - yield once(stream, 'error'); - } catch (err) { - strictEqual(err.message, 'kaboom'); - strictEqual(stream.destroyed, true); - } - }); - return _destroysTheStreamWhenThrowing.apply(this, arguments); -} - -function asTransformStream() { - return _asTransformStream.apply(this, arguments); -} - -function _asTransformStream() { - _asTransformStream = _asyncToGenerator(function* () { - function generate(_x) { - return _generate5.apply(this, arguments); - } - - function _generate5() { - _generate5 = _wrapAsyncGenerator(function* (stream) { - var _iteratorNormalCompletion6 = true; - var _didIteratorError6 = false; - - var _iteratorError6; - - try { - for (var _iterator6 = _asyncIterator(stream), _step6, _value6; _step6 = yield _awaitAsyncGenerator(_iterator6.next()), _iteratorNormalCompletion6 = _step6.done, _value6 = yield 
_awaitAsyncGenerator(_step6.value), !_iteratorNormalCompletion6; _iteratorNormalCompletion6 = true) { - var chunk = _value6; - yield chunk.toUpperCase(); - } - } catch (err) { - _didIteratorError6 = true; - _iteratorError6 = err; - } finally { - try { - if (!_iteratorNormalCompletion6 && _iterator6.return != null) { - yield _awaitAsyncGenerator(_iterator6.return()); - } - } finally { - if (_didIteratorError6) { - throw _iteratorError6; - } - } - } - }); - return _generate5.apply(this, arguments); - } - - var source = new Readable({ - objectMode: true, - read: function read() { - this.push('a'); - this.push('b'); - this.push('c'); - this.push(null); - } - }); - var stream = Readable.from(generate(source)); - var expected = ['A', 'B', 'C']; - var _iteratorNormalCompletion5 = true; - var _didIteratorError5 = false; - - var _iteratorError5; - - try { - for (var _iterator5 = _asyncIterator(stream), _step5, _value5; _step5 = yield _iterator5.next(), _iteratorNormalCompletion5 = _step5.done, _value5 = yield _step5.value, !_iteratorNormalCompletion5; _iteratorNormalCompletion5 = true) { - var chunk = _value5; - strictEqual(chunk, expected.shift()); - } - } catch (err) { - _didIteratorError5 = true; - _iteratorError5 = err; - } finally { - try { - if (!_iteratorNormalCompletion5 && _iterator5.return != null) { - yield _iterator5.return(); - } - } finally { - if (_didIteratorError5) { - throw _iteratorError5; - } - } - } - }); - return _asTransformStream.apply(this, arguments); -} - -Promise.all([toReadableBasicSupport(), toReadableSyncIterator(), toReadablePromises(), toReadableString(), toReadableOnData(), toReadableOnDataNonObject(), destroysTheStreamWhenThrowing(), asTransformStream()]).then(mustCall()); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-readable-large-hwm.js b/test/parallel/test-readable-large-hwm.js deleted file mode 100644 index 9e47f0f487..0000000000 --- a/test/parallel/test-readable-large-hwm.js +++ /dev/null @@ -1,50 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Readable = _require.Readable; // Make sure that readable completes -// even when reading larger buffer. - - -var bufferSize = 10 * 1024 * 1024; -var n = 0; -var r = new Readable({ - read: function read() { - // Try to fill readable buffer piece by piece. 
- r.push(bufferShim.alloc(bufferSize / 10)); - - if (n++ > 10) { - r.push(null); - } - } -}); -r.on('readable', function () { - while (true) { - var ret = r.read(bufferSize); - if (ret === null) break; - } -}); -r.on('end', common.mustCall()); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-readable-single-end.js b/test/parallel/test-readable-single-end.js deleted file mode 100644 index b50c36fffa..0000000000 --- a/test/parallel/test-readable-single-end.js +++ /dev/null @@ -1,37 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Readable = _require.Readable; // This test ensures that there will not be an additional empty 'readable' -// event when stream has ended (only 1 event signalling about end) - - -var r = new Readable({ - read: function read() {} -}); -r.push(null); -r.on('readable', common.mustCall()); -r.on('end', common.mustCall()); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-auto-destroy.js b/test/parallel/test-stream-auto-destroy.js deleted file mode 100644 index 93338b4c20..0000000000 --- a/test/parallel/test-stream-auto-destroy.js +++ /dev/null @@ -1,99 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var stream = require('../../'); - -var assert = require('assert/'); - -{ - var r = new stream.Readable({ - autoDestroy: true, - read: function read() { - this.push('hello'); - this.push('world'); - this.push(null); - }, - destroy: common.mustCall(function (err, cb) { - return cb(); - }) - }); - var ended = false; - r.resume(); - r.on('end', common.mustCall(function () { - ended = true; - })); - r.on('close', common.mustCall(function () { - assert(ended); - })); -} -{ - var w = new stream.Writable({ - autoDestroy: true, - write: function write(data, enc, cb) { - cb(null); - }, - destroy: common.mustCall(function (err, cb) { - return cb(); - }) - }); - var finished = false; - w.write('hello'); - w.write('world'); - w.end(); - w.on('finish', common.mustCall(function () { - finished = true; - })); - w.on('close', common.mustCall(function () { - assert(finished); - })); -} -{ - var t = new stream.Transform({ - autoDestroy: true, - transform: function transform(data, enc, cb) { - cb(null, data); - }, - destroy: common.mustCall(function (err, cb) { - return cb(); - }) - }); - var _ended = false; - var _finished = false; - t.write('hello'); - t.write('world'); - t.end(); - t.resume(); - t.on('end', common.mustCall(function () { - _ended = true; - })); - t.on('finish', common.mustCall(function () { - _finished = true; - })); - t.on('close', common.mustCall(function () { - assert(_ended); - assert(_finished); - })); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - 
-_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-backpressure.js b/test/parallel/test-stream-backpressure.js deleted file mode 100644 index 35da341a75..0000000000 --- a/test/parallel/test-stream-backpressure.js +++ /dev/null @@ -1,59 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var pushes = 0; -var total = 65500 + 40 * 1024; -var rs = new stream.Readable({ - read: common.mustCall(function () { - if (pushes++ === 10) { - this.push(null); - return; - } - - var length = this._readableState.length; // We are at most doing two full runs of _reads - // before stopping, because Readable is greedy - // to keep its buffer full - - assert(length <= total); - this.push(bufferShim.alloc(65500)); - - for (var i = 0; i < 40; i++) { - this.push(bufferShim.alloc(1024)); - } // We will be over highWaterMark at this point - // but a new call to _read is scheduled anyway. - - }, 11) -}); -var ws = stream.Writable({ - write: common.mustCall(function (data, enc, cb) { - setImmediate(cb); - }, 41 * 10) -}); -rs.pipe(ws); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-big-packet.js b/test/parallel/test-stream-big-packet.js deleted file mode 100644 index 063dee3e1e..0000000000 --- a/test/parallel/test-stream-big-packet.js +++ /dev/null @@ -1,125 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var passed = false; - -var TestStream = -/*#__PURE__*/ -function (_stream$Transform) { - _inherits(TestStream, _stream$Transform); - - function TestStream() { - _classCallCheck(this, TestStream); - - return _possibleConstructorReturn(this, _getPrototypeOf(TestStream).apply(this, arguments)); - } - - _createClass(TestStream, [{ - key: "_transform", - value: function _transform(chunk, encoding, done) { - if (!passed) { - // Char 'a' only exists in the last write - passed = chunk.toString().includes('a'); - } - - done(); - } - }]); - - return TestStream; -}(stream.Transform); - -var s1 = new stream.PassThrough(); -var s2 = new stream.PassThrough(); -var s3 = new TestStream(); -s1.pipe(s3); // Don't let s2 auto close which may close s3 - -s2.pipe(s3, { - end: false -}); // We must write a buffer larger than highWaterMark - -var big = bufferShim.alloc(s1.writableHighWaterMark + 1, 'x'); // Since big is larger than highWaterMark, it will be buffered internally. - -assert(!s1.write(big)); // 'tiny' is small enough to pass through internal buffer. - -assert(s2.write('tiny')); // Write some small data in next IO loop, which will never be written to s3 -// Because 'drain' event is not emitted from s1 and s1 is still paused - -setImmediate(s1.write.bind(s1), 'later'); // Assert after two IO loops when all operations have been done. 
- -process.on('exit', function () { - assert(passed, 'Large buffer is not handled properly by Writable Stream'); -}); - -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - - return -1; -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-big-push.js b/test/parallel/test-stream-big-push.js deleted file mode 100644 index 54ef1ca0ad..0000000000 --- a/test/parallel/test-stream-big-push.js +++ /dev/null @@ -1,95 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var str = 'asdfasdfasdfasdfasdf'; -var r = new stream.Readable({ - highWaterMark: 5, - encoding: 'utf8' -}); -var reads = 0; - -function _read() { - if (reads === 0) { - setTimeout(function () { - r.push(str); - }, 1); - reads++; - } else if (reads === 1) { - var _ret = r.push(str); - - assert.strictEqual(_ret, false); - reads++; - } else { - r.push(null); - } -} - -r._read = common.mustCall(_read, 3); -r.on('end', common.mustCall()); // push some data in to start. -// we've never gotten any read event at this point. - -var ret = r.push(str); // should be false. > hwm - -assert(!ret); -var chunk = r.read(); -assert.strictEqual(chunk, str); -chunk = r.read(); -assert.strictEqual(chunk, null); -r.once('readable', function () { - // this time, we'll get *all* the remaining data, because - // it's been added synchronously, as the read WOULD take - // us below the hwm, and so it triggered a _read() again, - // which synchronously added more, which we then return. 
- chunk = r.read(); - assert.strictEqual(chunk, str + str); - chunk = r.read(); - assert.strictEqual(chunk, null); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-buffer-list.js b/test/parallel/test-stream-buffer-list.js deleted file mode 100644 index 85d1aea460..0000000000 --- a/test/parallel/test-stream-buffer-list.js +++ /dev/null @@ -1,47 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var BufferList = require('../../lib/internal/streams/buffer_list'); // Test empty buffer list. - - -var emptyList = new BufferList(); -emptyList.shift(); -assert.deepStrictEqual(emptyList, new BufferList()); -assert.strictEqual(emptyList.join(','), ''); -assert.deepStrictEqual(emptyList.concat(0), bufferShim.alloc(0)); -var buf = bufferShim.from('foo'); // Test buffer list with one element. - -var list = new BufferList(); -list.push(buf); -var copy = list.concat(3); -assert.notStrictEqual(copy, buf); -assert.deepStrictEqual(copy, buf); -assert.strictEqual(list.join(','), 'foo'); -var shifted = list.shift(); -assert.strictEqual(shifted, buf); -assert.deepStrictEqual(list, new BufferList()); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-decoder-objectmode.js b/test/parallel/test-stream-decoder-objectmode.js deleted file mode 100644 index fe5a356734..0000000000 --- a/test/parallel/test-stream-decoder-objectmode.js +++ /dev/null @@ -1,42 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var stream = require('../../'); - -var assert = require('assert/'); - -var readable = new stream.Readable({ - read: function read() {}, - encoding: 'utf16le', - objectMode: true -}); -readable.push(bufferShim.from('abc', 'utf16le')); -readable.push(bufferShim.from('def', 'utf16le')); -readable.push(null); // Without object mode, these would be concatenated into a single chunk. 
- -assert.strictEqual(readable.read(), 'abc'); -assert.strictEqual(readable.read(), 'def'); -assert.strictEqual(readable.read(), null); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-destroy-event-order.js b/test/parallel/test-stream-destroy-event-order.js deleted file mode 100644 index 12db9e626a..0000000000 --- a/test/parallel/test-stream-destroy-event-order.js +++ /dev/null @@ -1,45 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var _require = require('../../'), - Readable = _require.Readable; - -var rs = new Readable({ - read: function read() {} -}); -var closed = false; -var errored = false; -rs.on('close', common.mustCall(function () { - closed = true; - assert(errored); -})); -rs.on('error', common.mustCall(function (err) { - errored = true; - assert(!closed); -})); -rs.destroy(new Error('kaboom')); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-duplex-destroy.js b/test/parallel/test-stream-duplex-destroy.js deleted file mode 100644 index d163430858..0000000000 --- a/test/parallel/test-stream-duplex-destroy.js +++ /dev/null @@ -1,254 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Duplex = _require.Duplex; - -var assert = require('assert/'); - -{ - var duplex = new Duplex({ - write: function write(chunk, enc, cb) { - cb(); - }, - read: function read() {} - }); - duplex.resume(); - duplex.on('end', common.mustNotCall()); - duplex.on('finish', common.mustNotCall()); - duplex.on('close', common.mustCall()); - duplex.destroy(); - assert.strictEqual(duplex.destroyed, true); -} -{ - var _duplex = new Duplex({ - write: function write(chunk, enc, cb) { - cb(); - }, - read: function read() {} - }); - - _duplex.resume(); - - var expected = new Error('kaboom'); - - _duplex.on('end', common.mustNotCall()); - - _duplex.on('finish', common.mustNotCall()); - - _duplex.on('error', common.mustCall(function (err) { - assert.strictEqual(err, expected); - })); - - _duplex.destroy(expected); - - assert.strictEqual(_duplex.destroyed, true); -} -{ - var _duplex2 = new Duplex({ - write: function write(chunk, enc, cb) { - cb(); - }, - read: function read() {} - }); - - _duplex2._destroy = common.mustCall(function (err, cb) { - assert.strictEqual(err, _expected); - cb(err); - }); - - var _expected = new Error('kaboom'); - - _duplex2.on('finish', common.mustNotCall('no finish event')); - - _duplex2.on('error', common.mustCall(function (err) { - assert.strictEqual(err, _expected); - })); - - _duplex2.destroy(_expected); - - assert.strictEqual(_duplex2.destroyed, true); -} -{ - var _expected2 = new Error('kaboom'); - - var _duplex3 = new Duplex({ - write: function write(chunk, enc, cb) { - cb(); - }, - read: function read() {}, - destroy: common.mustCall(function (err, cb) { - 
assert.strictEqual(err, _expected2); - cb(); - }) - }); - - _duplex3.resume(); - - _duplex3.on('end', common.mustNotCall('no end event')); - - _duplex3.on('finish', common.mustNotCall('no finish event')); // error is swallowed by the custom _destroy - - - _duplex3.on('error', common.mustNotCall('no error event')); - - _duplex3.on('close', common.mustCall()); - - _duplex3.destroy(_expected2); - - assert.strictEqual(_duplex3.destroyed, true); -} -{ - var _duplex4 = new Duplex({ - write: function write(chunk, enc, cb) { - cb(); - }, - read: function read() {} - }); - - _duplex4._destroy = common.mustCall(function (err, cb) { - assert.strictEqual(err, null); - cb(); - }); - - _duplex4.destroy(); - - assert.strictEqual(_duplex4.destroyed, true); -} -{ - var _duplex5 = new Duplex({ - write: function write(chunk, enc, cb) { - cb(); - }, - read: function read() {} - }); - - _duplex5.resume(); - - _duplex5._destroy = common.mustCall(function (err, cb) { - var _this = this; - - assert.strictEqual(err, null); - process.nextTick(function () { - _this.push(null); - - _this.end(); - - cb(); - }); - }); - var fail = common.mustNotCall('no finish or end event'); - - _duplex5.on('finish', fail); - - _duplex5.on('end', fail); - - _duplex5.destroy(); - - _duplex5.removeListener('end', fail); - - _duplex5.removeListener('finish', fail); - - _duplex5.on('end', common.mustCall()); - - _duplex5.on('finish', common.mustCall()); - - assert.strictEqual(_duplex5.destroyed, true); -} -{ - var _duplex6 = new Duplex({ - write: function write(chunk, enc, cb) { - cb(); - }, - read: function read() {} - }); - - var _expected3 = new Error('kaboom'); - - _duplex6._destroy = common.mustCall(function (err, cb) { - assert.strictEqual(err, null); - cb(_expected3); - }); - - _duplex6.on('finish', common.mustNotCall('no finish event')); - - _duplex6.on('end', common.mustNotCall('no end event')); - - _duplex6.on('error', common.mustCall(function (err) { - assert.strictEqual(err, _expected3); - })); - - _duplex6.destroy(); - - assert.strictEqual(_duplex6.destroyed, true); -} -{ - var _duplex7 = new Duplex({ - write: function write(chunk, enc, cb) { - cb(); - }, - read: function read() {}, - allowHalfOpen: true - }); - - _duplex7.resume(); - - _duplex7.on('finish', common.mustNotCall()); - - _duplex7.on('end', common.mustNotCall()); - - _duplex7.destroy(); - - assert.strictEqual(_duplex7.destroyed, true); -} -{ - var _duplex8 = new Duplex({ - write: function write(chunk, enc, cb) { - cb(); - }, - read: function read() {} - }); - - _duplex8.destroyed = true; - assert.strictEqual(_duplex8.destroyed, true); // the internal destroy() mechanism should not be triggered - - _duplex8.on('finish', common.mustNotCall()); - - _duplex8.on('end', common.mustNotCall()); - - _duplex8.destroy(); -} -{ - function MyDuplex() { - assert.strictEqual(this.destroyed, false); - this.destroyed = false; - Duplex.call(this); - } - - Object.setPrototypeOf(MyDuplex.prototype, Duplex.prototype); - Object.setPrototypeOf(MyDuplex, Duplex); - new MyDuplex(); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-duplex-end.js b/test/parallel/test-stream-duplex-end.js deleted file mode 100644 index 194648950a..0000000000 --- a/test/parallel/test-stream-duplex-end.js +++ 
/dev/null @@ -1,73 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var Duplex = require('../../').Duplex; - -{ - var stream = new Duplex({ - read: function read() {} - }); - assert.strictEqual(stream.allowHalfOpen, true); - stream.on('finish', common.mustNotCall()); - assert.strictEqual(stream.listenerCount('end'), 0); - stream.resume(); - stream.push(null); -} -{ - var _stream = new Duplex({ - read: function read() {}, - allowHalfOpen: false - }); - - assert.strictEqual(_stream.allowHalfOpen, false); - - _stream.on('finish', common.mustCall()); - - assert.strictEqual(_stream.listenerCount('end'), 1); - - _stream.resume(); - - _stream.push(null); -} -{ - var _stream2 = new Duplex({ - read: function read() {}, - allowHalfOpen: false - }); - - assert.strictEqual(_stream2.allowHalfOpen, false); - _stream2._writableState.ended = true; - - _stream2.on('finish', common.mustNotCall()); - - assert.strictEqual(_stream2.listenerCount('end'), 1); - - _stream2.resume(); - - _stream2.push(null); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-duplex.js b/test/parallel/test-stream-duplex.js deleted file mode 100644 index c32c7553d0..0000000000 --- a/test/parallel/test-stream-duplex.js +++ /dev/null @@ -1,82 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var Duplex = require('../../').Duplex; - -var stream = new Duplex({ - objectMode: true -}); -assert(Duplex() instanceof Duplex); -assert(stream._readableState.objectMode); -assert(stream._writableState.objectMode); -assert(stream.allowHalfOpen); -assert.strictEqual(stream.listenerCount('end'), 0); -var written; -var read; - -stream._write = function (obj, _, cb) { - written = obj; - cb(); -}; - -stream._read = function () {}; - -stream.on('data', function (obj) { - read = obj; -}); -stream.push({ - val: 1 -}); -stream.end({ - val: 2 -}); -process.on('exit', function () { - assert.strictEqual(read.val, 1); - assert.strictEqual(written.val, 2); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-end-paused.js b/test/parallel/test-stream-end-paused.js deleted file mode 100644 index 71e4460b0b..0000000000 --- a/test/parallel/test-stream-end-paused.js +++ /dev/null @@ -1,72 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
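Aside on the test-stream-duplex.js hunk above: it covers the object-mode basics of Duplex. A short sketch of the same behaviour, again using Node's core stream module for illustration:

    'use strict';
    const assert = require('assert');
    const { Duplex } = require('stream');

    const d = new Duplex({
      objectMode: true,
      read() {},                                   // data is pushed manually below
      write(obj, enc, cb) { console.log('wrote', obj); cb(); }
    });

    // objectMode given once in the options applies to both sides.
    assert.ok(d.readableObjectMode);
    assert.ok(d.writableObjectMode);

    d.on('data', (obj) => console.log('read', obj));
    d.push({ val: 1 });   // readable side
    d.end({ val: 2 });    // writable side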
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); // Make sure we don't miss the end event for paused 0-length streams - - -var Readable = require('../../').Readable; - -var stream = new Readable(); -var calledRead = false; - -stream._read = function () { - assert(!calledRead); - calledRead = true; - this.push(null); -}; - -stream.on('data', function () { - throw new Error('should not ever get data'); -}); -stream.pause(); -setTimeout(common.mustCall(function () { - stream.on('end', common.mustCall()); - stream.resume(); -}), 1); -process.on('exit', function () { - assert(calledRead); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-events-prepend.js b/test/parallel/test-stream-events-prepend.js deleted file mode 100644 index adfe99df5c..0000000000 --- a/test/parallel/test-stream-events-prepend.js +++ /dev/null @@ -1,94 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var stream = require('../../'); - -var Writable = -/*#__PURE__*/ -function (_stream$Writable) { - _inherits(Writable, _stream$Writable); - - function Writable() { - var _this; - - _classCallCheck(this, Writable); - - _this = _possibleConstructorReturn(this, _getPrototypeOf(Writable).call(this)); - _this.prependListener = undefined; - return _this; - } - - _createClass(Writable, [{ - key: "_write", - value: function _write(chunk, end, cb) { - cb(); - } - }]); - - return Writable; -}(stream.Writable); - -var Readable = -/*#__PURE__*/ -function (_stream$Readable) { - _inherits(Readable, _stream$Readable); - - function Readable() { - _classCallCheck(this, Readable); - - return _possibleConstructorReturn(this, _getPrototypeOf(Readable).apply(this, arguments)); - } - - _createClass(Readable, [{ - key: "_read", - value: function _read() { - this.push(null); - } - }]); - - return Readable; -}(stream.Readable); - -var w = new Writable(); -w.on('pipe', common.mustCall()); -var r = new Readable(); -r.pipe(w); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-finished.js b/test/parallel/test-stream-finished.js deleted file mode 100644 index 086962c8b1..0000000000 --- a/test/parallel/test-stream-finished.js +++ /dev/null @@ -1,177 +0,0 @@ -"use strict"; - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Writable = _require.Writable, - Readable = _require.Readable, - Transform = _require.Transform, - finished = _require.finished; - -var assert = require('assert/'); - -var fs = require('fs'); - -var promisify = require('util-promisify'); - -{ - var rs = new Readable({ - read: function read() {} - }); - finished(rs, common.mustCall(function (err) { - assert(!err, 'no error'); - })); - 
rs.push(null); - rs.resume(); -} -{ - var ws = new Writable({ - write: function write(data, enc, cb) { - cb(); - } - }); - finished(ws, common.mustCall(function (err) { - assert(!err, 'no error'); - })); - ws.end(); -} -{ - var tr = new Transform({ - transform: function transform(data, enc, cb) { - cb(); - } - }); - var finish = false; - var ended = false; - tr.on('end', function () { - ended = true; - }); - tr.on('finish', function () { - finish = true; - }); - finished(tr, common.mustCall(function (err) { - assert(!err, 'no error'); - assert(finish); - assert(ended); - })); - tr.end(); - tr.resume(); -} -{ - var _rs = fs.createReadStream(__filename); - - _rs.resume(); - - finished(_rs, common.mustCall()); -} -{ - var finishedPromise = promisify(finished); - - function run() { - return _run.apply(this, arguments); - } - - function _run() { - _run = _asyncToGenerator(function* () { - var rs = fs.createReadStream(__filename); - var done = common.mustCall(); - var ended = false; - rs.resume(); - rs.on('end', function () { - ended = true; - }); - yield finishedPromise(rs); - assert(ended); - done(); - }); - return _run.apply(this, arguments); - } - - run(); -} -{ - var _rs2 = fs.createReadStream('file-does-not-exist'); - - finished(_rs2, common.mustCall(function (err) { - assert.strictEqual(err.code, 'ENOENT'); - })); -} -{ - var _rs3 = new Readable(); - - finished(_rs3, common.mustCall(function (err) { - assert(!err, 'no error'); - })); - - _rs3.push(null); - - _rs3.emit('close'); // should not trigger an error - - - _rs3.resume(); -} -{ - var _rs4 = new Readable(); - - finished(_rs4, common.mustCall(function (err) { - assert(err, 'premature close error'); - })); - - _rs4.emit('close'); // should trigger error - - - _rs4.push(null); - - _rs4.resume(); -} // Test that calling returned function removes listeners - -{ - var _ws = new Writable({ - write: function write(data, env, cb) { - cb(); - } - }); - - var removeListener = finished(_ws, common.mustNotCall()); - removeListener(); - - _ws.end(); -} -{ - var _rs5 = new Readable(); - - var removeListeners = finished(_rs5, common.mustNotCall()); - removeListeners(); - - _rs5.emit('close'); - - _rs5.push(null); - - _rs5.resume(); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-ispaused.js b/test/parallel/test-stream-ispaused.js deleted file mode 100644 index 278f19b636..0000000000 --- a/test/parallel/test-stream-ispaused.js +++ /dev/null @@ -1,64 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
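Aside on the test-stream-finished.js hunk above: its cases reduce to the stream.finished() contract, including the cleanup function it returns. A condensed sketch against Node's core stream module, illustrative only:

    'use strict';
    const { Readable, Writable, finished } = require('stream');

    const rs = new Readable({ read() {} });
    finished(rs, (err) => console.log('readable done:', err || 'no error'));
    rs.push(null);
    rs.resume();

    const ws = new Writable({ write(chunk, enc, cb) { cb(); } });
    // finished() returns a function that detaches the listeners it installed,
    // which is what the removeListener cases in the deleted test exercise.
    const cleanup = finished(ws, () => { throw new Error('should not be called'); });
    cleanup();
    ws.end();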
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var readable = new stream.Readable(); // _read is a noop, here. - -readable._read = Function(); // default state of a stream is not "paused" - -assert.ok(!readable.isPaused()); // make the stream start flowing... - -readable.on('data', Function()); // still not paused. - -assert.ok(!readable.isPaused()); -readable.pause(); -assert.ok(readable.isPaused()); -readable.resume(); -assert.ok(!readable.isPaused()); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-objectmode-undefined.js b/test/parallel/test-stream-objectmode-undefined.js deleted file mode 100644 index 0a13eb1e98..0000000000 --- a/test/parallel/test-stream-objectmode-undefined.js +++ /dev/null @@ -1,69 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var _require = require('../../'), - Readable = _require.Readable, - Writable = _require.Writable, - Transform = _require.Transform; - -{ - var stream = new Readable({ - objectMode: true, - read: common.mustCall(function () { - stream.push(undefined); - stream.push(null); - }) - }); - stream.on('data', common.mustCall(function (chunk) { - assert.strictEqual(chunk, undefined); - })); -} -{ - var _stream = new Writable({ - objectMode: true, - write: common.mustCall(function (chunk) { - assert.strictEqual(chunk, undefined); - }) - }); - - _stream.write(undefined); -} -{ - var _stream2 = new Transform({ - objectMode: true, - transform: common.mustCall(function (chunk) { - _stream2.push(chunk); - }) - }); - - _stream2.on('data', common.mustCall(function (chunk) { - assert.strictEqual(chunk, undefined); - })); - - _stream2.write(undefined); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-once-readable-pipe.js b/test/parallel/test-stream-once-readable-pipe.js deleted file mode 100644 index 1cdea29f0a..0000000000 --- a/test/parallel/test-stream-once-readable-pipe.js +++ /dev/null @@ -1,88 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var _require = require('../../'), - Readable = _require.Readable, - Writable = _require.Writable; // This test ensures that if have 'readable' listener -// on Readable instance it will not 
disrupt the pipe. - - -{ - var receivedData = ''; - var w = new Writable({ - write: function write(chunk, env, callback) { - receivedData += chunk; - callback(); - } - }); - var data = ['foo', 'bar', 'baz']; - var r = new Readable({ - read: function read() {} - }); - r.once('readable', common.mustCall()); - r.pipe(w); - r.push(data[0]); - r.push(data[1]); - r.push(data[2]); - r.push(null); - w.on('finish', common.mustCall(function () { - assert.strictEqual(receivedData, data.join('')); - })); -} -{ - var _receivedData = ''; - - var _w = new Writable({ - write: function write(chunk, env, callback) { - _receivedData += chunk; - callback(); - } - }); - - var _data = ['foo', 'bar', 'baz']; - - var _r = new Readable({ - read: function read() {} - }); - - _r.pipe(_w); - - _r.push(_data[0]); - - _r.push(_data[1]); - - _r.push(_data[2]); - - _r.push(null); - - _r.once('readable', common.mustCall()); - - _w.on('finish', common.mustCall(function () { - assert.strictEqual(_receivedData, _data.join('')); - })); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-after-end.js b/test/parallel/test-stream-pipe-after-end.js deleted file mode 100644 index 5e39fce11d..0000000000 --- a/test/parallel/test-stream-pipe-after-end.js +++ /dev/null @@ -1,137 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -// Copyright Joyent, Inc. and other Node contributors. 
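Aside on the test-stream-once-readable-pipe.js hunk above: it asserts that a one-shot 'readable' listener does not stall a pipe. A sketch of that scenario with Node's core stream module (illustrative only):

    'use strict';
    const { Readable, Writable } = require('stream');

    let received = '';
    const w = new Writable({
      write(chunk, enc, cb) { received += chunk; cb(); }
    });
    const r = new Readable({ read() {} });

    // The once('readable') listener fires and is removed; the pipe still
    // delivers every chunk to the writable.
    r.once('readable', () => {});
    r.pipe(w);

    for (const part of ['foo', 'bar', 'baz']) r.push(part);
    r.push(null);

    w.on('finish', () => console.log('received:', received)); // "foobarbaz"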
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var Readable = require('../../lib/_stream_readable'); - -var Writable = require('../../lib/_stream_writable'); - -var TestReadable = -/*#__PURE__*/ -function (_Readable) { - _inherits(TestReadable, _Readable); - - function TestReadable(opt) { - var _this; - - _classCallCheck(this, TestReadable); - - _this = _possibleConstructorReturn(this, _getPrototypeOf(TestReadable).call(this, opt)); - _this._ended = false; - return _this; - } - - _createClass(TestReadable, [{ - key: "_read", - value: function _read() { - if (this._ended) this.emit('error', new Error('_read called twice')); - this._ended = true; - this.push(null); - } - }]); - - return TestReadable; -}(Readable); - -var TestWritable = -/*#__PURE__*/ -function (_Writable) { - _inherits(TestWritable, _Writable); - - function TestWritable(opt) { - var _this2; - - _classCallCheck(this, TestWritable); - - _this2 = _possibleConstructorReturn(this, _getPrototypeOf(TestWritable).call(this, opt)); - _this2._written = []; - return _this2; - } - - _createClass(TestWritable, [{ - key: "_write", - value: function _write(chunk, encoding, cb) { - this._written.push(chunk); - - cb(); - } - }]); - - return TestWritable; -}(Writable); // this one should not emit 'end' until we read() from it later. - - -var ender = new TestReadable(); // what happens when you pipe() a Readable that's already ended? 
- -var piper = new TestReadable(); // pushes EOF null, and length=0, so this will trigger 'end' - -piper.read(); -setTimeout(common.mustCall(function () { - ender.on('end', common.mustCall()); - var c = ender.read(); - assert.strictEqual(c, null); - var w = new TestWritable(); - w.on('finish', common.mustCall()); - piper.pipe(w); -}), 1); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-await-drain-manual-resume.js b/test/parallel/test-stream-pipe-await-drain-manual-resume.js deleted file mode 100644 index aacd62cdf7..0000000000 --- a/test/parallel/test-stream-pipe-await-drain-manual-resume.js +++ /dev/null @@ -1,100 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var stream = require('../../'); - -var assert = require('assert/'); // A consumer stream with a very low highWaterMark, which starts in a state -// where it buffers the chunk it receives rather than indicating that they -// have been consumed. - - -var writable = new stream.Writable({ - highWaterMark: 5 -}); -var isCurrentlyBufferingWrites = true; -var queue = []; - -writable._write = function (chunk, encoding, cb) { - if (isCurrentlyBufferingWrites) queue.push({ - chunk: chunk, - cb: cb - });else cb(); -}; - -var readable = new stream.Readable({ - read: function read() {} -}); -readable.pipe(writable); -readable.once('pause', common.mustCall(function () { - assert.strictEqual(readable._readableState.awaitDrain, 1, 'Expected awaitDrain to equal 1 but instead got ' + "".concat(readable._readableState.awaitDrain)); // First pause, resume manually. The next write() to writable will still - // return false, because chunks are still being buffered, so it will increase - // the awaitDrain counter again. - - process.nextTick(common.mustCall(function () { - readable.resume(); - })); - readable.once('pause', common.mustCall(function () { - assert.strictEqual(readable._readableState.awaitDrain, 1, '.resume() should not reset the counter but instead got ' + "".concat(readable._readableState.awaitDrain)); // Second pause, handle all chunks from now on. Once all callbacks that - // are currently queued up are handled, the awaitDrain drain counter should - // fall back to 0 and all chunks that are pending on the readable side - // should be flushed. - - isCurrentlyBufferingWrites = false; - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; - - try { - for (var _iterator = queue[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var queued = _step.value; - queued.cb(); - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator.return != null) { - _iterator.return(); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } - } - })); -})); -readable.push(bufferShim.alloc(100)); // Fill the writable HWM, first 'pause'. - -readable.push(bufferShim.alloc(100)); // Second 'pause'. - -readable.push(bufferShim.alloc(100)); // Should get through to the writable. 
- -readable.push(null); -writable.on('finish', common.mustCall(function () { - assert.strictEqual(readable._readableState.awaitDrain, 0, 'awaitDrain should equal 0 after all chunks are written but instead got' + "".concat(readable._readableState.awaitDrain)); // Everything okay, all chunks were written. -})); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-await-drain-push-while-write.js b/test/parallel/test-stream-pipe-await-drain-push-while-write.js deleted file mode 100644 index 9ee200a899..0000000000 --- a/test/parallel/test-stream-pipe-await-drain-push-while-write.js +++ /dev/null @@ -1,59 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var stream = require('../../'); - -var assert = require('assert/'); - -var writable = new stream.Writable({ - write: common.mustCall(function (chunk, encoding, cb) { - assert.strictEqual(readable._readableState.awaitDrain, 0); - - if (chunk.length === 32 * 1024) { - // first chunk - readable.push(bufferShim.alloc(34 * 1024)); // above hwm - // We should check if awaitDrain counter is increased in the next - // tick, because awaitDrain is incremented after this method finished - - process.nextTick(function () { - assert.strictEqual(readable._readableState.awaitDrain, 1); - }); - } - - cb(); - }, 3) -}); // A readable stream which produces two buffers. - -var bufs = [bufferShim.alloc(32 * 1024), bufferShim.alloc(33 * 1024)]; // above hwm - -var readable = new stream.Readable({ - read: function read() { - while (bufs.length > 0) { - this.push(bufs.shift()); - } - } -}); -readable.pipe(writable); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-await-drain.js b/test/parallel/test-stream-pipe-await-drain.js deleted file mode 100644 index f63ae4eeec..0000000000 --- a/test/parallel/test-stream-pipe-await-drain.js +++ /dev/null @@ -1,69 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var stream = require('../../'); - -var assert = require('assert/'); // This is very similar to test-stream-pipe-cleanup-pause.js. 
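Aside on the await-drain hunks above: they all revolve around back-pressure. pipe() pauses the source whenever the destination's write() returns false and resumes it on 'drain'. A hand-rolled sketch of that same contract (Node core streams, illustrative values):

    'use strict';
    const { Readable, Writable } = require('stream');

    const dest = new Writable({
      highWaterMark: 4,                            // tiny buffer: write() returns false quickly
      write(chunk, enc, cb) { setImmediate(cb); }  // deliberately slow consumer
    });

    const src = Readable.from(['aaaa', 'bbbb', 'cccc', 'dddd']);

    src.on('data', (chunk) => {
      if (!dest.write(chunk)) {
        src.pause();                               // what pipe() does internally...
        dest.once('drain', () => src.resume());    // ...and how it resumes
      }
    });
    src.on('end', () => dest.end());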
- - -var reader = new stream.Readable(); -var writer1 = new stream.Writable(); -var writer2 = new stream.Writable(); -var writer3 = new stream.Writable(); // 560000 is chosen here because it is larger than the (default) highWaterMark -// and will cause `.write()` to return false -// See: https://github.com/nodejs/node/issues/5820 - -var buffer = bufferShim.allocUnsafe(560000); - -reader._read = function () {}; - -writer1._write = common.mustCall(function (chunk, encoding, cb) { - this.emit('chunk-received'); - cb(); -}, 1); -writer1.once('chunk-received', function () { - assert.strictEqual(reader._readableState.awaitDrain, 0, 'awaitDrain initial value should be 0, actual is ' + reader._readableState.awaitDrain); - setImmediate(function () { - // This one should *not* get through to writer1 because writer2 is not - // "done" processing. - reader.push(buffer); - }); -}); // A "slow" consumer: - -writer2._write = common.mustCall(function (chunk, encoding, cb) { - assert.strictEqual(reader._readableState.awaitDrain, 1, 'awaitDrain should be 1 after first push, actual is ' + reader._readableState.awaitDrain); // Not calling cb here to "simulate" slow stream. - // This should be called exactly once, since the first .write() call - // will return false. -}, 1); -writer3._write = common.mustCall(function (chunk, encoding, cb) { - assert.strictEqual(reader._readableState.awaitDrain, 2, 'awaitDrain should be 2 after second push, actual is ' + reader._readableState.awaitDrain); // Not calling cb here to "simulate" slow stream. - // This should be called exactly once, since the first .write() call - // will return false. -}, 1); -reader.pipe(writer1); -reader.pipe(writer2); -reader.pipe(writer3); -reader.push(buffer); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-cleanup-pause.js b/test/parallel/test-stream-pipe-cleanup-pause.js deleted file mode 100644 index 8f1e11df82..0000000000 --- a/test/parallel/test-stream-pipe-cleanup-pause.js +++ /dev/null @@ -1,58 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var stream = require('../../'); - -var reader = new stream.Readable(); -var writer1 = new stream.Writable(); -var writer2 = new stream.Writable(); // 560000 is chosen here because it is larger than the (default) highWaterMark -// and will cause `.write()` to return false -// See: https://github.com/nodejs/node/issues/2323 - -var buffer = bufferShim.allocUnsafe(560000); - -reader._read = function () {}; - -writer1._write = common.mustCall(function (chunk, encoding, cb) { - this.emit('chunk-received'); - cb(); -}, 1); -writer1.once('chunk-received', function () { - reader.unpipe(writer1); - reader.pipe(writer2); - reader.push(buffer); - setImmediate(function () { - reader.push(buffer); - setImmediate(function () { - reader.push(buffer); - }); - }); -}); -writer2._write = common.mustCall(function (chunk, encoding, cb) { - cb(); -}, 3); -reader.pipe(writer1); -reader.push(buffer); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return 
process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-cleanup.js b/test/parallel/test-stream-pipe-cleanup.js deleted file mode 100644 index a761f38c2e..0000000000 --- a/test/parallel/test-stream-pipe-cleanup.js +++ /dev/null @@ -1,162 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ -// This test asserts that Stream.prototype.pipe does not leave listeners -// hanging on the source or dest. - - -require('../common'); - -var stream = require('../../'); - -var assert = require('assert/'); - -(function () { - if (/^v0\.8\./.test(process.version)) return; - - function Writable() { - this.writable = true; - this.endCalls = 0; - - require('stream').Stream.call(this); - } - - Object.setPrototypeOf(Writable.prototype, require('stream').Stream.prototype); - Object.setPrototypeOf(Writable, require('stream').Stream); - - Writable.prototype.end = function () { - this.endCalls++; - }; - - Writable.prototype.destroy = function () { - this.endCalls++; - }; - - function Readable() { - this.readable = true; - - require('stream').Stream.call(this); - } - - Object.setPrototypeOf(Readable.prototype, require('stream').Stream.prototype); - Object.setPrototypeOf(Readable, require('stream').Stream); - - function Duplex() { - this.readable = true; - Writable.call(this); - } - - Object.setPrototypeOf(Duplex.prototype, Writable.prototype); - Object.setPrototypeOf(Duplex, Writable); - var i = 0; - var limit = 100; - var w = new Writable(); - var r; - - for (i = 0; i < limit; i++) { - r = new Readable(); - r.pipe(w); - r.emit('end'); - } - - assert.strictEqual(r.listeners('end').length, 0); - assert.strictEqual(w.endCalls, limit); - w.endCalls = 0; - - for (i = 0; i < limit; i++) { - r = new Readable(); - r.pipe(w); - r.emit('close'); - } - - assert.strictEqual(r.listeners('close').length, 0); - assert.strictEqual(w.endCalls, limit); - w.endCalls = 0; - r = new Readable(); - - for (i = 0; i < limit; i++) { - w = new Writable(); - r.pipe(w); - w.emit('close'); - } - - assert.strictEqual(w.listeners('close').length, 0); - r = new Readable(); - w = new Writable(); - var d = new Duplex(); - r.pipe(d); // pipeline A - - d.pipe(w); // pipeline B - - assert.strictEqual(r.listeners('end').length, 2); // A.onend, A.cleanup - - assert.strictEqual(r.listeners('close').length, 2); // A.onclose, A.cleanup 
- - assert.strictEqual(d.listeners('end').length, 2); // B.onend, B.cleanup - // A.cleanup, B.onclose, B.cleanup - - assert.strictEqual(d.listeners('close').length, 3); - assert.strictEqual(w.listeners('end').length, 0); - assert.strictEqual(w.listeners('close').length, 1); // B.cleanup - - r.emit('end'); - assert.strictEqual(d.endCalls, 1); - assert.strictEqual(w.endCalls, 0); - assert.strictEqual(r.listeners('end').length, 0); - assert.strictEqual(r.listeners('close').length, 0); - assert.strictEqual(d.listeners('end').length, 2); // B.onend, B.cleanup - - assert.strictEqual(d.listeners('close').length, 2); // B.onclose, B.cleanup - - assert.strictEqual(w.listeners('end').length, 0); - assert.strictEqual(w.listeners('close').length, 1); // B.cleanup - - d.emit('end'); - assert.strictEqual(d.endCalls, 1); - assert.strictEqual(w.endCalls, 1); - assert.strictEqual(r.listeners('end').length, 0); - assert.strictEqual(r.listeners('close').length, 0); - assert.strictEqual(d.listeners('end').length, 0); - assert.strictEqual(d.listeners('close').length, 0); - assert.strictEqual(w.listeners('end').length, 0); - assert.strictEqual(w.listeners('close').length, 0); -})(); - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-error-handling.js b/test/parallel/test-stream-pipe-error-handling.js deleted file mode 100644 index b86a4d01fb..0000000000 --- a/test/parallel/test-stream-pipe-error-handling.js +++ /dev/null @@ -1,136 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
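Aside on the test-stream-pipe-cleanup.js hunk above: it guards against listener leaks from pipe(). The observable behaviour, sketched against Node's core stream module:

    'use strict';
    const assert = require('assert');
    const { Readable, Writable } = require('stream');

    const src = new Readable({ read() {} });
    const dst = new Writable({ write(chunk, enc, cb) { cb(); } });

    src.pipe(dst);
    assert.ok(src.listenerCount('data') > 0);   // pipe() wires its handlers up

    src.unpipe(dst);
    // unpipe() tears those handlers down again, so neither stream keeps
    // the other alive through leftover listeners.
    assert.strictEqual(src.listenerCount('data'), 0);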
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var Stream = require('stream').Stream; - -{ - var source = new Stream(); - var dest = new Stream(); - source.pipe(dest); - var gotErr = null; - source.on('error', function (err) { - gotErr = err; - }); - var err = new Error('This stream turned into bacon.'); - source.emit('error', err); - assert.strictEqual(gotErr, err); -} -{ - var _source = new Stream(); - - var _dest = new Stream(); - - _source.pipe(_dest); - - var _err = new Error('This stream turned into bacon.'); - - var _gotErr = null; - - try { - _source.emit('error', _err); - } catch (e) { - _gotErr = e; - } - - assert.strictEqual(_gotErr, _err); -} -{ - var R = require('../../').Readable; - - var W = require('../../').Writable; - - var r = new R(); - var w = new W(); - var removed = false; - r._read = common.mustCall(function () { - setTimeout(common.mustCall(function () { - assert(removed); - assert.throws(function () { - w.emit('error', new Error('fail')); - }, /^Error: fail$/); - }), 1); - }); - w.on('error', myOnError); - r.pipe(w); - w.removeListener('error', myOnError); - removed = true; - - function myOnError() { - throw new Error('this should not happen'); - } -} -{ - var _R = require('../../').Readable; - - var _W = require('../../').Writable; - - var _r = new _R(); - - var _w = new _W(); - - var _removed = false; - _r._read = common.mustCall(function () { - setTimeout(common.mustCall(function () { - assert(_removed); - - _w.emit('error', new Error('fail')); - }), 1); - }); - - _w.on('error', common.mustCall()); - - _w._write = function () {}; - - _r.pipe(_w); // Removing some OTHER random listener should not do anything - - - _w.removeListener('error', function () {}); - - _removed = true; -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-event.js b/test/parallel/test-stream-pipe-event.js deleted file mode 100644 index 43d0ab0971..0000000000 --- a/test/parallel/test-stream-pipe-event.js +++ /dev/null @@ -1,76 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
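Aside on the test-stream-pipe-error-handling.js hunk above: it documents that pipe() does not forward 'error' events, so each stream in a legacy pipe chain needs its own handler. That is the gap pipeline(), whose tests are removed further down in this patch, was added to close. A sketch with Node's core stream module:

    'use strict';
    const { Readable, Writable } = require('stream');

    const src = new Readable({ read() {} });
    const dst = new Writable({ write(chunk, enc, cb) { cb(); } });

    src.pipe(dst);

    // Without a handler on each side, an emitted 'error' becomes an
    // uncaught exception; pipe() does not proxy it to the other stream.
    src.on('error', (err) => console.error('source failed:', err.message));
    dst.on('error', (err) => console.error('destination failed:', err.message));

    src.emit('error', new Error('kaboom'));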
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var stream = require('../../'); - -var assert = require('assert/'); - -function Writable() { - this.writable = true; - - require('stream').Stream.call(this); -} - -Object.setPrototypeOf(Writable.prototype, require('stream').Stream.prototype); -Object.setPrototypeOf(Writable, require('stream').Stream); - -function Readable() { - this.readable = true; - - require('stream').Stream.call(this); -} - -Object.setPrototypeOf(Readable.prototype, require('stream').Stream.prototype); -Object.setPrototypeOf(Readable, require('stream').Stream); -var passed = false; -var w = new Writable(); -w.on('pipe', function (src) { - passed = true; -}); -var r = new Readable(); -r.pipe(w); -assert.ok(passed); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-flow-after-unpipe.js b/test/parallel/test-stream-pipe-flow-after-unpipe.js deleted file mode 100644 index a87cc937a7..0000000000 --- a/test/parallel/test-stream-pipe-flow-after-unpipe.js +++ /dev/null @@ -1,54 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Readable = _require.Readable, - Writable = _require.Writable; // Tests that calling .unpipe() un-blocks a stream that is paused because -// it is waiting on the writable side to finish a write(). - - -var rs = new Readable({ - highWaterMark: 1, - // That this gets called at least 20 times is the real test here. - read: common.mustCallAtLeast(function () { - return rs.push('foo'); - }, 20) -}); -var ws = new Writable({ - highWaterMark: 1, - write: common.mustCall(function () { - // Ignore the callback, this write() simply never finishes. - setImmediate(function () { - return rs.unpipe(ws); - }); - }) -}); -var chunks = 0; -rs.on('data', common.mustCallAtLeast(function () { - chunks++; - if (chunks >= 20) rs.pause(); // Finish this test. 
-})); -rs.pipe(ws); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-flow.js b/test/parallel/test-stream-pipe-flow.js deleted file mode 100644 index f1ba58e46d..0000000000 --- a/test/parallel/test-stream-pipe-flow.js +++ /dev/null @@ -1,95 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Readable = _require.Readable, - Writable = _require.Writable, - PassThrough = _require.PassThrough; - -{ - var ticks = 17; - var rs = new Readable({ - objectMode: true, - read: function read() { - if (ticks-- > 0) return process.nextTick(function () { - return rs.push({}); - }); - rs.push({}); - rs.push(null); - } - }); - var ws = new Writable({ - highWaterMark: 0, - objectMode: true, - write: function write(data, end, cb) { - return setImmediate(cb); - } - }); - rs.on('end', common.mustCall()); - ws.on('finish', common.mustCall()); - rs.pipe(ws); -} -{ - var missing = 8; - - var _rs = new Readable({ - objectMode: true, - read: function read() { - if (missing--) _rs.push({});else _rs.push(null); - } - }); - - var pt = _rs.pipe(new PassThrough({ - objectMode: true, - highWaterMark: 2 - })).pipe(new PassThrough({ - objectMode: true, - highWaterMark: 2 - })); - - pt.on('end', function () { - wrapper.push(null); - }); - var wrapper = new Readable({ - objectMode: true, - read: function read() { - process.nextTick(function () { - var data = pt.read(); - - if (data === null) { - pt.once('readable', function () { - data = pt.read(); - if (data !== null) wrapper.push(data); - }); - } else { - wrapper.push(data); - } - }); - } - }); - wrapper.resume(); - wrapper.on('end', common.mustCall()); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-manual-resume.js b/test/parallel/test-stream-pipe-manual-resume.js deleted file mode 100644 index 1ac02b0780..0000000000 --- a/test/parallel/test-stream-pipe-manual-resume.js +++ /dev/null @@ -1,62 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var stream = require('../../'); - -function test(throwCodeInbetween) { - // Check that a pipe does not stall if .read() is called unexpectedly - // (i.e. the stream is not resumed by the pipe). 
- var n = 1000; - var counter = n; - var rs = stream.Readable({ - objectMode: true, - read: common.mustCallAtLeast(function () { - if (--counter >= 0) rs.push({ - counter: counter - });else rs.push(null); - }, n) - }); - var ws = stream.Writable({ - objectMode: true, - write: common.mustCall(function (data, enc, cb) { - setImmediate(cb); - }, n) - }); - setImmediate(function () { - return throwCodeInbetween(rs, ws); - }); - rs.pipe(ws); -} - -test(function (rs) { - return rs.read(); -}); -test(function (rs) { - return rs.resume(); -}); -test(function () { - return 0; -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-multiple-pipes.js b/test/parallel/test-stream-pipe-multiple-pipes.js deleted file mode 100644 index 011923a043..0000000000 --- a/test/parallel/test-stream-pipe-multiple-pipes.js +++ /dev/null @@ -1,113 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var stream = require('../../'); - -var assert = require('assert/'); - -var readable = new stream.Readable({ - read: function read() {} -}); -var writables = []; - -var _loop = function _loop(i) { - var target = new stream.Writable({ - write: common.mustCall(function (chunk, encoding, callback) { - target.output.push(chunk); - callback(); - }, 1) - }); - target.output = []; - target.on('pipe', common.mustCall()); - readable.pipe(target); - writables.push(target); -}; - -for (var i = 0; i < 5; i++) { - _loop(i); -} - -var input = bufferShim.from([1, 2, 3, 4, 5]); -readable.push(input); // The pipe() calls will postpone emission of the 'resume' event using nextTick, -// so no data will be available to the writable streams until then. - -process.nextTick(common.mustCall(function () { - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; - - try { - for (var _iterator = writables[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var target = _step.value; - assert.deepStrictEqual(target.output, [input]); - target.on('unpipe', common.mustCall()); - readable.unpipe(target); - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator.return != null) { - _iterator.return(); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } - } - - readable.push('something else'); // This does not get through. - - readable.push(null); - readable.resume(); // Make sure the 'end' event gets emitted. 
-})); -readable.on('end', common.mustCall(function () { - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - var _iteratorError2 = undefined; - - try { - for (var _iterator2 = writables[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - var target = _step2.value; - assert.deepStrictEqual(target.output, [input]); - } - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2.return != null) { - _iterator2.return(); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } - } -})); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-same-destination-twice.js b/test/parallel/test-stream-pipe-same-destination-twice.js deleted file mode 100644 index cbe6f66ad3..0000000000 --- a/test/parallel/test-stream-pipe-same-destination-twice.js +++ /dev/null @@ -1,102 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); // Regression test for https://github.com/nodejs/node/issues/12718. -// Tests that piping a source stream twice to the same destination stream -// works, and that a subsequent unpipe() call only removes the pipe *once*. - - -var assert = require('assert/'); - -var _require = require('../../'), - PassThrough = _require.PassThrough, - Writable = _require.Writable; - -{ - var passThrough = new PassThrough(); - var dest = new Writable({ - write: common.mustCall(function (chunk, encoding, cb) { - assert.strictEqual("".concat(chunk), 'foobar'); - cb(); - }) - }); - passThrough.pipe(dest); - passThrough.pipe(dest); - assert.strictEqual(passThrough._events.data.length, 2); - assert.strictEqual(passThrough._readableState.pipesCount, 2); - assert.strictEqual(passThrough._readableState.pipes[0], dest); - assert.strictEqual(passThrough._readableState.pipes[1], dest); - passThrough.unpipe(dest); - assert.strictEqual(passThrough._events.data.length, 1); - assert.strictEqual(passThrough._readableState.pipesCount, 1); - assert.strictEqual(passThrough._readableState.pipes, dest); - passThrough.write('foobar'); - passThrough.pipe(dest); -} -{ - var _passThrough = new PassThrough(); - - var _dest = new Writable({ - write: common.mustCall(function (chunk, encoding, cb) { - assert.strictEqual("".concat(chunk), 'foobar'); - cb(); - }, 2) - }); - - _passThrough.pipe(_dest); - - _passThrough.pipe(_dest); - - assert.strictEqual(_passThrough._events.data.length, 2); - assert.strictEqual(_passThrough._readableState.pipesCount, 2); - assert.strictEqual(_passThrough._readableState.pipes[0], _dest); - assert.strictEqual(_passThrough._readableState.pipes[1], _dest); - - _passThrough.write('foobar'); -} -{ - var _passThrough2 = new PassThrough(); - - var _dest2 = new Writable({ - write: common.mustNotCall() - }); - - _passThrough2.pipe(_dest2); - - _passThrough2.pipe(_dest2); - - assert.strictEqual(_passThrough2._events.data.length, 2); - assert.strictEqual(_passThrough2._readableState.pipesCount, 2); - assert.strictEqual(_passThrough2._readableState.pipes[0], _dest2); - assert.strictEqual(_passThrough2._readableState.pipes[1], _dest2); - - 
_passThrough2.unpipe(_dest2); - - _passThrough2.unpipe(_dest2); - - assert.strictEqual(_passThrough2._events.data, undefined); - assert.strictEqual(_passThrough2._readableState.pipesCount, 0); - - _passThrough2.write('foobar'); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-unpipe-streams.js b/test/parallel/test-stream-pipe-unpipe-streams.js deleted file mode 100644 index 163212c93f..0000000000 --- a/test/parallel/test-stream-pipe-unpipe-streams.js +++ /dev/null @@ -1,103 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var _require = require('../../'), - Readable = _require.Readable, - Writable = _require.Writable; - -var source = Readable({ - read: function read() {} -}); -var dest1 = Writable({ - write: function write() {} -}); -var dest2 = Writable({ - write: function write() {} -}); -source.pipe(dest1); -source.pipe(dest2); -dest1.on('unpipe', common.mustCall()); -dest2.on('unpipe', common.mustCall()); -assert.strictEqual(source._readableState.pipes[0], dest1); -assert.strictEqual(source._readableState.pipes[1], dest2); -assert.strictEqual(source._readableState.pipes.length, 2); // Should be able to unpipe them in the reverse order that they were piped. - -source.unpipe(dest2); -assert.strictEqual(source._readableState.pipes, dest1); -assert.notStrictEqual(source._readableState.pipes, dest2); -dest2.on('unpipe', common.mustNotCall()); -source.unpipe(dest2); -source.unpipe(dest1); -assert.strictEqual(source._readableState.pipes, null); -{ - // test `cleanup()` if we unpipe all streams. 
- var _source = Readable({ - read: function read() {} - }); - - var _dest = Writable({ - write: function write() {} - }); - - var _dest2 = Writable({ - write: function write() {} - }); - - var destCount = 0; - var srcCheckEventNames = ['end', 'data']; - var destCheckEventNames = ['close', 'finish', 'drain', 'error', 'unpipe']; - var checkSrcCleanup = common.mustCall(function () { - assert.strictEqual(_source._readableState.pipes, null); - assert.strictEqual(_source._readableState.pipesCount, 0); - assert.strictEqual(_source._readableState.flowing, false); - srcCheckEventNames.forEach(function (eventName) { - assert.strictEqual(_source.listenerCount(eventName), 0, "source's '".concat(eventName, "' event listeners not removed")); - }); - }); - - function checkDestCleanup(dest) { - var currentDestId = ++destCount; - - _source.pipe(dest); - - var unpipeChecker = common.mustCall(function () { - assert.deepStrictEqual(dest.listeners('unpipe'), [unpipeChecker], "destination{".concat(currentDestId, "} should have a 'unpipe' event ") + 'listener which is `unpipeChecker`'); - dest.removeListener('unpipe', unpipeChecker); - destCheckEventNames.forEach(function (eventName) { - assert.strictEqual(dest.listenerCount(eventName), 0, "destination{".concat(currentDestId, "}'s '").concat(eventName, "' event ") + 'listeners not removed'); - }); - if (--destCount === 0) checkSrcCleanup(); - }); - dest.on('unpipe', unpipeChecker); - } - - checkDestCleanup(_dest); - checkDestCleanup(_dest2); - - _source.unpipe(); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipe-without-listenerCount.js b/test/parallel/test-stream-pipe-without-listenerCount.js deleted file mode 100644 index 041218adb7..0000000000 --- a/test/parallel/test-stream-pipe-without-listenerCount.js +++ /dev/null @@ -1,39 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var stream = require('../../'); - -var r = new stream.Stream(); -r.listenerCount = undefined; -var w = new stream.Stream(); -w.listenerCount = undefined; -w.on('pipe', function () { - r.emit('error', new Error('Readable Error')); - w.emit('error', new Error('Writable Error')); -}); -r.on('error', common.mustCall()); -w.on('error', common.mustCall()); -r.pipe(w); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipeline-queued-end-in-destroy.js b/test/parallel/test-stream-pipeline-queued-end-in-destroy.js deleted file mode 100644 index ea252acd07..0000000000 --- a/test/parallel/test-stream-pipeline-queued-end-in-destroy.js +++ /dev/null @@ -1,61 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var _require = require('../../'), - Readable = _require.Readable, - Duplex = _require.Duplex, - pipeline = _require.pipeline; // Test that the callback for pipeline() is called even when the ._destroy() -// 
method of the stream places an .end() request to itself that does not -// get processed before the destruction of the stream (i.e. the 'close' event). -// Refs: https://github.com/nodejs/node/issues/24456 - - -var readable = new Readable({ - read: common.mustCall(function () {}) -}); -var duplex = new Duplex({ - write: function write(chunk, enc, cb) {// Simulate messages queueing up. - }, - read: function read() {}, - destroy: function destroy(err, cb) { - // Call end() from inside the destroy() method, like HTTP/2 streams - // do at the time of writing. - this.end(); - cb(err); - } -}); -duplex.on('finished', common.mustNotCall()); -pipeline(readable, duplex, common.mustCall(function (err) { - assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); -})); // Write one chunk of data, and destroy the stream later. -// That should trigger the pipeline destruction. - -readable.push('foo'); -setImmediate(function () { - readable.destroy(); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-pipeline.js b/test/parallel/test-stream-pipeline.js deleted file mode 100644 index 686f57027f..0000000000 --- a/test/parallel/test-stream-pipeline.js +++ /dev/null @@ -1,483 +0,0 @@ -"use strict"; - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Stream = _require.Stream, - Writable = _require.Writable, - Readable = _require.Readable, - Transform = _require.Transform, - pipeline = _require.pipeline; - -var assert = require('assert/'); - -var http = require('http'); - -var promisify = require('util-promisify'); - -{ - var finished = false; - var processed = []; - var expected = [bufferShim.from('a'), bufferShim.from('b'), bufferShim.from('c')]; - var read = new Readable({ - read: function read() {} - }); - var write = new Writable({ - write: function write(data, enc, cb) { - processed.push(data); - cb(); - } - }); - write.on('finish', function () { - finished = true; - }); - - for (var i = 0; i < expected.length; i++) { - read.push(expected[i]); - } - - read.push(null); - pipeline(read, write, common.mustCall(function (err) { - assert.ok(!err, 'no error'); - assert.ok(finished); - assert.deepStrictEqual(processed, expected); - })); -} -{ - var _read = new Readable({ - read: function read() {} - }); - - assert.throws(function () { - pipeline(_read, function () {}); - }, /ERR_MISSING_ARGS/); - assert.throws(function () { - pipeline(function () {}); - }, /ERR_MISSING_ARGS/); - assert.throws(function () { - pipeline(); - }, /ERR_MISSING_ARGS/); -} -{ - var _read2 = new 
Readable({ - read: function read() {} - }); - - var _write = new Writable({ - write: function write(data, enc, cb) { - cb(); - } - }); - - _read2.push('data'); - - setImmediate(function () { - return _read2.destroy(); - }); - pipeline(_read2, _write, common.mustCall(function (err) { - assert.ok(err, 'should have an error'); - })); -} -{ - var _read3 = new Readable({ - read: function read() {} - }); - - var _write2 = new Writable({ - write: function write(data, enc, cb) { - cb(); - } - }); - - _read3.push('data'); - - setImmediate(function () { - return _read3.destroy(new Error('kaboom')); - }); - var dst = pipeline(_read3, _write2, common.mustCall(function (err) { - assert.strictEqual(err.message, 'kaboom'); - })); - assert.strictEqual(dst, _write2); -} -{ - var _read4 = new Readable({ - read: function read() {} - }); - - var transform = new Transform({ - transform: function transform(data, enc, cb) { - process.nextTick(cb, new Error('kaboom')); - } - }); - - var _write3 = new Writable({ - write: function write(data, enc, cb) { - cb(); - } - }); - - _read4.on('close', common.mustCall()); - - transform.on('close', common.mustCall()); - - _write3.on('close', common.mustCall()); - - var _dst = pipeline(_read4, transform, _write3, common.mustCall(function (err) { - assert.strictEqual(err.message, 'kaboom'); - })); - - assert.strictEqual(_dst, _write3); - - _read4.push('hello'); -} -{ - var server = http.createServer(function (req, res) { - var rs = new Readable({ - read: function read() { - rs.push('hello'); - rs.push(null); - } - }); - pipeline(rs, res, function () {}); - }); - server.listen(0, function () { - var req = http.request({ - port: server.address().port - }); - req.end(); - req.on('response', function (res) { - var buf = []; - res.on('data', function (data) { - return buf.push(data); - }); - res.on('end', common.mustCall(function () { - assert.deepStrictEqual(Buffer.concat(buf), bufferShim.from('hello')); - server.close(); - })); - }); - }); -} -{ - var _server = http.createServer(function (req, res) { - var sent = false; - var rs = new Readable({ - read: function read() { - if (sent) { - return; - } - - sent = true; - rs.push('hello'); - }, - destroy: common.mustCall(function (err, cb) { - // prevents fd leaks by destroying http pipelines - cb(); - }) - }); - pipeline(rs, res, function () {}); - }); - - _server.listen(0, function () { - var req = http.request({ - port: _server.address().port - }); - req.end(); - req.on('response', function (res) { - setImmediate(function () { - res.destroy(); - - _server.close(); - }); - }); - }); -} -{ - var _server2 = http.createServer(function (req, res) { - var sent = 0; - var rs = new Readable({ - read: function read() { - if (sent++ > 10) { - return; - } - - rs.push('hello'); - }, - destroy: common.mustCall(function (err, cb) { - cb(); - }) - }); - pipeline(rs, res, function () {}); - }); - - var cnt = 10; - var badSink = new Writable({ - write: function write(data, enc, cb) { - cnt--; - if (cnt === 0) process.nextTick(cb, new Error('kaboom'));else cb(); - } - }); - - _server2.listen(0, function () { - var req = http.request({ - port: _server2.address().port - }); - req.end(); - req.on('response', function (res) { - pipeline(res, badSink, common.mustCall(function (err) { - assert.strictEqual(err.message, 'kaboom'); - - _server2.close(); - })); - }); - }); -} -{ - var _server3 = http.createServer(function (req, res) { - pipeline(req, res, common.mustCall()); - }); - - _server3.listen(0, function () { - var req = http.request({ - port: 
_server3.address().port - }); - var sent = 0; - var rs = new Readable({ - read: function read() { - if (sent++ > 10) { - return; - } - - rs.push('hello'); - } - }); - pipeline(rs, req, common.mustCall(function () { - _server3.close(); - })); - req.on('response', function (res) { - var cnt = 10; - res.on('data', function () { - cnt--; - if (cnt === 0) rs.destroy(); - }); - }); - }); -} -{ - var makeTransform = function makeTransform() { - var tr = new Transform({ - transform: function transform(data, enc, cb) { - cb(null, data); - } - }); - tr.on('close', common.mustCall()); - return tr; - }; - - var rs = new Readable({ - read: function read() { - rs.push('hello'); - } - }); - var _cnt = 10; - var ws = new Writable({ - write: function write(data, enc, cb) { - _cnt--; - if (_cnt === 0) return process.nextTick(cb, new Error('kaboom')); - cb(); - } - }); - rs.on('close', common.mustCall()); - ws.on('close', common.mustCall()); - pipeline(rs, makeTransform(), makeTransform(), makeTransform(), makeTransform(), makeTransform(), makeTransform(), ws, common.mustCall(function (err) { - assert.strictEqual(err.message, 'kaboom'); - })); -} -{ - var oldStream = new Stream(); - - oldStream.pause = oldStream.resume = function () {}; - - oldStream.write = function (data) { - oldStream.emit('data', data); - return true; - }; - - oldStream.end = function () { - oldStream.emit('end'); - }; - - var _expected = [bufferShim.from('hello'), bufferShim.from('world')]; - - var _rs = new Readable({ - read: function read() { - for (var _i = 0; _i < _expected.length; _i++) { - _rs.push(_expected[_i]); - } - - _rs.push(null); - } - }); - - var _ws = new Writable({ - write: function write(data, enc, cb) { - assert.deepStrictEqual(data, _expected.shift()); - cb(); - } - }); - - var _finished = false; - - _ws.on('finish', function () { - _finished = true; - }); - - pipeline(_rs, oldStream, _ws, common.mustCall(function (err) { - assert(!err, 'no error'); - assert(_finished, 'last stream finished'); - })); -} -{ - var _oldStream = new Stream(); - - _oldStream.pause = _oldStream.resume = function () {}; - - _oldStream.write = function (data) { - _oldStream.emit('data', data); - - return true; - }; - - _oldStream.end = function () { - _oldStream.emit('end'); - }; - - var destroyableOldStream = new Stream(); - - destroyableOldStream.pause = destroyableOldStream.resume = function () {}; - - destroyableOldStream.destroy = common.mustCall(function () { - destroyableOldStream.emit('close'); - }); - - destroyableOldStream.write = function (data) { - destroyableOldStream.emit('data', data); - return true; - }; - - destroyableOldStream.end = function () { - destroyableOldStream.emit('end'); - }; - - var _rs2 = new Readable({ - read: function read() { - _rs2.destroy(new Error('stop')); - } - }); - - var _ws2 = new Writable({ - write: function write(data, enc, cb) { - cb(); - } - }); - - var _finished2 = false; - - _ws2.on('finish', function () { - _finished2 = true; - }); - - pipeline(_rs2, _oldStream, destroyableOldStream, _ws2, common.mustCall(function (err) { - assert.deepStrictEqual(err, new Error('stop')); - assert(!_finished2, 'should not finish'); - })); -} -{ - var pipelinePromise = promisify(pipeline); - - function run() { - return _run.apply(this, arguments); - } - - function _run() { - _run = _asyncToGenerator(function* () { - var read = new Readable({ - read: function read() {} - }); - var write = new Writable({ - write: function write(data, enc, cb) { - cb(); - } - }); - read.push('data'); - read.push(null); - var 
finished = false; - write.on('finish', function () { - finished = true; - }); - yield pipelinePromise(read, write); - assert(finished); - }); - return _run.apply(this, arguments); - } - - run(); -} -{ - var _read5 = new Readable({ - read: function read() {} - }); - - var _transform = new Transform({ - transform: function transform(data, enc, cb) { - process.nextTick(cb, new Error('kaboom')); - } - }); - - var _write4 = new Writable({ - write: function write(data, enc, cb) { - cb(); - } - }); - - _read5.on('close', common.mustCall()); - - _transform.on('close', common.mustCall()); - - _write4.on('close', common.mustCall()); - - process.on('uncaughtException', common.mustCall(function (err) { - assert.strictEqual(err.message, 'kaboom'); - })); - - var _dst2 = pipeline(_read5, _transform, _write4); - - assert.strictEqual(_dst2, _write4); - - _read5.push('hello'); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-push-order.js b/test/parallel/test-stream-push-order.js deleted file mode 100644 index 4dd13be724..0000000000 --- a/test/parallel/test-stream-push-order.js +++ /dev/null @@ -1,76 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var Readable = require('../../').Readable; - -var assert = require('assert/'); - -var s = new Readable({ - highWaterMark: 20, - encoding: 'ascii' -}); -var list = ['1', '2', '3', '4', '5', '6']; - -s._read = function (n) { - var one = list.shift(); - - if (!one) { - s.push(null); - } else { - var two = list.shift(); - s.push(one); - s.push(two); - } -}; - -s.read(0); // ACTUALLY [1, 3, 5, 6, 4, 2] - -process.on('exit', function () { - assert.deepStrictEqual(s.readableBuffer.join(','), '1,2,3,4,5,6'); - - require('tap').pass(); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-push-strings.js b/test/parallel/test-stream-push-strings.js deleted file mode 100644 index f2555d6241..0000000000 --- a/test/parallel/test-stream-push-strings.js +++ /dev/null @@ -1,129 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var Readable = require('../../').Readable; - -var MyStream = -/*#__PURE__*/ -function (_Readable) { - _inherits(MyStream, _Readable); - - function MyStream(options) { - var _this; - - _classCallCheck(this, MyStream); - - _this = _possibleConstructorReturn(this, _getPrototypeOf(MyStream).call(this, options)); - _this._chunks = 3; - return _this; - } - - _createClass(MyStream, [{ - key: "_read", - value: function _read(n) { - var _this2 = this; - - switch (this._chunks--) { - case 0: - return this.push(null); - - case 1: - return setTimeout(function () { - _this2.push('last chunk'); - }, 100); - - case 2: - return this.push('second to last chunk'); - - case 3: - return process.nextTick(function () { - _this2.push('first chunk'); - }); - - default: - throw new Error('?'); - } - } - }]); - - return MyStream; -}(Readable); - -var ms = new MyStream(); -var results = []; -ms.on('readable', function () { - var chunk; - - while (null !== (chunk = ms.read())) { - results.push(String(chunk)); - } -}); -var expect = ['first chunksecond to last chunk', 'last chunk']; -process.on('exit', function () { - assert.strictEqual(ms._chunks, -1); - assert.deepStrictEqual(results, expect); - - require('tap').pass(); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-async-iterators.js b/test/parallel/test-stream-readable-async-iterators.js deleted file mode 100644 index d97cb866bc..0000000000 --- a/test/parallel/test-stream-readable-async-iterators.js +++ /dev/null @@ -1,816 +0,0 @@ -"use strict"; - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, 
_throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -function _asyncIterator(iterable) { var method; if (typeof Symbol !== "undefined") { if (Symbol.asyncIterator) { method = iterable[Symbol.asyncIterator]; if (method != null) return method.call(iterable); } if (Symbol.iterator) { method = iterable[Symbol.iterator]; if (method != null) return method.call(iterable); } } throw new TypeError("Object is not async iterable"); } - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Readable = _require.Readable, - PassThrough = _require.PassThrough, - pipeline = _require.pipeline; - -var assert = require('assert/'); - -function tests() { - return _tests.apply(this, arguments); -} // to avoid missing some tests if a promise does not resolve - - -function _tests() { - _tests = _asyncToGenerator(function* () { - { - var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); - var rs = new Readable({}); - assert.strictEqual(Object.getPrototypeOf(Object.getPrototypeOf(rs[Symbol.asyncIterator]())), AsyncIteratorPrototype); - } - { - var readable = new Readable({ - objectMode: true, - read: function read() {} - }); - readable.push(0); - readable.push(1); - readable.push(null); - var iter = readable[Symbol.asyncIterator](); - assert.strictEqual((yield iter.next()).value, 0); - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - - var _iteratorError; - - try { - for (var _iterator = _asyncIterator(iter), _step, _value; _step = yield _iterator.next(), _iteratorNormalCompletion = _step.done, _value = yield _step.value, !_iteratorNormalCompletion; _iteratorNormalCompletion = true) { - var d = _value; - assert.strictEqual(d, 1); - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator.return != null) { - yield _iterator.return(); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } - } - } - { - console.log('read without for..await'); - var max = 5; - - var _readable = new Readable({ - objectMode: true, - read: function read() {} - }); - - var _iter = _readable[Symbol.asyncIterator](); - - assert.strictEqual(_iter.stream, _readable); - var values = []; - - for (var i = 0; i < max; i++) { - values.push(_iter.next()); - } - - Promise.all(values).then(common.mustCall(function (values) { - values.forEach(common.mustCall(function (item, i) { - return assert.strictEqual(item.value, 'hello-' + i); - }, 5)); - })); - - _readable.push('hello-0'); - - _readable.push('hello-1'); - - _readable.push('hello-2'); - - _readable.push('hello-3'); - - _readable.push('hello-4'); - - _readable.push(null); - - var last = yield _iter.next(); - assert.strictEqual(last.done, true); - } - { - console.log('read without for..await deferred'); - - var _readable2 = new Readable({ - objectMode: true, - read: function read() {} - }); - - var _iter2 = _readable2[Symbol.asyncIterator](); - - assert.strictEqual(_iter2.stream, _readable2); - var _values = []; - - for (var _i = 0; _i < 3; _i++) { - _values.push(_iter2.next()); - } - - _readable2.push('hello-0'); - - _readable2.push('hello-1'); - - _readable2.push('hello-2'); - - var k = 0; - var results1 = yield Promise.all(_values); - results1.forEach(common.mustCall(function (item) { - return assert.strictEqual(item.value, 'hello-' + k++); - }, 3)); - _values = []; - - for 
(var _i2 = 0; _i2 < 2; _i2++) { - _values.push(_iter2.next()); - } - - _readable2.push('hello-3'); - - _readable2.push('hello-4'); - - _readable2.push(null); - - var results2 = yield Promise.all(_values); - results2.forEach(common.mustCall(function (item) { - return assert.strictEqual(item.value, 'hello-' + k++); - }, 2)); - - var _last = yield _iter2.next(); - - assert.strictEqual(_last.done, true); - } - { - console.log('read without for..await with errors'); - var _max = 3; - - var _readable3 = new Readable({ - objectMode: true, - read: function read() {} - }); - - var _iter3 = _readable3[Symbol.asyncIterator](); - - assert.strictEqual(_iter3.stream, _readable3); - var _values2 = []; - var errors = []; - - var _i3; - - for (_i3 = 0; _i3 < _max; _i3++) { - _values2.push(_iter3.next()); - } - - for (_i3 = 0; _i3 < 2; _i3++) { - errors.push(_iter3.next()); - } - - _readable3.push('hello-0'); - - _readable3.push('hello-1'); - - _readable3.push('hello-2'); - - var resolved = yield Promise.all(_values2); - resolved.forEach(common.mustCall(function (item, i) { - return assert.strictEqual(item.value, 'hello-' + i); - }, _max)); - errors.forEach(function (promise) { - promise.catch(common.mustCall(function (err) { - assert.strictEqual(err.message, 'kaboom'); - })); - }); - - _readable3.destroy(new Error('kaboom')); - } - { - console.log('call next() after error'); - - var _readable4 = new Readable({ - read: function read() {} - }); - - var iterator = _readable4[Symbol.asyncIterator](); - - var err = new Error('kaboom'); - - _readable4.destroy(new Error('kaboom')); - - yield function (f, e) { - var success = false; - f().then(function () { - success = true; - throw new Error('should not succeed'); - }).catch(function (e2) { - if (success) { - throw e2; - } - - assert.strictEqual(e.message, e2.message); - }); - }(iterator.next.bind(iterator), err); - } - { - console.log('read object mode'); - var _max2 = 42; - var readed = 0; - var received = 0; - - var _readable5 = new Readable({ - objectMode: true, - read: function read() { - this.push('hello'); - - if (++readed === _max2) { - this.push(null); - } - } - }); - - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - - var _iteratorError2; - - try { - for (var _iterator2 = _asyncIterator(_readable5), _step2, _value2; _step2 = yield _iterator2.next(), _iteratorNormalCompletion2 = _step2.done, _value2 = yield _step2.value, !_iteratorNormalCompletion2; _iteratorNormalCompletion2 = true) { - var _k = _value2; - received++; - assert.strictEqual(_k, 'hello'); - } - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2.return != null) { - yield _iterator2.return(); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } - } - - assert.strictEqual(readed, received); - } - { - console.log('destroy sync'); - - var _readable6 = new Readable({ - objectMode: true, - read: function read() { - this.destroy(new Error('kaboom from read')); - } - }); - - var _err; - - try { - // eslint-disable-next-line no-unused-vars - var _iteratorNormalCompletion3 = true; - var _didIteratorError3 = false; - - var _iteratorError3; - - try { - for (var _iterator3 = _asyncIterator(_readable6), _step3, _value3; _step3 = yield _iterator3.next(), _iteratorNormalCompletion3 = _step3.done, _value3 = yield _step3.value, !_iteratorNormalCompletion3; _iteratorNormalCompletion3 = true) { - var _k2 = _value3; - } - } catch (err) { - _didIteratorError3 = true; - 
_iteratorError3 = err; - } finally { - try { - if (!_iteratorNormalCompletion3 && _iterator3.return != null) { - yield _iterator3.return(); - } - } finally { - if (_didIteratorError3) { - throw _iteratorError3; - } - } - } - } catch (e) { - _err = e; - } - - assert.strictEqual(_err.message, 'kaboom from read'); - } - { - console.log('destroy async'); - - var _readable7 = new Readable({ - objectMode: true, - read: function read() { - var _this = this; - - if (!this.pushed) { - this.push('hello'); - this.pushed = true; - setImmediate(function () { - _this.destroy(new Error('kaboom')); - }); - } - } - }); - - var _received = 0; - var _err2 = null; - - try { - // eslint-disable-next-line no-unused-vars - var _iteratorNormalCompletion4 = true; - var _didIteratorError4 = false; - - var _iteratorError4; - - try { - for (var _iterator4 = _asyncIterator(_readable7), _step4, _value4; _step4 = yield _iterator4.next(), _iteratorNormalCompletion4 = _step4.done, _value4 = yield _step4.value, !_iteratorNormalCompletion4; _iteratorNormalCompletion4 = true) { - var _k3 = _value4; - _received++; - } - } catch (err) { - _didIteratorError4 = true; - _iteratorError4 = err; - } finally { - try { - if (!_iteratorNormalCompletion4 && _iterator4.return != null) { - yield _iterator4.return(); - } - } finally { - if (_didIteratorError4) { - throw _iteratorError4; - } - } - } - } catch (e) { - _err2 = e; - } - - assert.strictEqual(_err2.message, 'kaboom'); - assert.strictEqual(_received, 1); - } - { - console.log('destroyed by throw'); - - var _readable8 = new Readable({ - objectMode: true, - read: function read() { - this.push('hello'); - } - }); - - var _err3 = null; - - try { - var _iteratorNormalCompletion5 = true; - var _didIteratorError5 = false; - - var _iteratorError5; - - try { - for (var _iterator5 = _asyncIterator(_readable8), _step5, _value5; _step5 = yield _iterator5.next(), _iteratorNormalCompletion5 = _step5.done, _value5 = yield _step5.value, !_iteratorNormalCompletion5; _iteratorNormalCompletion5 = true) { - var _k4 = _value5; - assert.strictEqual(_k4, 'hello'); - throw new Error('kaboom'); - } - } catch (err) { - _didIteratorError5 = true; - _iteratorError5 = err; - } finally { - try { - if (!_iteratorNormalCompletion5 && _iterator5.return != null) { - yield _iterator5.return(); - } - } finally { - if (_didIteratorError5) { - throw _iteratorError5; - } - } - } - } catch (e) { - _err3 = e; - } - - assert.strictEqual(_err3.message, 'kaboom'); - assert.strictEqual(_readable8.destroyed, true); - } - { - console.log('destroyed sync after push'); - - var _readable9 = new Readable({ - objectMode: true, - read: function read() { - this.push('hello'); - this.destroy(new Error('kaboom')); - } - }); - - var _received2 = 0; - var _err4 = null; - - try { - var _iteratorNormalCompletion6 = true; - var _didIteratorError6 = false; - - var _iteratorError6; - - try { - for (var _iterator6 = _asyncIterator(_readable9), _step6, _value6; _step6 = yield _iterator6.next(), _iteratorNormalCompletion6 = _step6.done, _value6 = yield _step6.value, !_iteratorNormalCompletion6; _iteratorNormalCompletion6 = true) { - var _k5 = _value6; - assert.strictEqual(_k5, 'hello'); - _received2++; - } - } catch (err) { - _didIteratorError6 = true; - _iteratorError6 = err; - } finally { - try { - if (!_iteratorNormalCompletion6 && _iterator6.return != null) { - yield _iterator6.return(); - } - } finally { - if (_didIteratorError6) { - throw _iteratorError6; - } - } - } - } catch (e) { - _err4 = e; - } - - assert.strictEqual(_err4.message, 
'kaboom'); - assert.strictEqual(_received2, 1); - } - { - console.log('push async'); - var _max3 = 42; - var _readed = 0; - var _received3 = 0; - - var _readable10 = new Readable({ - objectMode: true, - read: function read() { - var _this2 = this; - - setImmediate(function () { - _this2.push('hello'); - - if (++_readed === _max3) { - _this2.push(null); - } - }); - } - }); - - var _iteratorNormalCompletion7 = true; - var _didIteratorError7 = false; - - var _iteratorError7; - - try { - for (var _iterator7 = _asyncIterator(_readable10), _step7, _value7; _step7 = yield _iterator7.next(), _iteratorNormalCompletion7 = _step7.done, _value7 = yield _step7.value, !_iteratorNormalCompletion7; _iteratorNormalCompletion7 = true) { - var _k6 = _value7; - _received3++; - assert.strictEqual(_k6, 'hello'); - } - } catch (err) { - _didIteratorError7 = true; - _iteratorError7 = err; - } finally { - try { - if (!_iteratorNormalCompletion7 && _iterator7.return != null) { - yield _iterator7.return(); - } - } finally { - if (_didIteratorError7) { - throw _iteratorError7; - } - } - } - - assert.strictEqual(_readed, _received3); - } - { - console.log('push binary async'); - var _max4 = 42; - var _readed2 = 0; - - var _readable11 = new Readable({ - read: function read() { - var _this3 = this; - - setImmediate(function () { - _this3.push('hello'); - - if (++_readed2 === _max4) { - _this3.push(null); - } - }); - } - }); - - var expected = ''; - - _readable11.setEncoding('utf8'); - - _readable11.pause(); - - _readable11.on('data', function (chunk) { - expected += chunk; - }); - - var data = ''; - var _iteratorNormalCompletion8 = true; - var _didIteratorError8 = false; - - var _iteratorError8; - - try { - for (var _iterator8 = _asyncIterator(_readable11), _step8, _value8; _step8 = yield _iterator8.next(), _iteratorNormalCompletion8 = _step8.done, _value8 = yield _step8.value, !_iteratorNormalCompletion8; _iteratorNormalCompletion8 = true) { - var _k7 = _value8; - data += _k7; - } - } catch (err) { - _didIteratorError8 = true; - _iteratorError8 = err; - } finally { - try { - if (!_iteratorNormalCompletion8 && _iterator8.return != null) { - yield _iterator8.return(); - } - } finally { - if (_didIteratorError8) { - throw _iteratorError8; - } - } - } - - assert.strictEqual(data, expected); - } - { - console.log('.next() on destroyed stream'); - - var _readable12 = new Readable({ - read: function read() {// no-op - } - }); - - _readable12.destroy(); - - var _ref = yield _readable12[Symbol.asyncIterator]().next(), - done = _ref.done; - - assert.strictEqual(done, true); - } - { - console.log('.next() on pipelined stream'); - - var _readable13 = new Readable({ - read: function read() {// no-op - } - }); - - var passthrough = new PassThrough(); - - var _err5 = new Error('kaboom'); - - pipeline(_readable13, passthrough, common.mustCall(function (e) { - assert.strictEqual(e, _err5); - })); - - _readable13.destroy(_err5); - - try { - yield _readable13[Symbol.asyncIterator]().next(); - } catch (e) { - assert.strictEqual(e, _err5); - } - } - { - console.log('iterating on an ended stream completes'); - var r = new Readable({ - objectMode: true, - read: function read() { - this.push('asdf'); - this.push('hehe'); - this.push(null); - } - }); // eslint-disable-next-line no-unused-vars - - var _iteratorNormalCompletion9 = true; - var _didIteratorError9 = false; - - var _iteratorError9; - - try { - for (var _iterator9 = _asyncIterator(r), _step9, _value9; _step9 = yield _iterator9.next(), _iteratorNormalCompletion9 = _step9.done, _value9 
= yield _step9.value, !_iteratorNormalCompletion9; _iteratorNormalCompletion9 = true) { - var a = _value9; - } // eslint-disable-next-line no-unused-vars - - } catch (err) { - _didIteratorError9 = true; - _iteratorError9 = err; - } finally { - try { - if (!_iteratorNormalCompletion9 && _iterator9.return != null) { - yield _iterator9.return(); - } - } finally { - if (_didIteratorError9) { - throw _iteratorError9; - } - } - } - - var _iteratorNormalCompletion10 = true; - var _didIteratorError10 = false; - - var _iteratorError10; - - try { - for (var _iterator10 = _asyncIterator(r), _step10, _value10; _step10 = yield _iterator10.next(), _iteratorNormalCompletion10 = _step10.done, _value10 = yield _step10.value, !_iteratorNormalCompletion10; _iteratorNormalCompletion10 = true) { - var b = _value10; - } - } catch (err) { - _didIteratorError10 = true; - _iteratorError10 = err; - } finally { - try { - if (!_iteratorNormalCompletion10 && _iterator10.return != null) { - yield _iterator10.return(); - } - } finally { - if (_didIteratorError10) { - throw _iteratorError10; - } - } - } - } - { - console.log('destroy mid-stream does not error'); - - var _r = new Readable({ - objectMode: true, - read: function read() { - this.push('asdf'); - this.push('hehe'); - } - }); // eslint-disable-next-line no-unused-vars - - - var _iteratorNormalCompletion11 = true; - var _didIteratorError11 = false; - - var _iteratorError11; - - try { - for (var _iterator11 = _asyncIterator(_r), _step11, _value11; _step11 = yield _iterator11.next(), _iteratorNormalCompletion11 = _step11.done, _value11 = yield _step11.value, !_iteratorNormalCompletion11; _iteratorNormalCompletion11 = true) { - var _a = _value11; - - _r.destroy(null); - } - } catch (err) { - _didIteratorError11 = true; - _iteratorError11 = err; - } finally { - try { - if (!_iteratorNormalCompletion11 && _iterator11.return != null) { - yield _iterator11.return(); - } - } finally { - if (_didIteratorError11) { - throw _iteratorError11; - } - } - } - } - { - console.log('all next promises must be resolved on end'); - - var _r2 = new Readable({ - objectMode: true, - read: function read() {} - }); - - var _b = _r2[Symbol.asyncIterator](); - - var c = _b.next(); - - var _d = _b.next(); - - _r2.push(null); - - assert.deepStrictEqual((yield c), { - done: true, - value: undefined - }); - assert.deepStrictEqual((yield _d), { - done: true, - value: undefined - }); - } - { - console.log('all next promises must be resolved on destroy'); - - var _r3 = new Readable({ - objectMode: true, - read: function read() {} - }); - - var _b2 = _r3[Symbol.asyncIterator](); - - var _c = _b2.next(); - - var _d2 = _b2.next(); - - _r3.destroy(); - - assert.deepStrictEqual((yield _c), { - done: true, - value: undefined - }); - assert.deepStrictEqual((yield _d2), { - done: true, - value: undefined - }); - } - { - console.log('all next promises must be resolved on destroy with error'); - - var _r4 = new Readable({ - objectMode: true, - read: function read() {} - }); - - var _b3 = _r4[Symbol.asyncIterator](); - - var _c2 = _b3.next(); - - var _d3 = _b3.next(); - - var _err6 = new Error('kaboom'); - - _r4.destroy(_err6); - - yield Promise.all([_asyncToGenerator(function* () { - var e; - - try { - yield _c2; - } catch (_e) { - e = _e; - } - - assert.strictEqual(e, _err6); - })(), _asyncToGenerator(function* () { - var e; - - try { - yield _d3; - } catch (_e) { - e = _e; - } - - assert.strictEqual(e, _err6); - })()]); - } - }); - return _tests.apply(this, arguments); -} - 
-tests().then(common.mustCall(), common.mustNotCall(console.log)); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-constructor-set-methods.js b/test/parallel/test-stream-readable-constructor-set-methods.js deleted file mode 100644 index d8f683dd2f..0000000000 --- a/test/parallel/test-stream-readable-constructor-set-methods.js +++ /dev/null @@ -1,36 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var Readable = require('../../').Readable; - -var _read = common.mustCall(function _read(n) { - this.push(null); -}); - -var r = new Readable({ - read: _read -}); -r.resume(); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-destroy.js b/test/parallel/test-stream-readable-destroy.js deleted file mode 100644 index 439476ab33..0000000000 --- a/test/parallel/test-stream-readable-destroy.js +++ /dev/null @@ -1,226 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Readable = _require.Readable; - -var assert = require('assert/'); - -{ - var read = new Readable({ - read: function read() {} - }); - read.resume(); - read.on('close', common.mustCall()); - read.destroy(); - assert.strictEqual(read.destroyed, true); -} -{ - var _read = new Readable({ - read: function read() {} - }); - - _read.resume(); - - var expected = new Error('kaboom'); - - _read.on('end', common.mustNotCall('no end event')); - - _read.on('close', common.mustCall()); - - _read.on('error', common.mustCall(function (err) { - assert.strictEqual(err, expected); - })); - - _read.destroy(expected); - - assert.strictEqual(_read.destroyed, true); -} -{ - var _read2 = new Readable({ - read: function read() {} - }); - - _read2._destroy = common.mustCall(function (err, cb) { - assert.strictEqual(err, _expected); - cb(err); - }); - - var _expected = new Error('kaboom'); - - _read2.on('end', common.mustNotCall('no end event')); - - _read2.on('close', common.mustCall()); - - _read2.on('error', common.mustCall(function (err) { - assert.strictEqual(err, _expected); - })); - - _read2.destroy(_expected); - - assert.strictEqual(_read2.destroyed, true); -} -{ - var _read3 = new Readable({ - read: function read() {}, - destroy: common.mustCall(function (err, cb) { - assert.strictEqual(err, _expected2); - cb(); - }) - }); - - var _expected2 = new Error('kaboom'); - - _read3.on('end', common.mustNotCall('no end event')); // error is swallowed by the custom _destroy - - - _read3.on('error', common.mustNotCall('no error event')); - - _read3.on('close', common.mustCall()); - - _read3.destroy(_expected2); - - assert.strictEqual(_read3.destroyed, true); -} -{ - var _read4 = new Readable({ - read: function read() {} - }); - - _read4._destroy = common.mustCall(function (err, cb) { - assert.strictEqual(err, null); - cb(); - }); - - _read4.destroy(); - - 
assert.strictEqual(_read4.destroyed, true); -} -{ - var _read5 = new Readable({ - read: function read() {} - }); - - _read5.resume(); - - _read5._destroy = common.mustCall(function (err, cb) { - var _this = this; - - assert.strictEqual(err, null); - process.nextTick(function () { - _this.push(null); - - cb(); - }); - }); - var fail = common.mustNotCall('no end event'); - - _read5.on('end', fail); - - _read5.on('close', common.mustCall()); - - _read5.destroy(); - - _read5.removeListener('end', fail); - - _read5.on('end', common.mustCall()); - - assert.strictEqual(_read5.destroyed, true); -} -{ - var _read6 = new Readable({ - read: function read() {} - }); - - var _expected3 = new Error('kaboom'); - - _read6._destroy = common.mustCall(function (err, cb) { - assert.strictEqual(err, null); - cb(_expected3); - }); - - _read6.on('end', common.mustNotCall('no end event')); - - _read6.on('error', common.mustCall(function (err) { - assert.strictEqual(err, _expected3); - })); - - _read6.destroy(); - - assert.strictEqual(_read6.destroyed, true); -} -{ - var _read7 = new Readable({ - read: function read() {} - }); - - _read7.resume(); - - _read7.destroyed = true; - assert.strictEqual(_read7.destroyed, true); // the internal destroy() mechanism should not be triggered - - _read7.on('end', common.mustNotCall()); - - _read7.destroy(); -} -{ - function MyReadable() { - assert.strictEqual(this.destroyed, false); - this.destroyed = false; - Readable.call(this); - } - - Object.setPrototypeOf(MyReadable.prototype, Readable.prototype); - Object.setPrototypeOf(MyReadable, Readable); - new MyReadable(); -} -{ - // destroy and destroy callback - var _read8 = new Readable({ - read: function read() {} - }); - - _read8.resume(); - - var _expected4 = new Error('kaboom'); - - _read8.on('close', common.mustCall()); - - _read8.destroy(_expected4, common.mustCall(function (err) { - assert.strictEqual(err, _expected4); - })); -} -{ - var _read9 = new Readable({ - read: function read() {} - }); - - _read9.destroy(); - - _read9.push('hi'); - - _read9.on('data', common.mustNotCall()); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-emittedReadable.js b/test/parallel/test-stream-readable-emittedReadable.js deleted file mode 100644 index d08ffce14b..0000000000 --- a/test/parallel/test-stream-readable-emittedReadable.js +++ /dev/null @@ -1,87 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var Readable = require('../../').Readable; - -var readable = new Readable({ - read: function read() {} -}); // Initialized to false. 
- -assert.strictEqual(readable._readableState.emittedReadable, false); -var expected = [bufferShim.from('foobar'), bufferShim.from('quo'), null]; -readable.on('readable', common.mustCall(function () { - // emittedReadable should be true when the readable event is emitted - assert.strictEqual(readable._readableState.emittedReadable, true); - assert.deepStrictEqual(readable.read(), expected.shift()); // emittedReadable is reset to false during read() - - assert.strictEqual(readable._readableState.emittedReadable, false); -}, 3)); // When the first readable listener is just attached, -// emittedReadable should be false - -assert.strictEqual(readable._readableState.emittedReadable, false); // These trigger a single 'readable', as things are batched up - -process.nextTick(common.mustCall(function () { - readable.push('foo'); -})); -process.nextTick(common.mustCall(function () { - readable.push('bar'); -})); // these triggers two readable events - -setImmediate(common.mustCall(function () { - readable.push('quo'); - process.nextTick(common.mustCall(function () { - readable.push(null); - })); -})); -var noRead = new Readable({ - read: function read() {} -}); -noRead.on('readable', common.mustCall(function () { - // emittedReadable should be true when the readable event is emitted - assert.strictEqual(noRead._readableState.emittedReadable, true); - noRead.read(0); // emittedReadable is not reset during read(0) - - assert.strictEqual(noRead._readableState.emittedReadable, true); -})); -noRead.push('foo'); -noRead.push(null); -var flowing = new Readable({ - read: function read() {} -}); -flowing.on('data', common.mustCall(function () { - // When in flowing mode, emittedReadable is always false. - assert.strictEqual(flowing._readableState.emittedReadable, false); - flowing.read(); - assert.strictEqual(flowing._readableState.emittedReadable, false); -}, 3)); -flowing.push('foooo'); -flowing.push('bar'); -flowing.push('quo'); -process.nextTick(common.mustCall(function () { - flowing.push(null); -})); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-event.js b/test/parallel/test-stream-readable-event.js deleted file mode 100644 index 1b33347760..0000000000 --- a/test/parallel/test-stream-readable-event.js +++ /dev/null @@ -1,152 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var Readable = require('../../').Readable; - -{ - // First test, not reading when the readable is added. - // make sure that on('readable', ...) triggers a readable event. - var r = new Readable({ - highWaterMark: 3 - }); - r._read = common.mustNotCall(); // This triggers a 'readable' event, which is lost. - - r.push(bufferShim.from('blerg')); - setTimeout(function () { - // we're testing what we think we are - assert(!r._readableState.reading); - r.on('readable', common.mustCall()); - }, 1); -} -{ - // second test, make sure that readable is re-emitted if there's - // already a length, while it IS reading. - var _r = new Readable({ - highWaterMark: 3 - }); - - _r._read = common.mustCall(); // This triggers a 'readable' event, which is lost. - - _r.push(bufferShim.from('bl')); - - setTimeout(function () { - // assert we're testing what we think we are - assert(_r._readableState.reading); - - _r.on('readable', common.mustCall()); - }, 1); -} -{ - // Third test, not reading when the stream has not passed - // the highWaterMark but *has* reached EOF. - var _r2 = new Readable({ - highWaterMark: 30 - }); - - _r2._read = common.mustNotCall(); // This triggers a 'readable' event, which is lost. - - _r2.push(bufferShim.from('blerg')); - - _r2.push(null); - - setTimeout(function () { - // assert we're testing what we think we are - assert(!_r2._readableState.reading); - - _r2.on('readable', common.mustCall()); - }, 1); -} -{ - // pushing a empty string in non-objectMode should - // trigger next `read()`. - var underlyingData = ['', 'x', 'y', '', 'z']; - var expected = underlyingData.filter(function (data) { - return data; - }); - var result = []; - - var _r3 = new Readable({ - encoding: 'utf8' - }); - - _r3._read = function () { - var _this = this; - - process.nextTick(function () { - if (!underlyingData.length) { - _this.push(null); - } else { - _this.push(underlyingData.shift()); - } - }); - }; - - _r3.on('readable', function () { - var data = _r3.read(); - - if (data !== null) result.push(data); - }); - - _r3.on('end', common.mustCall(function () { - assert.deepStrictEqual(result, expected); - })); -} -{ - // #20923 - var _r4 = new Readable(); - - _r4._read = function () {// actually doing thing here - }; - - _r4.on('data', function () {}); - - _r4.removeAllListeners(); - - assert.strictEqual(_r4.eventNames().length, 0); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-flow-recursion.js b/test/parallel/test-stream-readable-flow-recursion.js deleted file mode 100644 index ed98bd3244..0000000000 --- a/test/parallel/test-stream-readable-flow-recursion.js +++ /dev/null @@ -1,94 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); // this test verifies that passing a huge number to read(size) -// will push up the highWaterMark, and cause the stream to read -// more data continuously, but without triggering a nextTick -// warning or RangeError. - - -var Readable = require('../../').Readable; // throw an error if we trigger a nextTick warning. - - -process.throwDeprecation = true; -var stream = new Readable({ - highWaterMark: 2 -}); -var reads = 0; -var total = 5000; - -stream._read = function (size) { - reads++; - size = Math.min(size, total); - total -= size; - if (size === 0) stream.push(null);else stream.push(bufferShim.allocUnsafe(size)); -}; - -var depth = 0; - -function flow(stream, size, callback) { - depth += 1; - var chunk = stream.read(size); - if (!chunk) stream.once('readable', flow.bind(null, stream, size, callback));else callback(chunk); - depth -= 1; - console.log("flow(".concat(depth, "): exit")); -} - -flow(stream, 5000, function () { - console.log("complete (".concat(depth, ")")); -}); -process.on('exit', function (code) { - assert.strictEqual(reads, 2); // we pushed up the high water mark - - assert.strictEqual(stream.readableHighWaterMark, 8192); // length is 0 right now, because we pulled it all out. - - assert.strictEqual(stream.readableLength, 0); - assert(!code); - assert.strictEqual(depth, 0); - - require('tap').pass(); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-hwm-0-async.js b/test/parallel/test-stream-readable-hwm-0-async.js deleted file mode 100644 index 5b4d102f52..0000000000 --- a/test/parallel/test-stream-readable-hwm-0-async.js +++ /dev/null @@ -1,44 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); // This test ensures that Readable stream will continue to call _read -// for streams with highWaterMark === 0 once the stream returns data -// by calling push() asynchronously. 
- - -var _require = require('../../'), - Readable = _require.Readable; - -var count = 5; -var r = new Readable({ - // Called 6 times: First 5 return data, last one signals end of stream. - read: common.mustCall(function () { - process.nextTick(common.mustCall(function () { - if (count--) r.push('a');else r.push(null); - })); - }, 6), - highWaterMark: 0 -}); -r.on('end', common.mustCall()); -r.on('data', common.mustCall(5)); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-hwm-0-no-flow-data.js b/test/parallel/test-stream-readable-hwm-0-no-flow-data.js deleted file mode 100644 index 13f7ec3e49..0000000000 --- a/test/parallel/test-stream-readable-hwm-0-no-flow-data.js +++ /dev/null @@ -1,106 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); // Ensure that subscribing the 'data' event will not make the stream flow. -// The 'data' event will require calling read() by hand. -// -// The test is written for the (somewhat rare) highWaterMark: 0 streams to -// specifically catch any regressions that might occur with these streams. - - -var assert = require('assert/'); - -var _require = require('../../'), - Readable = _require.Readable; - -var streamData = ['a', null]; // Track the calls so we can assert their order later. - -var calls = []; -var r = new Readable({ - read: common.mustCall(function () { - calls.push('_read:' + streamData[0]); - process.nextTick(function () { - calls.push('push:' + streamData[0]); - r.push(streamData.shift()); - }); - }, streamData.length), - highWaterMark: 0, - // Object mode is used here just for testing convenience. It really - // shouldn't affect the order of events. Just the data and its format. - objectMode: true -}); -assert.strictEqual(r.readableFlowing, null); -r.on('readable', common.mustCall(function () { - calls.push('readable'); -}, 2)); -assert.strictEqual(r.readableFlowing, false); -r.on('data', common.mustCall(function (data) { - calls.push('data:' + data); -}, 1)); -r.on('end', common.mustCall(function () { - calls.push('end'); -})); -assert.strictEqual(r.readableFlowing, false); // The stream emits the events asynchronously but that's not guaranteed to -// happen on the next tick (especially since the _read implementation above -// uses process.nextTick). -// -// We use setImmediate here to give the stream enough time to emit all the -// events it's about to emit. - -setImmediate(function () { - // Only the _read, push, readable calls have happened. No data must be - // emitted yet. - assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable']); // Calling 'r.read()' should trigger the data event. - - assert.strictEqual(r.read(), 'a'); - assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a']); // The next 'read()' will return null because hwm: 0 does not buffer any - // data and the _read implementation above does the push() asynchronously. - // - // Note: This 'null' signals "no data available". It isn't the end-of-stream - // null value as the stream doesn't know yet that it is about to reach the - // end. - // - // Using setImmediate again to give the stream enough time to emit all the - // events it wants to emit. 
- - assert.strictEqual(r.read(), null); - setImmediate(function () { - // There's a new 'readable' event after the data has been pushed. - // The 'end' event will be emitted only after a 'read()'. - // - // This is somewhat special for the case where the '_read' implementation - // calls 'push' asynchronously. If 'push' was synchronous, the 'end' event - // would be emitted here _before_ we call read(). - assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a', '_read:null', 'push:null', 'readable']); - assert.strictEqual(r.read(), null); // While it isn't really specified whether the 'end' event should happen - // synchronously with read() or not, we'll assert the current behavior - // ('end' event happening on the next tick after read()) so any changes - // to it are noted and acknowledged in the future. - - assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a', '_read:null', 'push:null', 'readable']); - process.nextTick(function () { - assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a', '_read:null', 'push:null', 'readable', 'end']); - }); - }); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-hwm-0.js b/test/parallel/test-stream-readable-hwm-0.js deleted file mode 100644 index c2752273d8..0000000000 --- a/test/parallel/test-stream-readable-hwm-0.js +++ /dev/null @@ -1,52 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); // This test ensures that Readable stream will call _read() for streams -// with highWaterMark === 0 upon .read(0) instead of just trying to -// emit 'readable' event. 
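// Illustrative sketch (not from the original test) of the point above,
// assuming the standard Node.js 'stream' API: on a highWaterMark: 0 stream,
// the read(0) issued when a 'readable' listener is attached reaches _read()
// instead of only scheduling a 'readable' event, and 'readable' itself only
// fires once null is pushed because no data was ever buffered.
const { Readable } = require('stream');

let readCalls = 0;
const empty = new Readable({
  highWaterMark: 0,
  read() { readCalls++; }
});

empty.on('readable', () => {
  // Fires at end-of-stream, since nothing was pushed before null.
  console.log('readable; _read() was called', readCalls, 'time(s)');
});
process.nextTick(() => empty.push(null));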
- - -var assert = require('assert/'); - -var _require = require('../../'), - Readable = _require.Readable; - -var r = new Readable({ - // must be called only once upon setting 'readable' listener - read: common.mustCall(), - highWaterMark: 0 -}); -var pushedNull = false; // this will trigger read(0) but must only be called after push(null) -// because the we haven't pushed any data - -r.on('readable', common.mustCall(function () { - assert.strictEqual(r.read(), null); - assert.strictEqual(pushedNull, true); -})); -r.on('end', common.mustCall()); -process.nextTick(function () { - assert.strictEqual(r.read(), null); - pushedNull = true; - r.push(null); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-infinite-read.js b/test/parallel/test-stream-readable-infinite-read.js deleted file mode 100644 index f51c0d218c..0000000000 --- a/test/parallel/test-stream-readable-infinite-read.js +++ /dev/null @@ -1,54 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var _require = require('../../'), - Readable = _require.Readable; - -var buf = bufferShim.alloc(8192); -var readable = new Readable({ - read: common.mustCall(function () { - this.push(buf); - }, 31) -}); -var i = 0; -readable.on('readable', common.mustCall(function () { - if (i++ === 10) { - // We will just terminate now. - process.removeAllListeners('readable'); - return; - } - - var data = readable.read(); // TODO(mcollina): there is something odd in the highWaterMark logic - // investigate. 
- - if (i === 1) { - assert.strictEqual(data.length, 8192 * 2); - } else { - assert.strictEqual(data.length, 8192 * 3); - } -}, 11)); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-invalid-chunk.js b/test/parallel/test-stream-readable-invalid-chunk.js deleted file mode 100644 index 0512890b50..0000000000 --- a/test/parallel/test-stream-readable-invalid-chunk.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var stream = require('../../'); - -var readable = new stream.Readable({ - read: function read() {} -}); - -function checkError(fn) { - common.expectsError(fn, { - code: 'ERR_INVALID_ARG_TYPE', - type: TypeError - }); -} - -checkError(function () { - return readable.push([]); -}); -checkError(function () { - return readable.push({}); -}); -checkError(function () { - return readable.push(0); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-needReadable.js b/test/parallel/test-stream-readable-needReadable.js deleted file mode 100644 index df882d2b4b..0000000000 --- a/test/parallel/test-stream-readable-needReadable.js +++ /dev/null @@ -1,108 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var Readable = require('../../').Readable; - -var readable = new Readable({ - read: function read() {} -}); // Initialized to false. - -assert.strictEqual(readable._readableState.needReadable, false); -readable.on('readable', common.mustCall(function () { - // When the readable event fires, needReadable is reset. - assert.strictEqual(readable._readableState.needReadable, false); - readable.read(); -})); // If a readable listener is attached, then a readable event is needed. - -assert.strictEqual(readable._readableState.needReadable, true); -readable.push('foo'); -readable.push(null); -readable.on('end', common.mustCall(function () { - // No need to emit readable anymore when the stream ends. - assert.strictEqual(readable._readableState.needReadable, false); -})); -var asyncReadable = new Readable({ - read: function read() {} -}); -asyncReadable.on('readable', common.mustCall(function () { - if (asyncReadable.read() !== null) { - // After each read(), the buffer is empty. - // If the stream doesn't end now, - // then we need to notify the reader on future changes. - assert.strictEqual(asyncReadable._readableState.needReadable, true); - } -}, 2)); -process.nextTick(common.mustCall(function () { - asyncReadable.push('foooo'); -})); -process.nextTick(common.mustCall(function () { - asyncReadable.push('bar'); -})); -setImmediate(common.mustCall(function () { - asyncReadable.push(null); - assert.strictEqual(asyncReadable._readableState.needReadable, false); -})); -var flowing = new Readable({ - read: function read() {} -}); // Notice this must be above the on('data') call. 
- -flowing.push('foooo'); -flowing.push('bar'); -flowing.push('quo'); -process.nextTick(common.mustCall(function () { - flowing.push(null); -})); // When the buffer already has enough data, and the stream is -// in flowing mode, there is no need for the readable event. - -flowing.on('data', common.mustCall(function (data) { - assert.strictEqual(flowing._readableState.needReadable, false); -}, 3)); -var slowProducer = new Readable({ - read: function read() {} -}); -slowProducer.on('readable', common.mustCall(function () { - if (slowProducer.read(8) === null) { - // The buffer doesn't have enough data, and the stream is not need, - // we need to notify the reader when data arrives. - assert.strictEqual(slowProducer._readableState.needReadable, true); - } else { - assert.strictEqual(slowProducer._readableState.needReadable, false); - } -}, 4)); -process.nextTick(common.mustCall(function () { - slowProducer.push('foo'); - process.nextTick(common.mustCall(function () { - slowProducer.push('foo'); - process.nextTick(common.mustCall(function () { - slowProducer.push('foo'); - process.nextTick(common.mustCall(function () { - slowProducer.push(null); - })); - })); - })); -})); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-no-unneeded-readable.js b/test/parallel/test-stream-readable-no-unneeded-readable.js deleted file mode 100644 index 2da85fb9c9..0000000000 --- a/test/parallel/test-stream-readable-no-unneeded-readable.js +++ /dev/null @@ -1,87 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Readable = _require.Readable, - PassThrough = _require.PassThrough; - -function test(r) { - var wrapper = new Readable({ - read: function read() { - var data = r.read(); - - if (data) { - wrapper.push(data); - return; - } - - r.once('readable', function () { - data = r.read(); - - if (data) { - wrapper.push(data); - } // else the end event should fire - - }); - } - }); - r.once('end', function () { - wrapper.push(null); - }); - wrapper.resume(); - wrapper.once('end', common.mustCall()); -} - -{ - var source = new Readable({ - read: function read() {} - }); - source.push('foo'); - source.push('bar'); - source.push(null); - var pt = source.pipe(new PassThrough()); - test(pt); -} -{ - // This is the underlying cause of the above test case. 
- var pushChunks = ['foo', 'bar']; - var r = new Readable({ - read: function read() { - var chunk = pushChunks.shift(); - - if (chunk) { - // synchronous call - r.push(chunk); - } else { - // asynchronous call - process.nextTick(function () { - return r.push(null); - }); - } - } - }); - test(r); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-object-multi-push-async.js b/test/parallel/test-stream-readable-object-multi-push-async.js deleted file mode 100644 index 8aecbf1546..0000000000 --- a/test/parallel/test-stream-readable-object-multi-push-async.js +++ /dev/null @@ -1,232 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var _require = require('../../'), - Readable = _require.Readable; - -var MAX = 42; -var BATCH = 10; -{ - var readable = new Readable({ - objectMode: true, - read: common.mustCall(function () { - var _this = this; - - console.log('>> READ'); - fetchData(function (err, data) { - if (err) { - _this.destroy(err); - - return; - } - - if (data.length === 0) { - console.log('pushing null'); - - _this.push(null); - - return; - } - - console.log('pushing'); - data.forEach(function (d) { - return _this.push(d); - }); - }); - }, Math.floor(MAX / BATCH) + 2) - }); - var i = 0; - - function fetchData(cb) { - if (i > MAX) { - setTimeout(cb, 10, null, []); - } else { - var array = []; - var max = i + BATCH; - - for (; i < max; i++) { - array.push(i); - } - - setTimeout(cb, 10, null, array); - } - } - - readable.on('readable', function () { - var data; - console.log('readable emitted'); - - while (data = readable.read()) { - console.log(data); - } - }); - readable.on('end', common.mustCall(function () { - assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH); - })); -} -{ - var _readable = new Readable({ - objectMode: true, - read: common.mustCall(function () { - var _this2 = this; - - console.log('>> READ'); - fetchData(function (err, data) { - if (err) { - _this2.destroy(err); - - return; - } - - if (data.length === 0) { - console.log('pushing null'); - - _this2.push(null); - - return; - } - - console.log('pushing'); - data.forEach(function (d) { - return _this2.push(d); - }); - }); - }, Math.floor(MAX / BATCH) + 2) - }); - - var _i = 0; - - function fetchData(cb) { - if (_i > MAX) { - setTimeout(cb, 10, null, []); - } else { - var array = []; - var max = _i + BATCH; - - for (; _i < max; _i++) { - array.push(_i); - } - - setTimeout(cb, 10, null, array); - } - } - - _readable.on('data', function (data) { - console.log('data emitted', data); - }); - - _readable.on('end', common.mustCall(function () { - assert.strictEqual(_i, (Math.floor(MAX / BATCH) + 1) * BATCH); - })); -} -{ - var _readable2 = new Readable({ - objectMode: true, - read: common.mustCall(function () { - var _this3 = this; - - console.log('>> READ'); - fetchData(function (err, data) { - if (err) { - _this3.destroy(err); - - return; - } - - console.log('pushing'); - data.forEach(function (d) { - return _this3.push(d); - }); - - if (data[BATCH - 1] >= MAX) { - console.log('pushing null'); - - _this3.push(null); - } - }); - }, Math.floor(MAX / BATCH) + 1) - }); - - var _i2 = 0; - - function 
fetchData(cb) { - var array = []; - var max = _i2 + BATCH; - - for (; _i2 < max; _i2++) { - array.push(_i2); - } - - setTimeout(cb, 10, null, array); - } - - _readable2.on('data', function (data) { - console.log('data emitted', data); - }); - - _readable2.on('end', common.mustCall(function () { - assert.strictEqual(_i2, (Math.floor(MAX / BATCH) + 1) * BATCH); - })); -} -{ - var _readable3 = new Readable({ - objectMode: true, - read: common.mustNotCall() - }); - - _readable3.on('data', common.mustNotCall()); - - _readable3.push(null); - - var nextTickPassed = false; - process.nextTick(function () { - nextTickPassed = true; - }); - - _readable3.on('end', common.mustCall(function () { - assert.strictEqual(nextTickPassed, true); - })); -} -{ - var _readable4 = new Readable({ - objectMode: true, - read: common.mustCall() - }); - - _readable4.on('data', function (data) { - console.log('data emitted', data); - }); - - _readable4.on('end', common.mustCall()); - - setImmediate(function () { - _readable4.push('aaa'); - - _readable4.push(null); - }); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-pause-and-resume.js b/test/parallel/test-stream-readable-pause-and-resume.js deleted file mode 100644 index fe852cf426..0000000000 --- a/test/parallel/test-stream-readable-pause-and-resume.js +++ /dev/null @@ -1,61 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var _require = require('../../'), - Readable = _require.Readable; - -var common = require('../common'); - -var ticks = 18; -var expectedData = 19; -var rs = new Readable({ - objectMode: true, - read: function read() { - if (ticks-- > 0) return process.nextTick(function () { - return rs.push({}); - }); - rs.push({}); - rs.push(null); - } -}); -rs.on('end', common.mustCall()); -readAndPause(); - -function readAndPause() { - // Does a on(data) -> pause -> wait -> resume -> on(data) ... loop. - // Expects on(data) to never fire if the stream is paused. 
- var ondata = common.mustCall(function (data) { - rs.pause(); - expectedData--; - if (expectedData <= 0) return; - setImmediate(function () { - rs.removeListener('data', ondata); - readAndPause(); - rs.resume(); - }); - }, 1); // only call ondata once - - rs.on('data', ondata); -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-readable-then-resume.js b/test/parallel/test-stream-readable-readable-then-resume.js deleted file mode 100644 index 5764ab8ddd..0000000000 --- a/test/parallel/test-stream-readable-readable-then-resume.js +++ /dev/null @@ -1,53 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Readable = _require.Readable; // This test verifies that a stream could be resumed after -// removing the readable event in the same tick - - -check(new Readable({ - objectMode: true, - highWaterMark: 1, - read: function read() { - if (!this.first) { - this.push('hello'); - this.first = true; - return; - } - - this.push(null); - } -})); - -function check(s) { - var readableListener = common.mustNotCall(); - s.on('readable', readableListener); - s.on('end', common.mustCall()); - s.removeListener('readable', readableListener); - s.resume(); -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-reading-readingMore.js b/test/parallel/test-stream-readable-reading-readingMore.js deleted file mode 100644 index a17b4c6cdf..0000000000 --- a/test/parallel/test-stream-readable-reading-readingMore.js +++ /dev/null @@ -1,178 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var Readable = require('../../').Readable; - -{ - var readable = new Readable({ - read: function read(size) {} - }); - var state = readable._readableState; // Starting off with false initially. - - assert.strictEqual(state.reading, false); - assert.strictEqual(state.readingMore, false); - readable.on('data', common.mustCall(function (data) { - // while in a flowing state with a 'readable' listener - // we should not be reading more - if (readable.readableFlowing) assert.strictEqual(state.readingMore, true); // reading as long as we've not ended - - assert.strictEqual(state.reading, !state.ended); - }, 2)); - - function onStreamEnd() { - // End of stream; state.reading is false - // And so should be readingMore. 
- assert.strictEqual(state.readingMore, false); - assert.strictEqual(state.reading, false); - } - - var expectedReadingMore = [true, false]; - readable.on('readable', common.mustCall(function () { - // there is only one readingMore scheduled from on('data'), - // after which everything is governed by the .read() call - assert.strictEqual(state.readingMore, expectedReadingMore.shift()); // if the stream has ended, we shouldn't be reading - - assert.strictEqual(state.ended, !state.reading); - var data = readable.read(); - if (data === null) // reached end of stream - process.nextTick(common.mustCall(onStreamEnd, 1)); - }, 2)); - readable.on('end', common.mustCall(onStreamEnd)); - readable.push('pushed'); - readable.read(6); // reading - - assert.strictEqual(state.reading, true); - assert.strictEqual(state.readingMore, true); // add chunk to front - - readable.unshift('unshifted'); // end - - readable.push(null); -} -{ - var _readable = new Readable({ - read: function read(size) {} - }); - - var _state = _readable._readableState; // Starting off with false initially. - - assert.strictEqual(_state.reading, false); - assert.strictEqual(_state.readingMore, false); - - _readable.on('data', common.mustCall(function (data) { - // while in a flowing state without a 'readable' listener - // we should be reading more - if (_readable.readableFlowing) assert.strictEqual(_state.readingMore, true); // reading as long as we've not ended - - assert.strictEqual(_state.reading, !_state.ended); - }, 2)); - - function onStreamEnd() { - // End of stream; state.reading is false - // And so should be readingMore. - assert.strictEqual(_state.readingMore, false); - assert.strictEqual(_state.reading, false); - } - - _readable.on('end', common.mustCall(onStreamEnd)); - - _readable.push('pushed'); // stop emitting 'data' events - - - assert.strictEqual(_state.flowing, true); - - _readable.pause(); // paused - - - assert.strictEqual(_state.reading, false); - assert.strictEqual(_state.flowing, false); - - _readable.resume(); - - assert.strictEqual(_state.reading, false); - assert.strictEqual(_state.flowing, true); // add chunk to front - - _readable.unshift('unshifted'); // end - - - _readable.push(null); -} -{ - var _readable2 = new Readable({ - read: function read(size) {} - }); - - var _state2 = _readable2._readableState; // Starting off with false initially. - - assert.strictEqual(_state2.reading, false); - assert.strictEqual(_state2.readingMore, false); - var onReadable = common.mustNotCall; - - _readable2.on('readable', onReadable); - - _readable2.on('data', common.mustCall(function (data) { - // reading as long as we've not ended - assert.strictEqual(_state2.reading, !_state2.ended); - }, 2)); - - _readable2.removeListener('readable', onReadable); - - function onStreamEnd() { - // End of stream; state.reading is false - // And so should be readingMore. 
- assert.strictEqual(_state2.readingMore, false); - assert.strictEqual(_state2.reading, false); - } - - _readable2.on('end', common.mustCall(onStreamEnd)); - - _readable2.push('pushed'); // we are still not flowing, we will be resuming in the next tick - - - assert.strictEqual(_state2.flowing, false); // wait for nextTick, so the readableListener flag resets - - process.nextTick(function () { - _readable2.resume(); // stop emitting 'data' events - - - assert.strictEqual(_state2.flowing, true); - - _readable2.pause(); // paused - - - assert.strictEqual(_state2.flowing, false); - - _readable2.resume(); - - assert.strictEqual(_state2.flowing, true); // add chunk to front - - _readable2.unshift('unshifted'); // end - - - _readable2.push(null); - }); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-resume-hwm.js b/test/parallel/test-stream-readable-resume-hwm.js deleted file mode 100644 index 93a4a54226..0000000000 --- a/test/parallel/test-stream-readable-resume-hwm.js +++ /dev/null @@ -1,47 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Readable = _require.Readable; // readable.resume() should not lead to a ._read() call being scheduled -// when we exceed the high water mark already. - - -var readable = new Readable({ - read: common.mustNotCall(), - highWaterMark: 100 -}); // Fill up the internal buffer so that we definitely exceed the HWM: - -for (var i = 0; i < 10; i++) { - readable.push('a'.repeat(200)); -} // Call resume, and pause after one chunk. -// The .pause() is just so that we don’t empty the buffer fully, which would -// be a valid reason to call ._read(). - - -readable.resume(); -readable.once('data', common.mustCall(function () { - return readable.pause(); -})); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-resumeScheduled.js b/test/parallel/test-stream-readable-resumeScheduled.js deleted file mode 100644 index 8aba21c386..0000000000 --- a/test/parallel/test-stream-readable-resumeScheduled.js +++ /dev/null @@ -1,91 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); // Testing Readable Stream resumeScheduled state - - -var assert = require('assert/'); - -var _require = require('../../'), - Readable = _require.Readable, - Writable = _require.Writable; - -{ - // pipe() test case - var r = new Readable({ - read: function read() {} - }); - var w = new Writable(); // resumeScheduled should start = `false`. - - assert.strictEqual(r._readableState.resumeScheduled, false); // calling pipe() should change the state value = true. 
- - r.pipe(w); - assert.strictEqual(r._readableState.resumeScheduled, true); - process.nextTick(common.mustCall(function () { - assert.strictEqual(r._readableState.resumeScheduled, false); - })); -} -{ - // 'data' listener test case - var _r = new Readable({ - read: function read() {} - }); // resumeScheduled should start = `false`. - - - assert.strictEqual(_r._readableState.resumeScheduled, false); - - _r.push(bufferShim.from([1, 2, 3])); // adding 'data' listener should change the state value - - - _r.on('data', common.mustCall(function () { - assert.strictEqual(_r._readableState.resumeScheduled, false); - })); - - assert.strictEqual(_r._readableState.resumeScheduled, true); - process.nextTick(common.mustCall(function () { - assert.strictEqual(_r._readableState.resumeScheduled, false); - })); -} -{ - // resume() test case - var _r2 = new Readable({ - read: function read() {} - }); // resumeScheduled should start = `false`. - - - assert.strictEqual(_r2._readableState.resumeScheduled, false); // Calling resume() should change the state value. - - _r2.resume(); - - assert.strictEqual(_r2._readableState.resumeScheduled, true); - - _r2.on('resume', common.mustCall(function () { - // The state value should be `false` again - assert.strictEqual(_r2._readableState.resumeScheduled, false); - })); - - process.nextTick(common.mustCall(function () { - assert.strictEqual(_r2._readableState.resumeScheduled, false); - })); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-setEncoding-existing-buffers.js b/test/parallel/test-stream-readable-setEncoding-existing-buffers.js deleted file mode 100644 index 31bd3263ce..0000000000 --- a/test/parallel/test-stream-readable-setEncoding-existing-buffers.js +++ /dev/null @@ -1,101 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var _require = require('../../'), - Readable = _require.Readable; - -var assert = require('assert/'); - -{ - // Call .setEncoding() while there are bytes already in the buffer. - var r = new Readable({ - read: function read() {} - }); - r.push(bufferShim.from('a')); - r.push(bufferShim.from('b')); - r.setEncoding('utf8'); - var chunks = []; - r.on('data', function (chunk) { - return chunks.push(chunk); - }); - process.nextTick(function () { - assert.deepStrictEqual(chunks, ['ab']); - }); -} -{ - // Call .setEncoding() while the buffer contains a complete, - // but chunked character. - var _r = new Readable({ - read: function read() {} - }); - - _r.push(bufferShim.from([0xf0])); - - _r.push(bufferShim.from([0x9f])); - - _r.push(bufferShim.from([0x8e])); - - _r.push(bufferShim.from([0x89])); - - _r.setEncoding('utf8'); - - var _chunks = []; - - _r.on('data', function (chunk) { - return _chunks.push(chunk); - }); - - process.nextTick(function () { - assert.deepStrictEqual(_chunks, ['🎉']); - }); -} -{ - // Call .setEncoding() while the buffer contains an incomplete character, - // and finish the character later. 
- var _r2 = new Readable({ - read: function read() {} - }); - - _r2.push(bufferShim.from([0xf0])); - - _r2.push(bufferShim.from([0x9f])); - - _r2.setEncoding('utf8'); - - _r2.push(bufferShim.from([0x8e])); - - _r2.push(bufferShim.from([0x89])); - - var _chunks2 = []; - - _r2.on('data', function (chunk) { - return _chunks2.push(chunk); - }); - - process.nextTick(function () { - assert.deepStrictEqual(_chunks2, ['🎉']); - }); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-setEncoding-null.js b/test/parallel/test-stream-readable-setEncoding-null.js deleted file mode 100644 index c4276b4fbb..0000000000 --- a/test/parallel/test-stream-readable-setEncoding-null.js +++ /dev/null @@ -1,39 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var _require = require('../../'), - Readable = _require.Readable; - -{ - var readable = new Readable({ - encoding: 'hex' - }); - assert.strictEqual(readable._readableState.encoding, 'hex'); - readable.setEncoding(null); - assert.strictEqual(readable._readableState.encoding, 'utf8'); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-with-unimplemented-_read.js b/test/parallel/test-stream-readable-with-unimplemented-_read.js deleted file mode 100644 index 42b8c26134..0000000000 --- a/test/parallel/test-stream-readable-with-unimplemented-_read.js +++ /dev/null @@ -1,36 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Readable = _require.Readable; - -var readable = new Readable(); -readable.on('error', common.expectsError({ - code: 'ERR_METHOD_NOT_IMPLEMENTED', - type: Error, - message: 'The _read() method is not implemented' -})); -readable.read(); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-readableListening-state.js b/test/parallel/test-stream-readableListening-state.js deleted file mode 100644 index cf6c46b759..0000000000 --- a/test/parallel/test-stream-readableListening-state.js +++ /dev/null @@ -1,51 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var r = new stream.Readable({ - read: function read() {} -}); // readableListening state should start in `false`. - -assert.strictEqual(r._readableState.readableListening, false); -r.on('readable', common.mustCall(function () { - // Inside the readable event this state should be true. 
- assert.strictEqual(r._readableState.readableListening, true); -})); -r.push(bufferShim.from('Testing readableListening state')); -var r2 = new stream.Readable({ - read: function read() {} -}); // readableListening state should start in `false`. - -assert.strictEqual(r2._readableState.readableListening, false); -r2.on('data', common.mustCall(function (chunk) { - // readableListening should be false because we don't have - // a `readable` listener - assert.strictEqual(r2._readableState.readableListening, false); -})); -r2.push(bufferShim.from('Testing readableListening state')); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-transform-callback-twice.js b/test/parallel/test-stream-transform-callback-twice.js deleted file mode 100644 index 005d5bf679..0000000000 --- a/test/parallel/test-stream-transform-callback-twice.js +++ /dev/null @@ -1,41 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Transform = _require.Transform; - -var stream = new Transform({ - transform: function transform(chunk, enc, cb) { - cb(); - cb(); - } -}); -stream.on('error', common.expectsError({ - type: Error, - message: 'Callback called multiple times', - code: 'ERR_MULTIPLE_CALLBACK' -})); -stream.write('foo'); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-transform-constructor-set-methods.js b/test/parallel/test-stream-transform-constructor-set-methods.js deleted file mode 100644 index 2b57d43b4f..0000000000 --- a/test/parallel/test-stream-transform-constructor-set-methods.js +++ /dev/null @@ -1,62 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('assert/'), - strictEqual = _require.strictEqual; - -var _require2 = require('../../'), - Transform = _require2.Transform; - -var t = new Transform(); -t.on('error', common.expectsError({ - type: Error, - code: 'ERR_METHOD_NOT_IMPLEMENTED', - message: 'The _transform() method is not implemented' -})); -t.end(bufferShim.from('blerg')); - -var _transform = common.mustCall(function (chunk, _, next) { - next(); -}); - -var _final = common.mustCall(function (next) { - next(); -}); - -var _flush = common.mustCall(function (next) { - next(); -}); - -var t2 = new Transform({ - transform: _transform, - flush: _flush, - final: _final -}); -strictEqual(t2._transform, _transform); -strictEqual(t2._flush, _flush); -strictEqual(t2._final, _final); -t2.end(bufferShim.from('blerg')); -t2.resume(); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-transform-destroy.js 
b/test/parallel/test-stream-transform-destroy.js deleted file mode 100644 index 3a6f1dda9f..0000000000 --- a/test/parallel/test-stream-transform-destroy.js +++ /dev/null @@ -1,181 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Transform = _require.Transform; - -var assert = require('assert/'); - -{ - var transform = new Transform({ - transform: function transform(chunk, enc, cb) {} - }); - transform.resume(); - transform.on('end', common.mustNotCall()); - transform.on('close', common.mustCall()); - transform.on('finish', common.mustNotCall()); - transform.destroy(); -} -{ - var _transform = new Transform({ - transform: function transform(chunk, enc, cb) {} - }); - - _transform.resume(); - - var expected = new Error('kaboom'); - - _transform.on('end', common.mustNotCall()); - - _transform.on('finish', common.mustNotCall()); - - _transform.on('close', common.mustCall()); - - _transform.on('error', common.mustCall(function (err) { - assert.strictEqual(err, expected); - })); - - _transform.destroy(expected); -} -{ - var _transform2 = new Transform({ - transform: function transform(chunk, enc, cb) {} - }); - - _transform2._destroy = common.mustCall(function (err, cb) { - assert.strictEqual(err, _expected); - cb(err); - }, 1); - - var _expected = new Error('kaboom'); - - _transform2.on('finish', common.mustNotCall('no finish event')); - - _transform2.on('close', common.mustCall()); - - _transform2.on('error', common.mustCall(function (err) { - assert.strictEqual(err, _expected); - })); - - _transform2.destroy(_expected); -} -{ - var _expected2 = new Error('kaboom'); - - var _transform3 = new Transform({ - transform: function transform(chunk, enc, cb) {}, - destroy: common.mustCall(function (err, cb) { - assert.strictEqual(err, _expected2); - cb(); - }, 1) - }); - - _transform3.resume(); - - _transform3.on('end', common.mustNotCall('no end event')); - - _transform3.on('close', common.mustCall()); - - _transform3.on('finish', common.mustNotCall('no finish event')); // error is swallowed by the custom _destroy - - - _transform3.on('error', common.mustNotCall('no error event')); - - _transform3.destroy(_expected2); -} -{ - var _transform4 = new Transform({ - transform: function transform(chunk, enc, cb) {} - }); - - _transform4._destroy = common.mustCall(function (err, cb) { - assert.strictEqual(err, null); - cb(); - }, 1); - - _transform4.destroy(); -} -{ - var _transform5 = new Transform({ - transform: function transform(chunk, enc, cb) {} - }); - - _transform5.resume(); - - _transform5._destroy = common.mustCall(function (err, cb) { - var _this = this; - - assert.strictEqual(err, null); - process.nextTick(function () { - _this.push(null); - - _this.end(); - - cb(); - }); - }, 1); - var fail = common.mustNotCall('no event'); - - _transform5.on('finish', fail); - - _transform5.on('end', fail); - - _transform5.on('close', common.mustCall()); - - _transform5.destroy(); - - _transform5.removeListener('end', fail); - - _transform5.removeListener('finish', fail); - - _transform5.on('end', common.mustCall()); - - _transform5.on('finish', common.mustCall()); -} -{ - var _transform6 = new Transform({ - transform: function transform(chunk, enc, cb) {} - }); - - var _expected3 = new Error('kaboom'); - - _transform6._destroy = common.mustCall(function (err, cb) { - assert.strictEqual(err, null); - cb(_expected3); - }, 1); - - _transform6.on('close', common.mustCall()); - - 
_transform6.on('finish', common.mustNotCall('no finish event')); - - _transform6.on('end', common.mustNotCall('no end event')); - - _transform6.on('error', common.mustCall(function (err) { - assert.strictEqual(err, _expected3); - })); - - _transform6.destroy(); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-transform-final-sync.js b/test/parallel/test-stream-transform-final-sync.js deleted file mode 100644 index 39c0b46a98..0000000000 --- a/test/parallel/test-stream-transform-final-sync.js +++ /dev/null @@ -1,134 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var state = 0; -/* -What you do -var stream = new stream.Transform({ - transform: function transformCallback(chunk, _, next) { - // part 1 - this.push(chunk); - //part 2 - next(); - }, - final: function endCallback(done) { - // part 1 - process.nextTick(function () { - // part 2 - done(); - }); - }, - flush: function flushCallback(done) { - // part 1 - process.nextTick(function () { - // part 2 - done(); - }); - } -}); -t.on('data', dataListener); -t.on('end', endListener); -t.on('finish', finishListener); -t.write(1); -t.write(4); -t.end(7, endMethodCallback); - -The order things are called - -1. transformCallback part 1 -2. dataListener -3. transformCallback part 2 -4. transformCallback part 1 -5. dataListener -6. transformCallback part 2 -7. transformCallback part 1 -8. dataListener -9. transformCallback part 2 -10. finalCallback part 1 -11. finalCallback part 2 -12. flushCallback part 1 -13. finishListener -14. endMethodCallback -15. flushCallback part 2 -16. 
endListener -*/ - -var t = new stream.Transform({ - objectMode: true, - transform: common.mustCall(function (chunk, _, next) { - // transformCallback part 1 - assert.strictEqual(++state, chunk); - this.push(state); // transformCallback part 2 - - assert.strictEqual(++state, chunk + 2); - process.nextTick(next); - }, 3), - final: common.mustCall(function (done) { - state++; // finalCallback part 1 - - assert.strictEqual(state, 10); - state++; // finalCallback part 2 - - assert.strictEqual(state, 11); - done(); - }, 1), - flush: common.mustCall(function (done) { - state++; // fluchCallback part 1 - - assert.strictEqual(state, 12); - process.nextTick(function () { - state++; // fluchCallback part 2 - - assert.strictEqual(state, 15); - done(); - }); - }, 1) -}); -t.on('finish', common.mustCall(function () { - state++; // finishListener - - assert.strictEqual(state, 13); -}, 1)); -t.on('end', common.mustCall(function () { - state++; // endEvent - - assert.strictEqual(state, 16); -}, 1)); -t.on('data', common.mustCall(function (d) { - // dataListener - assert.strictEqual(++state, d + 1); -}, 3)); -t.write(1); -t.write(4); -t.end(7, common.mustCall(function () { - state++; // endMethodCallback - - assert.strictEqual(state, 14); -}, 1)); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-transform-final.js b/test/parallel/test-stream-transform-final.js deleted file mode 100644 index 37b270ca8f..0000000000 --- a/test/parallel/test-stream-transform-final.js +++ /dev/null @@ -1,136 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var state = 0; -/* -What you do -var stream = new stream.Transform({ - transform: function transformCallback(chunk, _, next) { - // part 1 - this.push(chunk); - //part 2 - next(); - }, - final: function endCallback(done) { - // part 1 - process.nextTick(function () { - // part 2 - done(); - }); - }, - flush: function flushCallback(done) { - // part 1 - process.nextTick(function () { - // part 2 - done(); - }); - } -}); -t.on('data', dataListener); -t.on('end', endListener); -t.on('finish', finishListener); -t.write(1); -t.write(4); -t.end(7, endMethodCallback); - -The order things are called - -1. transformCallback part 1 -2. dataListener -3. transformCallback part 2 -4. transformCallback part 1 -5. dataListener -6. transformCallback part 2 -7. transformCallback part 1 -8. dataListener -9. transformCallback part 2 -10. finalCallback part 1 -11. finalCallback part 2 -12. flushCallback part 1 -13. finishListener -14. endMethodCallback -15. flushCallback part 2 -16. 
endListener -*/ - -var t = new stream.Transform({ - objectMode: true, - transform: common.mustCall(function (chunk, _, next) { - // transformCallback part 1 - assert.strictEqual(++state, chunk); - this.push(state); // transformCallback part 2 - - assert.strictEqual(++state, chunk + 2); - process.nextTick(next); - }, 3), - final: common.mustCall(function (done) { - state++; // finalCallback part 1 - - assert.strictEqual(state, 10); - setTimeout(function () { - state++; // finalCallback part 2 - - assert.strictEqual(state, 11); - done(); - }, 100); - }, 1), - flush: common.mustCall(function (done) { - state++; // flushCallback part 1 - - assert.strictEqual(state, 12); - process.nextTick(function () { - state++; // flushCallback part 2 - - assert.strictEqual(state, 15); - done(); - }); - }, 1) -}); -t.on('finish', common.mustCall(function () { - state++; // finishListener - - assert.strictEqual(state, 13); -}, 1)); -t.on('end', common.mustCall(function () { - state++; // end event - - assert.strictEqual(state, 16); -}, 1)); -t.on('data', common.mustCall(function (d) { - // dataListener - assert.strictEqual(++state, d + 1); -}, 3)); -t.write(1); -t.write(4); -t.end(7, common.mustCall(function () { - state++; // endMethodCallback - - assert.strictEqual(state, 14); -}, 1)); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-transform-flush-data.js b/test/parallel/test-stream-transform-flush-data.js deleted file mode 100644 index a6f9810315..0000000000 --- a/test/parallel/test-stream-transform-flush-data.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var Transform = require('../../').Transform; - -var expected = 'asdf'; - -function _transform(d, e, n) { - n(); -} - -function _flush(n) { - n(null, expected); -} - -var t = new Transform({ - transform: _transform, - flush: _flush -}); -t.end(bufferShim.from('blerg')); -t.on('data', function (data) { - assert.strictEqual(data.toString(), expected); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-transform-objectmode-falsey-value.js b/test/parallel/test-stream-transform-objectmode-falsey-value.js deleted file mode 100644 index 39ec6c3c8f..0000000000 --- a/test/parallel/test-stream-transform-objectmode-falsey-value.js +++ /dev/null @@ -1,77 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var PassThrough = stream.PassThrough; -var src = new PassThrough({ - objectMode: true -}); -var tx = new PassThrough({ - objectMode: true -}); -var dest = new PassThrough({ - objectMode: true -}); -var expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; -var results = []; -dest.on('data', common.mustCall(function (x) { - results.push(x); -}, expect.length)); -src.pipe(tx).pipe(dest); -var i = -1; -var int = setInterval(common.mustCall(function () { - if (results.length === expect.length) { - src.end(); - clearInterval(int); - assert.deepStrictEqual(results, expect); - } else { - src.write(i++); - } -}, expect.length + 1), 1); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-transform-split-highwatermark.js b/test/parallel/test-stream-transform-split-highwatermark.js deleted file mode 100644 index 5de64157aa..0000000000 --- a/test/parallel/test-stream-transform-split-highwatermark.js +++ /dev/null @@ -1,136 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var _require = require('../../'), - Transform = _require.Transform, - Readable = _require.Readable, - Writable = _require.Writable; - -var DEFAULT = 16 * 1024; - -function testTransform(expectedReadableHwm, expectedWritableHwm, options) { - var t = new Transform(options); - assert.strictEqual(t._readableState.highWaterMark, expectedReadableHwm); - assert.strictEqual(t._writableState.highWaterMark, expectedWritableHwm); -} // test overriding defaultHwm - - -testTransform(666, DEFAULT, { - readableHighWaterMark: 666 -}); -testTransform(DEFAULT, 777, { - writableHighWaterMark: 777 -}); -testTransform(666, 777, { - readableHighWaterMark: 666, - writableHighWaterMark: 777 -}); // test 0 overriding defaultHwm - -testTransform(0, DEFAULT, { - readableHighWaterMark: 0 -}); -testTransform(DEFAULT, 0, { - writableHighWaterMark: 0 -}); // test highWaterMark overriding - -testTransform(555, 555, 
{ - highWaterMark: 555, - readableHighWaterMark: 666 -}); -testTransform(555, 555, { - highWaterMark: 555, - writableHighWaterMark: 777 -}); -testTransform(555, 555, { - highWaterMark: 555, - readableHighWaterMark: 666, - writableHighWaterMark: 777 -}); // test highWaterMark = 0 overriding - -testTransform(0, 0, { - highWaterMark: 0, - readableHighWaterMark: 666 -}); -testTransform(0, 0, { - highWaterMark: 0, - writableHighWaterMark: 777 -}); -testTransform(0, 0, { - highWaterMark: 0, - readableHighWaterMark: 666, - writableHighWaterMark: 777 -}); // test undefined, null - -[undefined, null].forEach(function (v) { - testTransform(DEFAULT, DEFAULT, { - readableHighWaterMark: v - }); - testTransform(DEFAULT, DEFAULT, { - writableHighWaterMark: v - }); - testTransform(666, DEFAULT, { - highWaterMark: v, - readableHighWaterMark: 666 - }); - testTransform(DEFAULT, 777, { - highWaterMark: v, - writableHighWaterMark: 777 - }); -}); // test NaN - -{ - common.expectsError(function () { - new Transform({ - readableHighWaterMark: NaN - }); - }, { - type: TypeError, - code: 'ERR_INVALID_OPT_VALUE', - message: 'The value "NaN" is invalid for option "readableHighWaterMark"' - }); - common.expectsError(function () { - new Transform({ - writableHighWaterMark: NaN - }); - }, { - type: TypeError, - code: 'ERR_INVALID_OPT_VALUE', - message: 'The value "NaN" is invalid for option "writableHighWaterMark"' - }); -} // test non Duplex streams ignore the options - -{ - var r = new Readable({ - readableHighWaterMark: 666 - }); - assert.strictEqual(r._readableState.highWaterMark, DEFAULT); - var w = new Writable({ - writableHighWaterMark: 777 - }); - assert.strictEqual(w._writableState.highWaterMark, DEFAULT); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-transform-split-objectmode.js b/test/parallel/test-stream-transform-split-objectmode.js deleted file mode 100644 index e2349c1862..0000000000 --- a/test/parallel/test-stream-transform-split-objectmode.js +++ /dev/null @@ -1,99 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
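// Illustrative sketch (not from the original test) of the split object-mode
// behaviour the deleted test below exercises, assuming the standard Node.js
// 'stream' API: enabling readableObjectMode on a Transform lets it push
// objects while still receiving Buffers on the writable side (the test
// additionally checks the default highWaterMark split of 16 objects vs. 16 KiB).
const { Transform } = require('stream');

const parser = new Transform({
  readableObjectMode: true, // objects out, bytes in
  transform(chunk, enc, callback) {
    // chunk is still a Buffer on the writable side
    callback(null, { val: chunk[0] });
  }
});

parser.on('data', (obj) => console.log('parsed object:', obj));
parser.end(Buffer.from([42])); // logs: parsed object: { val: 42 }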
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var Transform = require('../../').Transform; - -var parser = new Transform({ - readableObjectMode: true -}); -assert(parser._readableState.objectMode); -assert(!parser._writableState.objectMode); -assert.strictEqual(parser.readableHighWaterMark, 16); -assert.strictEqual(parser.writableHighWaterMark, 16 * 1024); -assert.strictEqual(parser.readableHighWaterMark, parser._readableState.highWaterMark); -assert.strictEqual(parser.writableHighWaterMark, parser._writableState.highWaterMark); - -parser._transform = function (chunk, enc, callback) { - callback(null, { - val: chunk[0] - }); -}; - -var parsed; -parser.on('data', function (obj) { - parsed = obj; -}); -parser.end(bufferShim.from([42])); -process.on('exit', function () { - assert.strictEqual(parsed.val, 42); -}); -var serializer = new Transform({ - writableObjectMode: true -}); -assert(!serializer._readableState.objectMode); -assert(serializer._writableState.objectMode); -assert.strictEqual(serializer.readableHighWaterMark, 16 * 1024); -assert.strictEqual(serializer.writableHighWaterMark, 16); -assert.strictEqual(parser.readableHighWaterMark, parser._readableState.highWaterMark); -assert.strictEqual(parser.writableHighWaterMark, parser._writableState.highWaterMark); - -serializer._transform = function (obj, _, callback) { - callback(null, bufferShim.from([obj.val])); -}; - -var serialized; -serializer.on('data', function (chunk) { - serialized = chunk; -}); -serializer.write({ - val: 42 -}); -process.on('exit', function () { - assert.strictEqual(serialized[0], 42); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-uint8array.js b/test/parallel/test-stream-uint8array.js deleted file mode 100644 index b5b767270c..0000000000 --- a/test/parallel/test-stream-uint8array.js +++ /dev/null @@ -1,131 +0,0 @@ -"use strict"; - -function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread(); } - -function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance"); } - -function _iterableToArray(iter) { if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter); } - -function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } } - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var _require = require('../../'), - Readable = _require.Readable, - Writable = _require.Writable; - -var ABC = new Uint8Array([0x41, 0x42, 0x43]); -var DEF = new Uint8Array([0x44, 0x45, 0x46]); -var GHI = new Uint8Array([0x47, 0x48, 0x49]); -{ - // Simple Writable test. 
- var n = 0; - var writable = new Writable({ - write: common.mustCall(function (chunk, encoding, cb) { - assert(chunk instanceof Buffer); - - if (n++ === 0) { - assert.strictEqual(String(chunk), 'ABC'); - } else { - assert.strictEqual(String(chunk), 'DEF'); - } - - cb(); - }, 2) - }); - writable.write(ABC); - writable.end(DEF); -} -{ - // Writable test, pass in Uint8Array in object mode. - var _writable = new Writable({ - objectMode: true, - write: common.mustCall(function (chunk, encoding, cb) { - assert(!(chunk instanceof Buffer)); - assert(chunk instanceof Uint8Array); - assert.strictEqual(chunk, ABC); - assert.strictEqual(encoding, 'utf8'); - cb(); - }) - }); - - _writable.end(ABC); -} -{ - // Writable test, multiple writes carried out via writev. - var callback; - - var _writable2 = new Writable({ - write: common.mustCall(function (chunk, encoding, cb) { - assert(chunk instanceof Buffer); - assert.strictEqual(encoding, 'buffer'); - assert.strictEqual(String(chunk), 'ABC'); - callback = cb; - }), - writev: common.mustCall(function (chunks, cb) { - assert.strictEqual(chunks.length, 2); - assert.strictEqual(chunks[0].encoding, 'buffer'); - assert.strictEqual(chunks[1].encoding, 'buffer'); - assert.strictEqual(chunks[0].chunk + chunks[1].chunk, 'DEFGHI'); - }) - }); - - _writable2.write(ABC); - - _writable2.write(DEF); - - _writable2.end(GHI); - - callback(); -} -{ - // Simple Readable test. - var readable = new Readable({ - read: function read() {} - }); - readable.push(DEF); - readable.unshift(ABC); - var buf = readable.read(); - assert(buf instanceof Buffer); - assert.deepStrictEqual(_toConsumableArray(buf), [].concat(_toConsumableArray(ABC), _toConsumableArray(DEF))); -} -{ - // Readable test, setEncoding. - var _readable = new Readable({ - read: function read() {} - }); - - _readable.setEncoding('utf8'); - - _readable.push(DEF); - - _readable.unshift(ABC); - - var out = _readable.read(); - - assert.strictEqual(out, 'ABCDEF'); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-unpipe-event.js b/test/parallel/test-stream-unpipe-event.js deleted file mode 100644 index 797e03182d..0000000000 --- a/test/parallel/test-stream-unpipe-event.js +++ /dev/null @@ -1,209 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been 
called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -if (process.version.indexOf('v0.8') === 0) { - process.exit(0); -} -/**/ - - -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var _require = require('../../'), - Writable = _require.Writable, - Readable = _require.Readable; - -var NullWriteable = -/*#__PURE__*/ -function (_Writable) { - _inherits(NullWriteable, _Writable); - - function NullWriteable() { - _classCallCheck(this, NullWriteable); - - return _possibleConstructorReturn(this, _getPrototypeOf(NullWriteable).apply(this, arguments)); - } - - _createClass(NullWriteable, [{ - key: "_write", - value: function _write(chunk, encoding, callback) { - return callback(); - } - }]); - - return NullWriteable; -}(Writable); - -var QuickEndReadable = -/*#__PURE__*/ -function (_Readable) { - _inherits(QuickEndReadable, _Readable); - - function QuickEndReadable() { - _classCallCheck(this, QuickEndReadable); - - return _possibleConstructorReturn(this, _getPrototypeOf(QuickEndReadable).apply(this, arguments)); - } - - _createClass(QuickEndReadable, [{ - key: "_read", - value: function _read() { - this.push(null); - } - }]); - - return QuickEndReadable; -}(Readable); - -var NeverEndReadable = -/*#__PURE__*/ -function (_Readable2) { - _inherits(NeverEndReadable, _Readable2); - - function NeverEndReadable() { - _classCallCheck(this, NeverEndReadable); - - return _possibleConstructorReturn(this, _getPrototypeOf(NeverEndReadable).apply(this, arguments)); - } - - _createClass(NeverEndReadable, [{ - key: "_read", - value: function _read() {} - }]); - - return NeverEndReadable; -}(Readable); - -{ - var dest = new NullWriteable(); - var src = new QuickEndReadable(); - dest.on('pipe', common.mustCall()); - dest.on('unpipe', common.mustCall()); - src.pipe(dest); - setImmediate(function () { - assert.strictEqual(src._readableState.pipesCount, 0); - }); -} -{ - var _dest = new NullWriteable(); - - var _src = new NeverEndReadable(); - - _dest.on('pipe', common.mustCall()); - - _dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted')); - - _src.pipe(_dest); - - setImmediate(function () { - assert.strictEqual(_src._readableState.pipesCount, 1); - }); -} -{ - var _dest2 = new NullWriteable(); - - var _src2 = new NeverEndReadable(); - - _dest2.on('pipe', common.mustCall()); - - _dest2.on('unpipe', common.mustCall()); - - _src2.pipe(_dest2); - - _src2.unpipe(_dest2); - - setImmediate(function () { - assert.strictEqual(_src2._readableState.pipesCount, 0); - }); -} -{ - var _dest3 = new NullWriteable(); - - var _src3 = new QuickEndReadable(); - - _dest3.on('pipe', common.mustCall()); - - _dest3.on('unpipe', common.mustCall()); - - _src3.pipe(_dest3, { - end: false - }); - - 
setImmediate(function () { - assert.strictEqual(_src3._readableState.pipesCount, 0); - }); -} -{ - var _dest4 = new NullWriteable(); - - var _src4 = new NeverEndReadable(); - - _dest4.on('pipe', common.mustCall()); - - _dest4.on('unpipe', common.mustNotCall('unpipe should not have been emitted')); - - _src4.pipe(_dest4, { - end: false - }); - - setImmediate(function () { - assert.strictEqual(_src4._readableState.pipesCount, 1); - }); -} -{ - var _dest5 = new NullWriteable(); - - var _src5 = new NeverEndReadable(); - - _dest5.on('pipe', common.mustCall()); - - _dest5.on('unpipe', common.mustCall()); - - _src5.pipe(_dest5, { - end: false - }); - - _src5.unpipe(_dest5); - - setImmediate(function () { - assert.strictEqual(_src5._readableState.pipesCount, 0); - }); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-unshift-empty-chunk.js b/test/parallel/test-stream-unshift-empty-chunk.js deleted file mode 100644 index 2157dff99f..0000000000 --- a/test/parallel/test-stream-unshift-empty-chunk.js +++ /dev/null @@ -1,86 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); // This test verifies that stream.unshift(bufferShim.alloc(0)) or -// stream.unshift('') does not set state.reading=false. - - -var Readable = require('../../').Readable; - -var r = new Readable(); -var nChunks = 10; -var chunk = bufferShim.alloc(10, 'x'); - -r._read = function (n) { - setImmediate(function () { - r.push(--nChunks === 0 ? null : chunk); - }); -}; - -var readAll = false; -var seen = []; -r.on('readable', function () { - var chunk; - - while (chunk = r.read()) { - seen.push(chunk.toString()); // simulate only reading a certain amount of the data, - // and then putting the rest of the chunk back into the - // stream, like a parser might do. We just fill it with - // 'y' so that it's easy to see which bits were touched, - // and which were not. - - var putBack = bufferShim.alloc(readAll ? 
0 : 5, 'y'); - readAll = !readAll; - r.unshift(putBack); - } -}); -var expect = ['xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy']; -r.on('end', function () { - assert.deepStrictEqual(seen, expect); - - require('tap').pass(); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-unshift-read-race.js b/test/parallel/test-stream-unshift-read-race.js deleted file mode 100644 index fc0c64df48..0000000000 --- a/test/parallel/test-stream-unshift-read-race.js +++ /dev/null @@ -1,174 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); // This test verifies that: -// 1. unshift() does not cause colliding _read() calls. -// 2. unshift() after the 'end' event is an error, but after the EOF -// signalling null, it is ok, and just creates a new readable chunk. -// 3. push() after the EOF signaling null is an error. -// 4. _read() is not called after pushing the EOF null chunk. - - -var stream = require('../../'); - -var hwm = 10; -var r = stream.Readable({ - highWaterMark: hwm -}); -var chunks = 10; -var data = bufferShim.allocUnsafe(chunks * hwm + Math.ceil(hwm / 2)); - -for (var i = 0; i < data.length; i++) { - var c = 'asdf'.charCodeAt(i % 4); - data[i] = c; -} - -var pos = 0; -var pushedNull = false; - -r._read = function (n) { - assert(!pushedNull, '_read after null push'); // every third chunk is fast - - push(!(chunks % 3)); - - function push(fast) { - assert(!pushedNull, 'push() after null push'); - var c = pos >= data.length ? 
null : data.slice(pos, pos + n); - pushedNull = c === null; - - if (fast) { - pos += n; - r.push(c); - if (c === null) pushError(); - } else { - setTimeout(function () { - pos += n; - r.push(c); - if (c === null) pushError(); - }, 1); - } - } -}; - -function pushError() { - common.expectsError(function () { - r.push(bufferShim.allocUnsafe(1)); - }, { - code: 'ERR_STREAM_PUSH_AFTER_EOF', - type: Error, - message: 'stream.push() after EOF' - }); -} - -var w = stream.Writable(); -var written = []; - -w._write = function (chunk, encoding, cb) { - written.push(chunk.toString()); - cb(); -}; - -r.on('end', common.mustCall(function () { - common.expectsError(function () { - r.unshift(bufferShim.allocUnsafe(1)); - }, { - code: 'ERR_STREAM_UNSHIFT_AFTER_END_EVENT', - type: Error, - message: 'stream.unshift() after end event' - }); - w.end(); -})); -r.on('readable', function () { - var chunk; - - while (null !== (chunk = r.read(10))) { - w.write(chunk); - if (chunk.length > 4) r.unshift(bufferShim.from('1234')); - } -}); -w.on('finish', common.mustCall(function () { - // each chunk should start with 1234, and then be asfdasdfasdf... - // The first got pulled out before the first unshift('1234'), so it's - // lacking that piece. - assert.strictEqual(written[0], 'asdfasdfas'); - var asdf = 'd'; - console.error("0: ".concat(written[0])); - - for (var _i = 1; _i < written.length; _i++) { - console.error("".concat(_i.toString(32), ": ").concat(written[_i])); - assert.strictEqual(written[_i].slice(0, 4), '1234'); - - for (var j = 4; j < written[_i].length; j++) { - var _c = written[_i].charAt(j); - - assert.strictEqual(_c, asdf); - - switch (asdf) { - case 'a': - asdf = 's'; - break; - - case 's': - asdf = 'd'; - break; - - case 'd': - asdf = 'f'; - break; - - case 'f': - asdf = 'a'; - break; - } - } - } -})); -process.on('exit', function () { - assert.strictEqual(written.length, 18); - - require('tap').pass(); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-writable-change-default-encoding.js b/test/parallel/test-stream-writable-change-default-encoding.js deleted file mode 100644 index 441dc637e5..0000000000 --- a/test/parallel/test-stream-writable-change-default-encoding.js +++ /dev/null @@ -1,138 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - 
super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var MyWritable = -/*#__PURE__*/ -function (_stream$Writable) { - _inherits(MyWritable, _stream$Writable); - - function MyWritable(fn, options) { - var _this; - - _classCallCheck(this, MyWritable); - - _this = _possibleConstructorReturn(this, _getPrototypeOf(MyWritable).call(this, options)); - _this.fn = fn; - return _this; - } - - _createClass(MyWritable, [{ - key: "_write", - value: function _write(chunk, encoding, callback) { - this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding); - callback(); - } - }]); - - return MyWritable; -}(stream.Writable); - -(function defaultCondingIsUtf8() { - var m = new MyWritable(function (isBuffer, type, enc) { - assert.strictEqual(enc, 'utf8'); - }, { - decodeStrings: false - }); - m.write('foo'); - m.end(); -})(); - -(function changeDefaultEncodingToAscii() { - var m = new MyWritable(function (isBuffer, type, enc) { - assert.strictEqual(enc, 'ascii'); - }, { - decodeStrings: false - }); - m.setDefaultEncoding('ascii'); - m.write('bar'); - m.end(); -})(); - -common.expectsError(function changeDefaultEncodingToInvalidValue() { - var m = new MyWritable(function (isBuffer, type, enc) {}, { - decodeStrings: false - }); - m.setDefaultEncoding({}); - m.write('bar'); - m.end(); -}, { - type: TypeError, - code: 'ERR_UNKNOWN_ENCODING', - message: 'Unknown encoding: [object Object]' -}); - -(function checkVairableCaseEncoding() { - var m = new MyWritable(function (isBuffer, type, enc) { - assert.strictEqual(enc, 'ascii'); - }, { - decodeStrings: false - }); - m.setDefaultEncoding('AsCii'); - m.write('bar'); - m.end(); -})(); - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-writable-constructor-set-methods.js b/test/parallel/test-stream-writable-constructor-set-methods.js deleted file mode 100644 index 8d4938822b..0000000000 --- a/test/parallel/test-stream-writable-constructor-set-methods.js +++ /dev/null @@ -1,60 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('assert/'), - strictEqual = _require.strictEqual; - -var _require2 = require('../../'), - Writable = _require2.Writable; - -var w = new Writable(); -w.on('error', common.expectsError({ - type: Error, - code: 'ERR_METHOD_NOT_IMPLEMENTED', - message: 'The _write() method is not implemented' -})); -w.end(bufferShim.from('blerg')); - -var _write = common.mustCall(function (chunk, _, next) { - next(); -}); - -var _writev = common.mustCall(function (chunks, next) { - strictEqual(chunks.length, 2); - next(); -}); - -var w2 = new Writable({ - write: _write, - writev: _writev -}); -strictEqual(w2._write, _write); -strictEqual(w2._writev, _writev); -w2.write(bufferShim.from('blerg')); -w2.cork(); -w2.write(bufferShim.from('blerg')); -w2.write(bufferShim.from('blerg')); -w2.end(); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return 
process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-writable-decoded-encoding.js b/test/parallel/test-stream-writable-decoded-encoding.js deleted file mode 100644 index 1bf9ed6ea3..0000000000 --- a/test/parallel/test-stream-writable-decoded-encoding.js +++ /dev/null @@ -1,117 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var MyWritable = -/*#__PURE__*/ -function (_stream$Writable) { - _inherits(MyWritable, _stream$Writable); - - function MyWritable(fn, options) { - var _this; - - _classCallCheck(this, MyWritable); - - _this = _possibleConstructorReturn(this, _getPrototypeOf(MyWritable).call(this, options)); - _this.fn = fn; - return _this; - } - - _createClass(MyWritable, [{ - key: "_write", - value: function _write(chunk, encoding, callback) { - this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding); - callback(); - } - }]); - - return MyWritable; -}(stream.Writable); - -{ - var m = new MyWritable(function (isBuffer, type, enc) { - assert(isBuffer); - assert.strictEqual(type, 'object'); - assert.strictEqual(enc, 'buffer'); - }, { - decodeStrings: true - }); - m.write('some-text', 'utf8'); - m.end(); -} -{ - var _m = new MyWritable(function (isBuffer, type, enc) { - assert(!isBuffer); - assert.strictEqual(type, 'string'); - assert.strictEqual(enc, 'utf8'); - }, { - decodeStrings: false - }); - - _m.write('some-text', 'utf8'); - - _m.end(); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-writable-destroy.js b/test/parallel/test-stream-writable-destroy.js deleted file mode 100644 index dfc88f8166..0000000000 --- a/test/parallel/test-stream-writable-destroy.js +++ /dev/null @@ -1,286 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Writable = _require.Writable; - -var assert = require('assert/'); - -{ - var write = new Writable({ - write: function write(chunk, enc, cb) { - cb(); - } - }); - write.on('finish', common.mustNotCall()); - write.on('close', common.mustCall()); - write.destroy(); - assert.strictEqual(write.destroyed, true); -} -{ - var _write = new Writable({ - write: function write(chunk, enc, cb) { - cb(); - } - }); - - var expected = new Error('kaboom'); - - _write.on('finish', common.mustNotCall()); - - _write.on('close', common.mustCall()); - - _write.on('error', common.mustCall(function (err) { - assert.strictEqual(err, expected); - })); - - _write.destroy(expected); - - assert.strictEqual(_write.destroyed, true); -} -{ - var _write2 = new Writable({ - write: function write(chunk, enc, cb) { - cb(); - } - }); - - _write2._destroy = function (err, cb) { - assert.strictEqual(err, _expected); - cb(err); - }; - - var _expected = new Error('kaboom'); - - _write2.on('finish', common.mustNotCall('no finish event')); - - _write2.on('close', common.mustCall()); - - _write2.on('error', common.mustCall(function (err) { - assert.strictEqual(err, _expected); - })); - - _write2.destroy(_expected); - - assert.strictEqual(_write2.destroyed, true); -} -{ - var _write3 = new Writable({ - write: function write(chunk, enc, cb) { - cb(); - }, - destroy: common.mustCall(function (err, cb) { - assert.strictEqual(err, _expected2); - cb(); - }) - }); - - var _expected2 = new Error('kaboom'); - - _write3.on('finish', common.mustNotCall('no finish event')); - - _write3.on('close', common.mustCall()); // error is swallowed by the custom _destroy 
- - - _write3.on('error', common.mustNotCall('no error event')); - - _write3.destroy(_expected2); - - assert.strictEqual(_write3.destroyed, true); -} -{ - var _write4 = new Writable({ - write: function write(chunk, enc, cb) { - cb(); - } - }); - - _write4._destroy = common.mustCall(function (err, cb) { - assert.strictEqual(err, null); - cb(); - }); - - _write4.destroy(); - - assert.strictEqual(_write4.destroyed, true); -} -{ - var _write5 = new Writable({ - write: function write(chunk, enc, cb) { - cb(); - } - }); - - _write5._destroy = common.mustCall(function (err, cb) { - var _this = this; - - assert.strictEqual(err, null); - process.nextTick(function () { - _this.end(); - - cb(); - }); - }); - var fail = common.mustNotCall('no finish event'); - - _write5.on('finish', fail); - - _write5.on('close', common.mustCall()); - - _write5.destroy(); - - _write5.removeListener('finish', fail); - - _write5.on('finish', common.mustCall()); - - assert.strictEqual(_write5.destroyed, true); -} -{ - var _write6 = new Writable({ - write: function write(chunk, enc, cb) { - cb(); - } - }); - - var _expected3 = new Error('kaboom'); - - _write6._destroy = common.mustCall(function (err, cb) { - assert.strictEqual(err, null); - cb(_expected3); - }); - - _write6.on('close', common.mustCall()); - - _write6.on('finish', common.mustNotCall('no finish event')); - - _write6.on('error', common.mustCall(function (err) { - assert.strictEqual(err, _expected3); - })); - - _write6.destroy(); - - assert.strictEqual(_write6.destroyed, true); -} -{ - // double error case - var _write7 = new Writable({ - write: function write(chunk, enc, cb) { - cb(); - } - }); - - _write7.on('close', common.mustCall()); - - _write7.on('error', common.mustCall()); - - _write7.destroy(new Error('kaboom 1')); - - _write7.destroy(new Error('kaboom 2')); - - assert.strictEqual(_write7._writableState.errorEmitted, true); - assert.strictEqual(_write7.destroyed, true); -} -{ - var writable = new Writable({ - destroy: common.mustCall(function (err, cb) { - process.nextTick(cb, new Error('kaboom 1')); - }), - write: function write(chunk, enc, cb) { - cb(); - } - }); - writable.on('close', common.mustCall()); - writable.on('error', common.expectsError({ - type: Error, - message: 'kaboom 2' - })); - writable.destroy(); - assert.strictEqual(writable.destroyed, true); - assert.strictEqual(writable._writableState.errorEmitted, false); // Test case where `writable.destroy()` is called again with an error before - // the `_destroy()` callback is called. 
- - writable.destroy(new Error('kaboom 2')); - assert.strictEqual(writable._writableState.errorEmitted, true); -} -{ - var _write8 = new Writable({ - write: function write(chunk, enc, cb) { - cb(); - } - }); - - _write8.destroyed = true; - assert.strictEqual(_write8.destroyed, true); // the internal destroy() mechanism should not be triggered - - _write8.on('close', common.mustNotCall()); - - _write8.destroy(); -} -{ - function MyWritable() { - assert.strictEqual(this.destroyed, false); - this.destroyed = false; - Writable.call(this); - } - - Object.setPrototypeOf(MyWritable.prototype, Writable.prototype); - Object.setPrototypeOf(MyWritable, Writable); - new MyWritable(); -} -{ - // destroy and destroy callback - var _write9 = new Writable({ - write: function write(chunk, enc, cb) { - cb(); - } - }); - - _write9.destroy(); - - var _expected4 = new Error('kaboom'); - - _write9.destroy(_expected4, common.mustCall(function (err) { - assert.strictEqual(err, _expected4); - })); -} -{ - // Checks that `._undestroy()` restores the state so that `final` will be - // called again. - var _write10 = new Writable({ - write: common.mustNotCall(), - final: common.mustCall(function (cb) { - return cb(); - }, 2) - }); - - _write10.end(); - - _write10.destroy(); - - _write10._undestroy(); - - _write10.end(); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-writable-ended-state.js b/test/parallel/test-stream-writable-ended-state.js deleted file mode 100644 index ac8c6f44b4..0000000000 --- a/test/parallel/test-stream-writable-ended-state.js +++ /dev/null @@ -1,42 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var writable = new stream.Writable(); - -writable._write = function (chunk, encoding, cb) { - assert.strictEqual(writable._writableState.ended, false); - cb(); -}; - -assert.strictEqual(writable._writableState.ended, false); -writable.end('testing ended state', common.mustCall(function () { - assert.strictEqual(writable._writableState.ended, true); -})); -assert.strictEqual(writable._writableState.ended, true); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-writable-finished-state.js b/test/parallel/test-stream-writable-finished-state.js deleted file mode 100644 index 641602a093..0000000000 --- a/test/parallel/test-stream-writable-finished-state.js +++ /dev/null @@ -1,44 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var writable = new stream.Writable(); - -writable._write = function (chunk, encoding, cb) { - // The state finished should start in false. 
- assert.strictEqual(writable._writableState.finished, false); - cb(); -}; - -writable.on('finish', common.mustCall(function () { - assert.strictEqual(writable._writableState.finished, true); -})); -writable.end('testing finished state', common.mustCall(function () { - assert.strictEqual(writable._writableState.finished, true); -})); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-writable-needdrain-state.js b/test/parallel/test-stream-writable-needdrain-state.js deleted file mode 100644 index 0dc2b1152b..0000000000 --- a/test/parallel/test-stream-writable-needdrain-state.js +++ /dev/null @@ -1,45 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var stream = require('../../'); - -var assert = require('assert/'); - -var transform = new stream.Transform({ - transform: _transform, - highWaterMark: 1 -}); - -function _transform(chunk, encoding, cb) { - assert.strictEqual(transform._writableState.needDrain, true); - cb(); -} - -assert.strictEqual(transform._writableState.needDrain, false); -transform.write('asdasd', common.mustCall(function () { - assert.strictEqual(transform._writableState.needDrain, false); -})); -assert.strictEqual(transform._writableState.needDrain, true); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-writable-null.js b/test/parallel/test-stream-writable-null.js deleted file mode 100644 index a6e8794a33..0000000000 --- a/test/parallel/test-stream-writable-null.js +++ /dev/null @@ -1,120 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var MyWritable = -/*#__PURE__*/ -function (_stream$Writable) { - _inherits(MyWritable, _stream$Writable); - - function MyWritable() { - _classCallCheck(this, MyWritable); - - return _possibleConstructorReturn(this, _getPrototypeOf(MyWritable).apply(this, arguments)); - } - - _createClass(MyWritable, [{ - key: "_write", - value: function _write(chunk, encoding, callback) { - assert.notStrictEqual(chunk, null); - callback(); - } - }]); - - return MyWritable; -}(stream.Writable); - -common.expectsError(function () { - var m = new MyWritable({ - objectMode: true - }); - m.write(null, function (err) { - return assert.ok(err); - }); -}, { - code: 'ERR_STREAM_NULL_VALUES', - type: TypeError, - message: 'May not write null values to stream' -}); -{ - // Should not throw. - var m = new MyWritable({ - objectMode: true - }).on('error', assert); - m.write(null, assert); -} -common.expectsError(function () { - var m = new MyWritable(); - m.write(false, function (err) { - return assert.ok(err); - }); -}, { - code: 'ERR_INVALID_ARG_TYPE', - type: TypeError -}); -{ - // Should not throw. - var _m = new MyWritable().on('error', assert); - - _m.write(false, assert); -} -{ - // Should not throw. - var _m2 = new MyWritable({ - objectMode: true - }); - - _m2.write(false, assert.ifError); -} -{ - // Should not throw. 
- var _m3 = new MyWritable({ - objectMode: true - }).on('error', function (e) { - assert.ifError(e || new Error('should not get here')); - }); - - _m3.write(false, assert.ifError); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-writable-write-cb-twice.js b/test/parallel/test-stream-writable-write-cb-twice.js deleted file mode 100644 index 9f3fbc3ebc..0000000000 --- a/test/parallel/test-stream-writable-write-cb-twice.js +++ /dev/null @@ -1,74 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var _require = require('../../'), - Writable = _require.Writable; - -{ - // Sync + Sync - var writable = new Writable({ - write: common.mustCall(function (buf, enc, cb) { - cb(); - common.expectsError(cb, { - code: 'ERR_MULTIPLE_CALLBACK', - type: Error - }); - }) - }); - writable.write('hi'); -} -{ - // Sync + Async - var _writable = new Writable({ - write: common.mustCall(function (buf, enc, cb) { - cb(); - process.nextTick(function () { - common.expectsError(cb, { - code: 'ERR_MULTIPLE_CALLBACK', - type: Error - }); - }); - }) - }); - - _writable.write('hi'); -} -{ - // Async + Async - var _writable2 = new Writable({ - write: common.mustCall(function (buf, enc, cb) { - process.nextTick(cb); - process.nextTick(function () { - common.expectsError(cb, { - code: 'ERR_MULTIPLE_CALLBACK', - type: Error - }); - }); - }) - }); - - _writable2.write('hi'); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-writable-write-writev-finish.js b/test/parallel/test-stream-writable-write-writev-finish.js deleted file mode 100644 index 135230fe10..0000000000 --- a/test/parallel/test-stream-writable-write-writev-finish.js +++ /dev/null @@ -1,209 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); // ensure consistency between the finish event when using cork() -// and writev and when not using them - - -{ - var writable = new stream.Writable(); - - writable._write = function (chunks, encoding, cb) { - cb(new Error('write test error')); - }; - - var firstError = false; - writable.on('finish', common.mustCall(function () { - assert.strictEqual(firstError, true); - })); - writable.on('prefinish', common.mustCall()); - writable.on('error', common.mustCall(function (er) { - assert.strictEqual(er.message, 'write test error'); - firstError = true; - })); - writable.end('test'); -} -{ - var _writable = new stream.Writable(); - - _writable._write = function (chunks, encoding, cb) { - setImmediate(cb, new Error('write test error')); - }; - - var _firstError = false; - - _writable.on('finish', common.mustCall(function () { - assert.strictEqual(_firstError, true); - })); - - _writable.on('prefinish', common.mustCall()); - - _writable.on('error', common.mustCall(function (er) { - assert.strictEqual(er.message, 'write 
test error'); - _firstError = true; - })); - - _writable.end('test'); -} -{ - var _writable2 = new stream.Writable(); - - _writable2._write = function (chunks, encoding, cb) { - cb(new Error('write test error')); - }; - - _writable2._writev = function (chunks, cb) { - cb(new Error('writev test error')); - }; - - var _firstError2 = false; - - _writable2.on('finish', common.mustCall(function () { - assert.strictEqual(_firstError2, true); - })); - - _writable2.on('prefinish', common.mustCall()); - - _writable2.on('error', common.mustCall(function (er) { - assert.strictEqual(er.message, 'writev test error'); - _firstError2 = true; - })); - - _writable2.cork(); - - _writable2.write('test'); - - setImmediate(function () { - _writable2.end('test'); - }); -} -{ - var _writable3 = new stream.Writable(); - - _writable3._write = function (chunks, encoding, cb) { - setImmediate(cb, new Error('write test error')); - }; - - _writable3._writev = function (chunks, cb) { - setImmediate(cb, new Error('writev test error')); - }; - - var _firstError3 = false; - - _writable3.on('finish', common.mustCall(function () { - assert.strictEqual(_firstError3, true); - })); - - _writable3.on('prefinish', common.mustCall()); - - _writable3.on('error', common.mustCall(function (er) { - assert.strictEqual(er.message, 'writev test error'); - _firstError3 = true; - })); - - _writable3.cork(); - - _writable3.write('test'); - - setImmediate(function () { - _writable3.end('test'); - }); -} // Regression test for -// https://github.com/nodejs/node/issues/13812 - -{ - var rs = new stream.Readable(); - rs.push('ok'); - rs.push(null); - - rs._read = function () {}; - - var ws = new stream.Writable(); - var _firstError4 = false; - ws.on('finish', common.mustCall(function () { - assert.strictEqual(_firstError4, true); - })); - ws.on('error', common.mustCall(function () { - _firstError4 = true; - })); - - ws._write = function (chunk, encoding, done) { - setImmediate(done, new Error()); - }; - - rs.pipe(ws); -} -{ - var _rs = new stream.Readable(); - - _rs.push('ok'); - - _rs.push(null); - - _rs._read = function () {}; - - var _ws = new stream.Writable(); - - _ws.on('finish', common.mustNotCall()); - - _ws.on('error', common.mustCall()); - - _ws._write = function (chunk, encoding, done) { - done(new Error()); - }; - - _rs.pipe(_ws); -} -{ - var w = new stream.Writable(); - - w._write = function (chunk, encoding, cb) { - process.nextTick(cb); - }; - - w.on('error', common.mustCall()); - w.on('prefinish', function () { - w.write("shouldn't write in prefinish listener"); - }); - w.end(); -} -{ - var _w = new stream.Writable(); - - _w._write = function (chunk, encoding, cb) { - process.nextTick(cb); - }; - - _w.on('error', common.mustCall()); - - _w.on('finish', function () { - _w.write("shouldn't write in finish listener"); - }); - - _w.end(); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-writableState-ending.js b/test/parallel/test-stream-writableState-ending.js deleted file mode 100644 index 49754b4385..0000000000 --- a/test/parallel/test-stream-writableState-ending.js +++ /dev/null @@ -1,57 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = 
require('assert/'); - -var stream = require('../../'); - -var writable = new stream.Writable(); - -function testStates(ending, finished, ended) { - assert.strictEqual(writable._writableState.ending, ending); - assert.strictEqual(writable._writableState.finished, finished); - assert.strictEqual(writable._writableState.ended, ended); -} - -writable._write = function (chunk, encoding, cb) { - // ending, finished, ended start in false. - testStates(false, false, false); - cb(); -}; - -writable.on('finish', function () { - // ending, finished, ended = true. - testStates(true, true, true); -}); -var result = writable.end('testing function end()', function () { - // ending, finished, ended = true. - testStates(true, true, true); -}); // end returns the writable instance - -assert.strictEqual(result, writable); // ending, ended = true. -// finished = false. - -testStates(true, false, true); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js b/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js deleted file mode 100644 index d3efeb7ab8..0000000000 --- a/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js +++ /dev/null @@ -1,72 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var writable = new stream.Writable(); -writable._writev = common.mustCall(function (chunks, cb) { - assert.strictEqual(chunks.length, 2); - cb(); -}, 1); -writable._write = common.mustCall(function (chunk, encoding, cb) { - cb(); -}, 1); // first cork - -writable.cork(); -assert.strictEqual(writable._writableState.corked, 1); -assert.strictEqual(writable._writableState.bufferedRequestCount, 0); // cork again - -writable.cork(); -assert.strictEqual(writable._writableState.corked, 2); // the first chunk is buffered - -writable.write('first chunk'); -assert.strictEqual(writable._writableState.bufferedRequestCount, 1); // first uncork does nothing - -writable.uncork(); -assert.strictEqual(writable._writableState.corked, 1); -assert.strictEqual(writable._writableState.bufferedRequestCount, 1); -process.nextTick(uncork); // the second chunk is buffered, because we uncork at the end of tick - -writable.write('second chunk'); -assert.strictEqual(writable._writableState.corked, 1); -assert.strictEqual(writable._writableState.bufferedRequestCount, 2); - -function uncork() { - // second uncork flushes the buffer - writable.uncork(); - assert.strictEqual(writable._writableState.corked, 0); - assert.strictEqual(writable._writableState.bufferedRequestCount, 0); // verify that end() uncorks correctly - - writable.cork(); - writable.write('third chunk'); - writable.end(); // end causes an uncork() as well - - assert.strictEqual(writable._writableState.corked, 0); - assert.strictEqual(writable._writableState.bufferedRequestCount, 0); -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); 
\ No newline at end of file diff --git a/test/parallel/test-stream-write-destroy.js b/test/parallel/test-stream-write-destroy.js deleted file mode 100644 index 6528b2bacd..0000000000 --- a/test/parallel/test-stream-write-destroy.js +++ /dev/null @@ -1,101 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var _require = require('../../'), - Writable = _require.Writable; // Test interaction between calling .destroy() on a writable and pending -// writes. - - -for (var _i = 0, _arr = [false, true]; _i < _arr.length; _i++) { - var withPendingData = _arr[_i]; - - var _loop = function _loop() { - var useEnd = _arr2[_i2]; - var callbacks = []; - var w = new Writable({ - write: function write(data, enc, cb) { - callbacks.push(cb); - }, - // Effectively disable the HWM to observe 'drain' events more easily. - highWaterMark: 1 - }); - var chunksWritten = 0; - var drains = 0; - var finished = false; - w.on('drain', function () { - return drains++; - }); - w.on('finish', function () { - return finished = true; - }); - w.write('abc', function () { - return chunksWritten++; - }); - assert.strictEqual(chunksWritten, 0); - assert.strictEqual(drains, 0); - callbacks.shift()(); - assert.strictEqual(chunksWritten, 1); - assert.strictEqual(drains, 1); - - if (withPendingData) { - // Test 2 cases: There either is or is not data still in the write queue. - // (The second write will never actually get executed either way.) - w.write('def', function () { - return chunksWritten++; - }); - } - - if (useEnd) { - // Again, test 2 cases: Either we indicate that we want to end the - // writable or not. - w.end('ghi', function () { - return chunksWritten++; - }); - } else { - w.write('ghi', function () { - return chunksWritten++; - }); - } - - assert.strictEqual(chunksWritten, 1); - w.destroy(); - assert.strictEqual(chunksWritten, 1); - callbacks.shift()(); - assert.strictEqual(chunksWritten, 2); - assert.strictEqual(callbacks.length, 0); - assert.strictEqual(drains, 1); // When we used `.end()`, we see the 'finished' event if and only if - // we actually finished processing the write queue. 
- - assert.strictEqual(finished, !withPendingData && useEnd); - }; - - for (var _i2 = 0, _arr2 = [false, true]; _i2 < _arr2.length; _i2++) { - _loop(); - } -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-write-final.js b/test/parallel/test-stream-write-final.js deleted file mode 100644 index 3f7ba25754..0000000000 --- a/test/parallel/test-stream-write-final.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var shutdown = false; -var w = new stream.Writable({ - final: common.mustCall(function (cb) { - assert.strictEqual(this, w); - setTimeout(function () { - shutdown = true; - cb(); - }, 100); - }), - write: function write(chunk, e, cb) { - process.nextTick(cb); - } -}); -w.on('finish', common.mustCall(function () { - assert(shutdown); -})); -w.write(bufferShim.allocUnsafe(1)); -w.end(bufferShim.allocUnsafe(0)); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream-writev.js b/test/parallel/test-stream-writev.js deleted file mode 100644 index ea0fcc8afa..0000000000 --- a/test/parallel/test-stream-writev.js +++ /dev/null @@ -1,149 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var queue = []; - -for (var decode = 0; decode < 2; decode++) { - for (var uncork = 0; uncork < 2; uncork++) { - for (var multi = 0; multi < 2; multi++) { - queue.push([!!decode, !!uncork, !!multi]); - } - } -} - -run(); - -function run() { - var t = queue.pop(); - if (t) test(t[0], t[1], t[2], run);else require('tap').pass(); -} - -function test(decode, uncork, multi, next) { - require('tap').test("# decode=".concat(decode, " uncork=").concat(uncork, " multi=").concat(multi)); - - var counter = 0; - var expectCount = 0; - - function cnt(msg) { - expectCount++; - var expect = expectCount; - return function (er) { - assert.ifError(er); - counter++; - assert.strictEqual(counter, expect); - }; - } - - var w = new stream.Writable({ - decodeStrings: decode - }); - w._write = common.mustNotCall('Should not call _write'); - var expectChunks = decode ? [{ - encoding: 'buffer', - chunk: [104, 101, 108, 108, 111, 44, 32] - }, { - encoding: 'buffer', - chunk: [119, 111, 114, 108, 100] - }, { - encoding: 'buffer', - chunk: [33] - }, { - encoding: 'buffer', - chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] - }, { - encoding: 'buffer', - chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173] - }] : [{ - encoding: 'ascii', - chunk: 'hello, ' - }, { - encoding: 'utf8', - chunk: 'world' - }, { - encoding: 'buffer', - chunk: [33] - }, { - encoding: 'latin1', - chunk: '\nand then...' - }, { - encoding: 'hex', - chunk: 'facebea7deadbeefdecafbad' - }]; - var actualChunks; - - w._writev = function (chunks, cb) { - actualChunks = chunks.map(function (chunk) { - return { - encoding: chunk.encoding, - chunk: Buffer.isBuffer(chunk.chunk) ? Array.prototype.slice.call(chunk.chunk) : chunk.chunk - }; - }); - cb(); - }; - - w.cork(); - w.write('hello, ', 'ascii', cnt('hello')); - w.write('world', 'utf8', cnt('world')); - if (multi) w.cork(); - w.write(bufferShim.from('!'), 'buffer', cnt('!')); - w.write('\nand then...', 'latin1', cnt('and then')); - if (multi) w.uncork(); - w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex')); - if (uncork) w.uncork(); - w.end(cnt('end')); - w.on('finish', function () { - // make sure finish comes after all the write cb - cnt('finish')(); - assert.deepStrictEqual(actualChunks, expectChunks); - next(); - }); -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-base64-single-char-read-end.js b/test/parallel/test-stream2-base64-single-char-read-end.js deleted file mode 100644 index 5ee7b9d52e..0000000000 --- a/test/parallel/test-stream2-base64-single-char-read-end.js +++ /dev/null @@ -1,83 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var R = require('../../lib/_stream_readable'); - -var W = require('../../lib/_stream_writable'); - -var assert = require('assert/'); - -var src = new R({ - encoding: 'base64' -}); -var dst = new W(); -var hasRead = false; -var accum = []; - -src._read = function (n) { - if (!hasRead) { - hasRead = true; - process.nextTick(function () { - src.push(bufferShim.from('1')); - src.push(null); - }); - } -}; - -dst._write = function (chunk, enc, cb) { - accum.push(chunk); - cb(); -}; - -src.on('end', function () { - assert.strictEqual(String(Buffer.concat(accum)), 'MQ=='); - clearTimeout(timeout); -}); -src.pipe(dst); -var timeout = setTimeout(function () { - assert.fail('timed out waiting for _write'); -}, 100); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-basic.js b/test/parallel/test-stream2-basic.js deleted file mode 100644 index 7a16dc3fbf..0000000000 --- a/test/parallel/test-stream2-basic.js +++ /dev/null @@ -1,442 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = 
Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var R = require('../../lib/_stream_readable'); - -var assert = require('assert/'); - -var EE = require('events').EventEmitter; - -var TestReader = -/*#__PURE__*/ -function (_R) { - _inherits(TestReader, _R); - - function TestReader(n) { - var _this; - - _classCallCheck(this, TestReader); - - _this = _possibleConstructorReturn(this, _getPrototypeOf(TestReader).call(this)); - _this._buffer = bufferShim.alloc(n || 100, 'x'); - _this._pos = 0; - _this._bufs = 10; - return _this; - } - - _createClass(TestReader, [{ - key: "_read", - value: function _read(n) { - var _this2 = this; - - var max = this._buffer.length - this._pos; - n = Math.max(n, 0); - var toRead = Math.min(n, max); - - if (toRead === 0) { - // simulate the read buffer filling up with some more bytes some time - // in the future. - setTimeout(function () { - _this2._pos = 0; - _this2._bufs -= 1; - - if (_this2._bufs <= 0) { - // read them all! - if (!_this2.ended) _this2.push(null); - } else { - // now we have more. - // kinda cheating by calling _read, but whatever, - // it's just fake anyway. 
- _this2._read(n); - } - }, 10); - return; - } - - var ret = this._buffer.slice(this._pos, this._pos + toRead); - - this._pos += toRead; - this.push(ret); - } - }]); - - return TestReader; -}(R); - -var TestWriter = -/*#__PURE__*/ -function (_EE) { - _inherits(TestWriter, _EE); - - function TestWriter() { - var _this3; - - _classCallCheck(this, TestWriter); - - _this3 = _possibleConstructorReturn(this, _getPrototypeOf(TestWriter).call(this)); - _this3.received = []; - _this3.flush = false; - return _this3; - } - - _createClass(TestWriter, [{ - key: "write", - value: function write(c) { - this.received.push(c.toString()); - this.emit('write', c); - return true; - } - }, { - key: "end", - value: function end(c) { - if (c) this.write(c); - this.emit('end', this.received); - } - }]); - - return TestWriter; -}(EE); - -{ - // Test basic functionality - var r = new TestReader(20); - var reads = []; - var expect = ['x', 'xx', 'xxx', 'xxxx', 'xxxxx', 'xxxxxxxxx', 'xxxxxxxxxx', 'xxxxxxxxxxxx', 'xxxxxxxxxxxxx', 'xxxxxxxxxxxxxxx', 'xxxxxxxxxxxxxxxxx', 'xxxxxxxxxxxxxxxxxxx', 'xxxxxxxxxxxxxxxxxxxxx', 'xxxxxxxxxxxxxxxxxxxxxxx', 'xxxxxxxxxxxxxxxxxxxxxxxxx', 'xxxxxxxxxxxxxxxxxxxxx']; - r.on('end', common.mustCall(function () { - assert.deepStrictEqual(reads, expect); - })); - var readSize = 1; - - function flow() { - var res; - - while (null !== (res = r.read(readSize++))) { - reads.push(res.toString()); - } - - r.once('readable', flow); - } - - flow(); -} -{ - // Verify pipe - var _r = new TestReader(5); - - var _expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx']; - var w = new TestWriter(); - w.on('end', common.mustCall(function (received) { - assert.deepStrictEqual(received, _expect); - })); - - _r.pipe(w); -} -forEach([1, 2, 3, 4, 5, 6, 7, 8, 9], function (SPLIT) { - // Verify unpipe - var r = new TestReader(5); // unpipe after 3 writes, then write to another stream instead. - - var expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx']; - expect = [expect.slice(0, SPLIT), expect.slice(SPLIT)]; - var w = [new TestWriter(), new TestWriter()]; - var writes = SPLIT; - w[0].on('write', function () { - if (--writes === 0) { - r.unpipe(); - assert.strictEqual(r._readableState.pipes, null); - w[0].end(); - r.pipe(w[1]); - assert.strictEqual(r._readableState.pipes, w[1]); - } - }); - var ended = 0; - w[0].on('end', common.mustCall(function (results) { - ended++; - assert.strictEqual(ended, 1); - assert.deepStrictEqual(results, expect[0]); - })); - w[1].on('end', common.mustCall(function (results) { - ended++; - assert.strictEqual(ended, 2); - assert.deepStrictEqual(results, expect[1]); - })); - r.pipe(w[0]); -}); -{ - // Verify both writers get the same data when piping to destinations - var _r2 = new TestReader(5); - - var _w = [new TestWriter(), new TestWriter()]; - var _expect2 = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx']; - - _w[0].on('end', common.mustCall(function (received) { - assert.deepStrictEqual(received, _expect2); - })); - - _w[1].on('end', common.mustCall(function (received) { - assert.deepStrictEqual(received, _expect2); - })); - - _r2.pipe(_w[0]); - - _r2.pipe(_w[1]); -} -forEach([1, 2, 3, 4, 5, 6, 7, 8, 9], function (SPLIT) { - // Verify multi-unpipe - var r = new TestReader(5); // unpipe after 3 writes, then write to another stream instead. 
- - var expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx']; - expect = [expect.slice(0, SPLIT), expect.slice(SPLIT)]; - var w = [new TestWriter(), new TestWriter(), new TestWriter()]; - var writes = SPLIT; - w[0].on('write', function () { - if (--writes === 0) { - r.unpipe(); - w[0].end(); - r.pipe(w[1]); - } - }); - var ended = 0; - w[0].on('end', common.mustCall(function (results) { - ended++; - assert.strictEqual(ended, 1); - assert.deepStrictEqual(results, expect[0]); - })); - w[1].on('end', common.mustCall(function (results) { - ended++; - assert.strictEqual(ended, 2); - assert.deepStrictEqual(results, expect[1]); - })); - r.pipe(w[0]); - r.pipe(w[2]); -}); -{ - // Verify that back pressure is respected - var _r3 = new R({ - objectMode: true - }); - - _r3._read = common.mustNotCall(); - var counter = 0; - - _r3.push(['one']); - - _r3.push(['two']); - - _r3.push(['three']); - - _r3.push(['four']); - - _r3.push(null); - - var w1 = new R(); - - w1.write = function (chunk) { - assert.strictEqual(chunk[0], 'one'); - w1.emit('close'); - process.nextTick(function () { - _r3.pipe(w2); - - _r3.pipe(w3); - }); - }; - - w1.end = common.mustNotCall(); - - _r3.pipe(w1); - - var expected = ['two', 'two', 'three', 'three', 'four', 'four']; - var w2 = new R(); - - w2.write = function (chunk) { - assert.strictEqual(chunk[0], expected.shift()); - assert.strictEqual(counter, 0); - counter++; - - if (chunk[0] === 'four') { - return true; - } - - setTimeout(function () { - counter--; - w2.emit('drain'); - }, 10); - return false; - }; - - w2.end = common.mustCall(); - var w3 = new R(); - - w3.write = function (chunk) { - assert.strictEqual(chunk[0], expected.shift()); - assert.strictEqual(counter, 1); - counter++; - - if (chunk[0] === 'four') { - return true; - } - - setTimeout(function () { - counter--; - w3.emit('drain'); - }, 50); - return false; - }; - - w3.end = common.mustCall(function () { - assert.strictEqual(counter, 2); - assert.strictEqual(expected.length, 0); - }); -} -{ - // Verify read(0) behavior for ended streams - var _r4 = new R(); - - var written = false; - var ended = false; - _r4._read = common.mustNotCall(); - - _r4.push(bufferShim.from('foo')); - - _r4.push(null); - - var v = _r4.read(0); - - assert.strictEqual(v, null); - - var _w2 = new R(); - - _w2.write = function (buffer) { - written = true; - assert.strictEqual(ended, false); - assert.strictEqual(buffer.toString(), 'foo'); - }; - - _w2.end = common.mustCall(function () { - ended = true; - assert.strictEqual(written, true); - }); - - _r4.pipe(_w2); -} -{ - // Verify synchronous _read ending - var _r5 = new R(); - - var called = false; - - _r5._read = function (n) { - _r5.push(null); - }; - - _r5.once('end', function () { - // Verify that this is called before the next tick - called = true; - }); - - _r5.read(); - - process.nextTick(function () { - assert.strictEqual(called, true); - }); -} -{ - // Verify that adding readable listeners trigger data flow - var _r6 = new R({ - highWaterMark: 5 - }); - - var onReadable = false; - var readCalled = 0; - - _r6._read = function (n) { - if (readCalled++ === 2) _r6.push(null);else _r6.push(bufferShim.from('asdf')); - }; - - _r6.on('readable', function () { - onReadable = true; - - _r6.read(); - }); - - _r6.on('end', common.mustCall(function () { - assert.strictEqual(readCalled, 3); - assert.ok(onReadable); - })); -} -{ - // Verify that streams are chainable - var _r7 = new R(); - - _r7._read = common.mustCall(); - - var r2 = 
_r7.setEncoding('utf8').pause().resume().pause(); - - assert.strictEqual(_r7, r2); -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-compatibility.js b/test/parallel/test-stream2-compatibility.js deleted file mode 100644 index 04ab8eeced..0000000000 --- a/test/parallel/test-stream2-compatibility.js +++ /dev/null @@ -1,147 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var R = require('../../lib/_stream_readable'); - -var W = require('../../lib/_stream_writable'); - -var assert = require('assert/'); - -var ondataCalled = 0; - -var TestReader = -/*#__PURE__*/ -function (_R) { - _inherits(TestReader, _R); - - function TestReader() { - var _this; - - _classCallCheck(this, TestReader); - - _this = _possibleConstructorReturn(this, _getPrototypeOf(TestReader).call(this)); - _this._buffer = bufferShim.alloc(100, 'x'); - - _this.on('data', function () { - ondataCalled++; - }); - - return _this; - } - - _createClass(TestReader, [{ - key: "_read", - value: function _read(n) { - this.push(this._buffer); - this._buffer = bufferShim.alloc(0); - } - }]); - - return TestReader; -}(R); - -var reader = new TestReader(); -setImmediate(function () { - assert.strictEqual(ondataCalled, 1); - - require('tap').pass(); - - reader.push(null); -}); - -var TestWriter = -/*#__PURE__*/ -function (_W) { - _inherits(TestWriter, _W); - - function TestWriter() { - var _this2; - - _classCallCheck(this, TestWriter); - - _this2 = _possibleConstructorReturn(this, _getPrototypeOf(TestWriter).call(this)); - - _this2.write('foo'); - - _this2.end(); - - return _this2; - } - - _createClass(TestWriter, [{ - key: "_write", - value: function _write(chunk, enc, cb) { - cb(); - } - }]); - - return TestWriter; -}(W); - -var writer = new TestWriter(); -process.on('exit', function () { - assert.strictEqual(reader.readable, false); - assert.strictEqual(writer.writable, false); - - require('tap').pass(); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-decode-partial.js b/test/parallel/test-stream2-decode-partial.js deleted file mode 100644 index 600e0c6583..0000000000 --- a/test/parallel/test-stream2-decode-partial.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var Readable = require('../../lib/_stream_readable'); - -var assert = require('assert/'); - -var buf = ''; -var euro = bufferShim.from([0xE2, 0x82, 0xAC]); -var cent = bufferShim.from([0xC2, 0xA2]); -var source = Buffer.concat([euro, cent]); -var readable = Readable({ - encoding: 'utf8' -}); -readable.push(source.slice(0, 2)); -readable.push(source.slice(2, 4)); -readable.push(source.slice(4, source.length)); -; -readable.push(null); -readable.on('data', function (data) { - buf += data; -}); -process.on('exit', function () { - assert.strictEqual(buf, '€¢'); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-finish-pipe.js b/test/parallel/test-stream2-finish-pipe.js deleted file mode 100644 
index f20cdb0d24..0000000000 --- a/test/parallel/test-stream2-finish-pipe.js +++ /dev/null @@ -1,66 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var stream = require('../../'); - -var r = new stream.Readable(); - -r._read = function (size) { - r.push(bufferShim.allocUnsafe(size)); -}; - -var w = new stream.Writable(); - -w._write = function (data, encoding, cb) { - cb(null); -}; - -r.pipe(w); // This might sound unrealistic, but it happens in net.js. When -// `socket.allowHalfOpen === false`, EOF will cause `.destroySoon()` call which -// ends the writable side of net.Socket. - -w.end(); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-large-read-stall.js b/test/parallel/test-stream2-large-read-stall.js deleted file mode 100644 index fe628142fa..0000000000 --- a/test/parallel/test-stream2-large-read-stall.js +++ /dev/null @@ -1,98 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); // If everything aligns so that you do a read(n) of exactly the -// remaining buffer, then make sure that 'end' still emits. - - -var READSIZE = 100; -var PUSHSIZE = 20; -var PUSHCOUNT = 1000; -var HWM = 50; - -var Readable = require('../../').Readable; - -var r = new Readable({ - highWaterMark: HWM -}); -var rs = r._readableState; -r._read = push; -r.on('readable', function () { - ; - false && console.error('>> readable'); - var ret; - - do { - ; - false && console.error(" > read(".concat(READSIZE, ")")); - ret = r.read(READSIZE); - ; - false && console.error(" < ".concat(ret && ret.length, " (").concat(rs.length, " remain)")); - } while (ret && ret.length === READSIZE); - - ; - false && console.error('<< after read()', ret && ret.length, rs.needReadable, rs.length); -}); -r.on('end', common.mustCall(function () { - assert.strictEqual(pushes, PUSHCOUNT + 1); -})); -var pushes = 0; - -function push() { - if (pushes > PUSHCOUNT) return; - - if (pushes++ === PUSHCOUNT) { - ; - false && console.error(' push(EOF)'); - return r.push(null); - } - - ; - false && console.error(" push #".concat(pushes)); - if (r.push(bufferShim.allocUnsafe(PUSHSIZE))) setTimeout(push, 1); -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-objects.js b/test/parallel/test-stream2-objects.js deleted file mode 100644 index ec0c17d491..0000000000 --- a/test/parallel/test-stream2-objects.js +++ /dev/null @@ -1,394 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var Readable = require('../../lib/_stream_readable'); - -var Writable = require('../../lib/_stream_writable'); - -var assert = require('assert/'); - -function toArray(callback) { - var stream = new Writable({ - objectMode: true - }); - var list = []; - - stream.write = function (chunk) { - list.push(chunk); - }; - - stream.end = common.mustCall(function () { - callback(list); - }); - return stream; -} - -function fromArray(list) { - var r = new Readable({ - objectMode: true - }); - r._read = common.mustNotCall(); - forEach(list, function (chunk) { - r.push(chunk); - }); - r.push(null); - return r; -} - -{ - // Verify that objects can be read from the stream - var r = fromArray([{ - one: '1' - }, { - two: '2' - }]); - var v1 = r.read(); - var v2 = r.read(); - var v3 = r.read(); - assert.deepStrictEqual(v1, { - one: '1' - }); - assert.deepStrictEqual(v2, { - two: '2' - }); - assert.deepStrictEqual(v3, null); -} -{ - // Verify that objects can be piped into the stream - var _r = fromArray([{ - one: '1' - }, { - two: '2' - }]); - - _r.pipe(toArray(common.mustCall(function (list) { - assert.deepStrictEqual(list, [{ - one: '1' - }, { - two: '2' - }]); - }))); -} -{ - // Verify that read(n) is ignored - var _r2 = fromArray([{ - one: '1' - }, { - two: '2' - }]); - - var value = _r2.read(2); - - assert.deepStrictEqual(value, { - one: '1' - }); -} -{ - // Verify that objects can be synchronously read - var _r3 = new Readable({ - objectMode: true - }); - - var list = [{ - one: '1' - }, { - two: '2' - }]; - - _r3._read = function (n) { - var item = list.shift(); - - _r3.push(item || null); - }; - - _r3.pipe(toArray(common.mustCall(function (list) { - assert.deepStrictEqual(list, [{ - one: '1' - }, { - two: '2' - }]); - }))); -} -{ - // Verify that objects can be asynchronously read - var _r4 = new Readable({ - objectMode: true - }); - - var _list2 = [{ - one: '1' - }, { - two: '2' - }]; - - _r4._read = function (n) { - var item = _list2.shift(); - - process.nextTick(function () { - _r4.push(item || null); - }); - }; - - _r4.pipe(toArray(common.mustCall(function (list) { - assert.deepStrictEqual(list, [{ - one: '1' - }, { - two: '2' - }]); - }))); -} -{ - // Verify that strings can be read as objects - var _r5 = new Readable({ - objectMode: true - }); - - _r5._read = common.mustNotCall(); - var _list3 = ['one', 'two', 'three']; - forEach(_list3, function (str) { - _r5.push(str); - }); - - _r5.push(null); - - _r5.pipe(toArray(common.mustCall(function (array) { - assert.deepStrictEqual(array, _list3); - }))); -} -{ - // Verify read(0) behavior for object streams - var _r6 = new Readable({ - objectMode: true - }); - - _r6._read = common.mustNotCall(); - - _r6.push('foobar'); - - _r6.push(null); - - _r6.pipe(toArray(common.mustCall(function (array) { - assert.deepStrictEqual(array, ['foobar']); - }))); -} -{ - // Verify the behavior of pushing falsey values - var _r7 = new Readable({ - objectMode: true - }); - - _r7._read = common.mustNotCall(); - - _r7.push(false); - - _r7.push(0); - - _r7.push(''); - - _r7.push(null); - - _r7.pipe(toArray(common.mustCall(function (array) { - assert.deepStrictEqual(array, [false, 0, '']); - }))); -} -{ - // Verify high watermark _read() behavior - var _r8 = new Readable({ - highWaterMark: 6, - objectMode: true - }); - - var calls = 0; - var _list4 = ['1', '2', '3', '4', '5', '6', '7', '8']; - - _r8._read = function (n) { - calls++; - }; - - forEach(_list4, function 
(c) { - _r8.push(c); - }); - - var v = _r8.read(); - - assert.strictEqual(calls, 0); - assert.strictEqual(v, '1'); - - var _v = _r8.read(); - - assert.strictEqual(_v, '2'); - - var _v2 = _r8.read(); - - assert.strictEqual(_v2, '3'); - assert.strictEqual(calls, 1); -} -{ - // Verify high watermark push behavior - var _r9 = new Readable({ - highWaterMark: 6, - objectMode: true - }); - - _r9._read = common.mustNotCall(); - - for (var i = 0; i < 6; i++) { - var bool = _r9.push(i); - - assert.strictEqual(bool, i !== 5); - } -} -{ - // Verify that objects can be written to stream - var w = new Writable({ - objectMode: true - }); - - w._write = function (chunk, encoding, cb) { - assert.deepStrictEqual(chunk, { - foo: 'bar' - }); - cb(); - }; - - w.on('finish', common.mustCall()); - w.write({ - foo: 'bar' - }); - w.end(); -} -{ - // Verify that multiple objects can be written to stream - var _w = new Writable({ - objectMode: true - }); - - var _list5 = []; - - _w._write = function (chunk, encoding, cb) { - _list5.push(chunk); - - cb(); - }; - - _w.on('finish', common.mustCall(function () { - assert.deepStrictEqual(_list5, [0, 1, 2, 3, 4]); - })); - - _w.write(0); - - _w.write(1); - - _w.write(2); - - _w.write(3); - - _w.write(4); - - _w.end(); -} -{ - // Verify that strings can be written as objects - var _w2 = new Writable({ - objectMode: true - }); - - var _list6 = []; - - _w2._write = function (chunk, encoding, cb) { - _list6.push(chunk); - - process.nextTick(cb); - }; - - _w2.on('finish', common.mustCall(function () { - assert.deepStrictEqual(_list6, ['0', '1', '2', '3', '4']); - })); - - _w2.write('0'); - - _w2.write('1'); - - _w2.write('2'); - - _w2.write('3'); - - _w2.write('4'); - - _w2.end(); -} -{ - // Verify that stream buffers finish until callback is called - var _w3 = new Writable({ - objectMode: true - }); - - var called = false; - - _w3._write = function (chunk, encoding, cb) { - assert.strictEqual(chunk, 'foo'); - process.nextTick(function () { - called = true; - cb(); - }); - }; - - _w3.on('finish', common.mustCall(function () { - assert.strictEqual(called, true); - })); - - _w3.write('foo'); - - _w3.end(); -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-pipe-error-handling.js b/test/parallel/test-stream2-pipe-error-handling.js deleted file mode 100644 index e621d48dc6..0000000000 --- a/test/parallel/test-stream2-pipe-error-handling.js +++ /dev/null @@ -1,136 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -{ - var count = 1000; - var source = new stream.Readable(); - - source._read = function (n) { - n = Math.min(count, n); - count -= n; - source.push(bufferShim.allocUnsafe(n)); - }; - - var unpipedDest; - - source.unpipe = function (dest) { - unpipedDest = dest; - stream.Readable.prototype.unpipe.call(this, dest); - }; - - var dest = new stream.Writable(); - - dest._write = function (chunk, encoding, cb) { - cb(); - }; - - source.pipe(dest); - var gotErr = null; - dest.on('error', function (err) { - gotErr = err; - }); - var unpipedSource; - dest.on('unpipe', function (src) { - unpipedSource = src; - }); - var err = new Error('This stream turned into bacon.'); - dest.emit('error', err); - assert.strictEqual(gotErr, err); - assert.strictEqual(unpipedSource, source); - assert.strictEqual(unpipedDest, dest); -} -{ - var _count = 1000; - - var _source = new stream.Readable(); - - _source._read = function (n) { - n = Math.min(_count, n); - _count -= n; - - _source.push(bufferShim.allocUnsafe(n)); - }; - - var _unpipedDest; - - _source.unpipe = function (dest) { - _unpipedDest = dest; - stream.Readable.prototype.unpipe.call(this, dest); - }; - - var _dest = new stream.Writable(); - - _dest._write = function (chunk, encoding, cb) { - cb(); - }; - - _source.pipe(_dest); - - var _unpipedSource; - - _dest.on('unpipe', function (src) { - _unpipedSource = src; - }); - - var _err = new Error('This stream turned into bacon.'); - - var _gotErr = null; - - try { - _dest.emit('error', _err); - } catch (e) { - _gotErr = e; - } - - assert.strictEqual(_gotErr, _err); - assert.strictEqual(_unpipedSource, _source); - assert.strictEqual(_unpipedDest, _dest); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-pipe-error-once-listener.js b/test/parallel/test-stream2-pipe-error-once-listener.js deleted file mode 100644 index ad613095d1..0000000000 --- a/test/parallel/test-stream2-pipe-error-once-listener.js +++ /dev/null @@ -1,119 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) 
_defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var stream = require('../../'); - -var Read = -/*#__PURE__*/ -function (_stream$Readable) { - _inherits(Read, _stream$Readable); - - function Read() { - _classCallCheck(this, Read); - - return _possibleConstructorReturn(this, _getPrototypeOf(Read).apply(this, arguments)); - } - - _createClass(Read, [{ - key: "_read", - value: function _read(size) { - this.push('x'); - this.push(null); - } - }]); - - return Read; -}(stream.Readable); - -var Write = -/*#__PURE__*/ -function (_stream$Writable) { - _inherits(Write, _stream$Writable); - - function Write() { - _classCallCheck(this, Write); - - return _possibleConstructorReturn(this, _getPrototypeOf(Write).apply(this, arguments)); - } - - _createClass(Write, [{ - key: "_write", - value: function _write(buffer, encoding, cb) { - this.emit('error', new Error('boom')); - this.emit('alldone'); - } - }]); - - return Write; -}(stream.Writable); - -var read = new Read(); -var write = new Write(); -write.once('error', function () {}); -write.once('alldone', function (err) { - require('tap').pass(); -}); -process.on('exit', function (c) { - console.error('error thrown even with listener'); -}); -read.pipe(write); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-push.js b/test/parallel/test-stream2-push.js deleted file mode 100644 index b7b9083f0e..0000000000 --- a/test/parallel/test-stream2-push.js +++ /dev/null @@ -1,143 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var _require = require('../../'), - Readable = _require.Readable, - Writable = _require.Writable; - -var EE = require('events').EventEmitter; // a mock thing a bit like the net.Socket/tcp_wrap.handle interaction - - -var stream = new Readable({ - highWaterMark: 16, - encoding: 'utf8' -}); -var source = new EE(); - -stream._read = function () { - console.error('stream._read'); - readStart(); -}; - -var ended = false; -stream.on('end', function () { - ended = true; -}); -source.on('data', function (chunk) { - var ret = stream.push(chunk); - console.error('data', stream.readableLength); - if (!ret) readStop(); -}); -source.on('end', function () { - stream.push(null); -}); -var reading = false; - -function readStart() { - console.error('readStart'); - reading = true; -} - -function readStop() { - console.error('readStop'); - reading = false; - process.nextTick(function () { - var r = stream.read(); - if (r !== null) writer.write(r); - }); -} - -var writer = new Writable({ - decodeStrings: false -}); -var written = []; -var expectWritten = ['asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg']; - -writer._write = function (chunk, encoding, cb) { - console.error("WRITE ".concat(chunk)); - written.push(chunk); - process.nextTick(cb); -}; - -writer.on('finish', finish); // now emit some chunks. - -var chunk = 'asdfg'; -var set = 0; -readStart(); -data(); - -function data() { - assert(reading); - source.emit('data', chunk); - assert(reading); - source.emit('data', chunk); - assert(reading); - source.emit('data', chunk); - assert(reading); - source.emit('data', chunk); - assert(!reading); - if (set++ < 5) setTimeout(data, 10);else end(); -} - -function finish() { - console.error('finish'); - assert.deepStrictEqual(written, expectWritten); - - require('tap').pass(); -} - -function end() { - source.emit('end'); - assert(!reading); - writer.end(stream.read()); - setImmediate(function () { - assert(ended); - }); -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-read-sync-stack.js b/test/parallel/test-stream2-read-sync-stack.js deleted file mode 100644 index 7b94553928..0000000000 --- a/test/parallel/test-stream2-read-sync-stack.js +++ /dev/null @@ -1,66 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var Readable = require('../../').Readable; // This tests synchronous read callbacks and verifies that even if they nest -// heavily the process handles it without an error - - -var r = new Readable(); -var N = 256 * 1024; -var reads = 0; - -r._read = function (n) { - var chunk = reads++ === N ? null : bufferShim.allocUnsafe(1); - r.push(chunk); -}; - -r.on('readable', function onReadable() { - if (!(r.readableLength % 256)) console.error('readable', r.readableLength); - r.read(N * 2); -}); -r.on('end', common.mustCall()); -r.read(0); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js deleted file mode 100644 index d62979a07f..0000000000 --- a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js +++ /dev/null @@ -1,159 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var Readable = require('../../').Readable; - -test1(); -test2(); - -function test1() { - var r = new Readable(); // should not end when we get a bufferShim.alloc(0) or '' as the _read - // result that just means that there is *temporarily* no data, but to - // go ahead and try again later. - // - // note that this is very unusual. it only works for crypto streams - // because the other side of the stream will call read(0) to cycle - // data through openssl. 
that's why setImmediate() is used to call - // r.read(0) again later, otherwise there is no more work being done - // and the process just exits. - - var buf = bufferShim.alloc(5, 'x'); - var reads = 5; - - r._read = function (n) { - switch (reads--) { - case 5: - return setImmediate(function () { - return r.push(buf); - }); - - case 4: - setImmediate(function () { - return r.push(bufferShim.alloc(0)); - }); - return setImmediate(r.read.bind(r, 0)); - - case 3: - setTimeout(r.read.bind(r, 0), 50); - return process.nextTick(function () { - return r.push(bufferShim.alloc(0)); - }); - - case 2: - setImmediate(r.read.bind(r, 0)); - return r.push(bufferShim.alloc(0)); - // Not-EOF! - - case 1: - return r.push(buf); - - case 0: - return r.push(null); - // EOF - - default: - throw new Error('unreachable'); - } - }; - - var results = []; - - function flow() { - var chunk; - - while (null !== (chunk = r.read())) { - results.push(String(chunk)); - } - } - - r.on('readable', flow); - r.on('end', function () { - results.push('EOF'); - }); - flow(); - process.on('exit', function () { - assert.deepStrictEqual(results, ['xxxxx', 'xxxxx', 'EOF']); - - require('tap').pass(); - }); -} - -function test2() { - var r = new Readable({ - encoding: 'base64' - }); - var reads = 5; - - r._read = function (n) { - if (!reads--) return r.push(null); // EOF - else return r.push(bufferShim.from('x')); - }; - - var results = []; - - function flow() { - var chunk; - - while (null !== (chunk = r.read())) { - results.push(String(chunk)); - } - } - - r.on('readable', flow); - r.on('end', function () { - results.push('EOF'); - }); - flow(); - process.on('exit', function () { - assert.deepStrictEqual(results, ['eHh4', 'eHg=', 'EOF']); - - require('tap').pass(); - }); -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-readable-from-list.js b/test/parallel/test-stream2-readable-from-list.js deleted file mode 100644 index 90376973d9..0000000000 --- a/test/parallel/test-stream2-readable-from-list.js +++ /dev/null @@ -1,135 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var fromList = require('../../lib/_stream_readable')._fromList; - -var BufferList = require('../../lib/internal/streams/buffer_list'); - -var util = require('util'); - -function bufferListFromArray(arr) { - var bl = new BufferList(); - - for (var i = 0; i < arr.length; ++i) { - bl.push(arr[i]); - } - - return bl; -} - -{ - // Verify behavior with buffers - var list = [bufferShim.from('foog'), bufferShim.from('bark'), bufferShim.from('bazy'), bufferShim.from('kuel')]; - list = bufferListFromArray(list); - assert.strictEqual(util.inspect([list], { - compact: false - }).indexOf('BufferList') > 0, true); // read more than the first element. - - var ret = fromList(6, { - buffer: list, - length: 16 - }); - assert.strictEqual(ret.toString(), 'foogba'); // read exactly the first element. - - ret = fromList(2, { - buffer: list, - length: 10 - }); - assert.strictEqual(ret.toString(), 'rk'); // read less than the first element. - - ret = fromList(2, { - buffer: list, - length: 8 - }); - assert.strictEqual(ret.toString(), 'ba'); // read more than we have. - - ret = fromList(100, { - buffer: list, - length: 6 - }); - assert.strictEqual(ret.toString(), 'zykuel'); // all consumed. - - assert.deepStrictEqual(list, new BufferList()); -} -{ - // Verify behavior with strings - var _list2 = ['foog', 'bark', 'bazy', 'kuel']; - _list2 = bufferListFromArray(_list2); // read more than the first element. - - var _ret = fromList(6, { - buffer: _list2, - length: 16, - decoder: true - }); - - assert.strictEqual(_ret, 'foogba'); // read exactly the first element. - - _ret = fromList(2, { - buffer: _list2, - length: 10, - decoder: true - }); - assert.strictEqual(_ret, 'rk'); // read less than the first element. - - _ret = fromList(2, { - buffer: _list2, - length: 8, - decoder: true - }); - assert.strictEqual(_ret, 'ba'); // read more than we have. - - _ret = fromList(100, { - buffer: _list2, - length: 6, - decoder: true - }); - assert.strictEqual(_ret, 'zykuel'); // all consumed. - - assert.deepStrictEqual(_list2, new BufferList()); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-readable-legacy-drain.js b/test/parallel/test-stream2-readable-legacy-drain.js deleted file mode 100644 index 08cd9a57d8..0000000000 --- a/test/parallel/test-stream2-readable-legacy-drain.js +++ /dev/null @@ -1,86 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var Stream = require('../../'); - -var Readable = require('../../').Readable; - -var r = new Readable(); -var N = 256; -var reads = 0; - -r._read = function (n) { - return r.push(++reads === N ? null : bufferShim.allocUnsafe(1)); -}; - -r.on('end', common.mustCall()); -var w = new Stream(); -w.writable = true; -var buffered = 0; - -w.write = function (c) { - buffered += c.length; - process.nextTick(drain); - return false; -}; - -function drain() { - assert(buffered <= 3); - buffered = 0; - w.emit('drain'); -} - -w.end = common.mustCall(); // Just for kicks, let's mess with the drain count. -// This verifies that even if it gets negative in the -// pipe() cleanup function, we'll still function properly. - -r.on('readable', function () { - w.emit('drain'); -}); -r.pipe(w); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-readable-non-empty-end.js b/test/parallel/test-stream2-readable-non-empty-end.js deleted file mode 100644 index 5b129fd4f6..0000000000 --- a/test/parallel/test-stream2-readable-non-empty-end.js +++ /dev/null @@ -1,102 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var Readable = require('../../lib/_stream_readable'); - -var len = 0; -var chunks = new Array(10); - -for (var i = 1; i <= 10; i++) { - chunks[i - 1] = bufferShim.allocUnsafe(i); - len += i; -} - -var test = new Readable(); -var n = 0; - -test._read = function (size) { - var chunk = chunks[n++]; - setTimeout(function () { - test.push(chunk === undefined ? null : chunk); - }, 1); -}; - -test.on('end', thrower); - -function thrower() { - throw new Error('this should not happen!'); -} - -var bytesread = 0; -test.on('readable', function () { - var b = len - bytesread - 1; - var res = test.read(b); - - if (res) { - bytesread += res.length; - console.error("br=".concat(bytesread, " len=").concat(len)); - setTimeout(next, 1); - } - - test.read(0); -}); -test.read(0); - -function next() { - // now let's make 'end' happen - test.removeListener('end', thrower); - test.on('end', common.mustCall()); // one to get the last byte - - var r = test.read(); - assert(r); - assert.strictEqual(r.length, 1); - r = test.read(); - assert.strictEqual(r, null); -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-readable-wrap-empty.js b/test/parallel/test-stream2-readable-wrap-empty.js deleted file mode 100644 index 99c4f4e1f9..0000000000 --- a/test/parallel/test-stream2-readable-wrap-empty.js +++ /dev/null @@ -1,60 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var Readable = require('../../lib/_stream_readable'); - -var EE = require('events').EventEmitter; - -var oldStream = new EE(); - -oldStream.pause = function () {}; - -oldStream.resume = function () {}; - -var newStream = new Readable().wrap(oldStream); -newStream.on('readable', function () {}).on('end', common.mustCall()); -oldStream.emit('end'); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-set-encoding.js b/test/parallel/test-stream2-set-encoding.js deleted file mode 100644 index d1c7c8f693..0000000000 --- a/test/parallel/test-stream2-set-encoding.js +++ /dev/null @@ -1,285 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var R = require('../../lib/_stream_readable'); - -var TestReader = -/*#__PURE__*/ -function (_R) { - _inherits(TestReader, _R); - - function TestReader(n, opts) { - var _this; - - _classCallCheck(this, TestReader); - - _this = _possibleConstructorReturn(this, _getPrototypeOf(TestReader).call(this, opts)); - _this.pos = 0; - _this.len = n || 100; - return _this; - } - - _createClass(TestReader, [{ - key: "_read", - value: function _read(n) { - var _this2 = this; - - setTimeout(function () { - if (_this2.pos >= _this2.len) { - // double push(null) to test eos handling - _this2.push(null); - - return _this2.push(null); - } - - n = Math.min(n, _this2.len - _this2.pos); - - if (n <= 0) { - // double push(null) to test eos handling - _this2.push(null); - - return _this2.push(null); - } - - _this2.pos += n; - var ret = bufferShim.alloc(n, 'a'); - return _this2.push(ret); - }, 1); - } - }]); - - return TestReader; -}(R); - -{ - // Verify utf8 encoding - var tr = new TestReader(100); - tr.setEncoding('utf8'); - var out = []; - var expect = ['aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa']; - tr.on('readable', function flow() { - var chunk; - - while (null !== (chunk = tr.read(10))) { - out.push(chunk); - } - }); - tr.on('end', common.mustCall(function () { - assert.deepStrictEqual(out, expect); - })); -} -{ - // Verify hex encoding - var _tr = new TestReader(100); - - _tr.setEncoding('hex'); - - var _out = []; - var _expect = ['6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161']; - - _tr.on('readable', function flow() { - var chunk; - - while (null !== (chunk = _tr.read(10))) { - _out.push(chunk); - } - }); - - _tr.on('end', common.mustCall(function () { - assert.deepStrictEqual(_out, _expect); - })); -} -{ - // Verify hex encoding with read(13) - var _tr2 = new TestReader(100); - - _tr2.setEncoding('hex'); - - var _out2 = []; - var _expect2 = ['6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '16161']; - - _tr2.on('readable', function flow() { - var chunk; - - while (null !== (chunk = _tr2.read(13))) { - _out2.push(chunk); - } - }); - - _tr2.on('end', common.mustCall(function () { - assert.deepStrictEqual(_out2, _expect2); - })); -} -{ - // Verify base64 encoding - var _tr3 = new TestReader(100); - - _tr3.setEncoding('base64'); - - var _out3 = []; - var _expect3 = ['YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 
'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYQ==']; - - _tr3.on('readable', function flow() { - var chunk; - - while (null !== (chunk = _tr3.read(10))) { - _out3.push(chunk); - } - }); - - _tr3.on('end', common.mustCall(function () { - assert.deepStrictEqual(_out3, _expect3); - })); -} -{ - // Verify utf8 encoding - var _tr4 = new TestReader(100, { - encoding: 'utf8' - }); - - var _out4 = []; - var _expect4 = ['aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa']; - - _tr4.on('readable', function flow() { - var chunk; - - while (null !== (chunk = _tr4.read(10))) { - _out4.push(chunk); - } - }); - - _tr4.on('end', common.mustCall(function () { - assert.deepStrictEqual(_out4, _expect4); - })); -} -{ - // Verify hex encoding - var _tr5 = new TestReader(100, { - encoding: 'hex' - }); - - var _out5 = []; - var _expect5 = ['6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161']; - - _tr5.on('readable', function flow() { - var chunk; - - while (null !== (chunk = _tr5.read(10))) { - _out5.push(chunk); - } - }); - - _tr5.on('end', common.mustCall(function () { - assert.deepStrictEqual(_out5, _expect5); - })); -} -{ - // Verify hex encoding with read(13) - var _tr6 = new TestReader(100, { - encoding: 'hex' - }); - - var _out6 = []; - var _expect6 = ['6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '16161']; - - _tr6.on('readable', function flow() { - var chunk; - - while (null !== (chunk = _tr6.read(13))) { - _out6.push(chunk); - } - }); - - _tr6.on('end', common.mustCall(function () { - assert.deepStrictEqual(_out6, _expect6); - })); -} -{ - // Verify base64 encoding - var _tr7 = new TestReader(100, { - encoding: 'base64' - }); - - var _out7 = []; - var _expect7 = ['YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYQ==']; - - _tr7.on('readable', function flow() { - var chunk; - - while (null !== (chunk = _tr7.read(10))) { - _out7.push(chunk); - } - }); - - _tr7.on('end', common.mustCall(function () { - assert.deepStrictEqual(_out7, _expect7); - })); -} -{ - // Verify chaining behavior - var _tr8 = new TestReader(100); - - assert.deepStrictEqual(_tr8.setEncoding('utf8'), _tr8); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-transform.js b/test/parallel/test-stream2-transform.js deleted file mode 100644 index 9bf6158c49..0000000000 --- a/test/parallel/test-stream2-transform.js +++ /dev/null @@ -1,576 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var PassThrough = require('../../lib/_stream_passthrough'); - -var Transform = require('../../lib/_stream_transform'); - -{ - // Verify writable side consumption - var tx = new Transform({ - highWaterMark: 10 - }); - var transformed = 0; - - tx._transform = function (chunk, encoding, cb) { - transformed += chunk.length; - tx.push(chunk); - cb(); - }; - - for (var i = 1; i <= 10; i++) { - tx.write(bufferShim.allocUnsafe(i)); - } - - tx.end(); - assert.strictEqual(tx.readableLength, 10); - assert.strictEqual(transformed, 10); - assert.strictEqual(tx._transformState.writechunk.length, 5); - assert.deepStrictEqual(tx.writableBuffer.map(function (c) { - return c.chunk.length; - }), [6, 7, 8, 9, 10]); -} -{ - // Verify passthrough behavior - var pt = new PassThrough(); - pt.write(bufferShim.from('foog')); - pt.write(bufferShim.from('bark')); - pt.write(bufferShim.from('bazy')); - pt.write(bufferShim.from('kuel')); - pt.end(); - assert.strictEqual(pt.read(5).toString(), 'foogb'); - assert.strictEqual(pt.read(5).toString(), 'arkba'); - assert.strictEqual(pt.read(5).toString(), 'zykue'); - assert.strictEqual(pt.read(5).toString(), 'l'); -} -{ - // Verify object passthrough behavior - var _pt = new PassThrough({ - objectMode: true - }); - - _pt.write(1); - - _pt.write(true); - - _pt.write(false); - - _pt.write(0); - - _pt.write('foo'); - - _pt.write(''); - - _pt.write({ - a: 'b' - }); - - _pt.end(); - - assert.strictEqual(_pt.read(), 1); - assert.strictEqual(_pt.read(), true); - assert.strictEqual(_pt.read(), false); - assert.strictEqual(_pt.read(), 0); - assert.strictEqual(_pt.read(), 'foo'); - assert.strictEqual(_pt.read(), ''); - assert.deepStrictEqual(_pt.read(), { - a: 'b' - }); -} -{ - // Verify passthrough constructor behavior - var _pt2 = PassThrough(); - - assert(_pt2 instanceof PassThrough); -} -{ - // Verify transform constructor behavior - var _pt3 = Transform(); - - assert(_pt3 instanceof Transform); -} -{ - // Perform a simple transform - var _pt4 = new Transform(); - - _pt4._transform = function (c, e, cb) { - var ret = bufferShim.alloc(c.length, 'x'); - - _pt4.push(ret); - - cb(); - }; - - _pt4.write(bufferShim.from('foog')); - - _pt4.write(bufferShim.from('bark')); - - _pt4.write(bufferShim.from('bazy')); - - _pt4.write(bufferShim.from('kuel')); - - _pt4.end(); - - 
assert.strictEqual(_pt4.read(5).toString(), 'xxxxx'); - assert.strictEqual(_pt4.read(5).toString(), 'xxxxx'); - assert.strictEqual(_pt4.read(5).toString(), 'xxxxx'); - assert.strictEqual(_pt4.read(5).toString(), 'x'); -} -{ - // Verify simple object transform - var _pt5 = new Transform({ - objectMode: true - }); - - _pt5._transform = function (c, e, cb) { - _pt5.push(JSON.stringify(c)); - - cb(); - }; - - _pt5.write(1); - - _pt5.write(true); - - _pt5.write(false); - - _pt5.write(0); - - _pt5.write('foo'); - - _pt5.write(''); - - _pt5.write({ - a: 'b' - }); - - _pt5.end(); - - assert.strictEqual(_pt5.read(), '1'); - assert.strictEqual(_pt5.read(), 'true'); - assert.strictEqual(_pt5.read(), 'false'); - assert.strictEqual(_pt5.read(), '0'); - assert.strictEqual(_pt5.read(), '"foo"'); - assert.strictEqual(_pt5.read(), '""'); - assert.strictEqual(_pt5.read(), '{"a":"b"}'); -} -{ - // Verify async passthrough - var _pt6 = new Transform(); - - _pt6._transform = function (chunk, encoding, cb) { - setTimeout(function () { - _pt6.push(chunk); - - cb(); - }, 10); - }; - - _pt6.write(bufferShim.from('foog')); - - _pt6.write(bufferShim.from('bark')); - - _pt6.write(bufferShim.from('bazy')); - - _pt6.write(bufferShim.from('kuel')); - - _pt6.end(); - - _pt6.on('finish', common.mustCall(function () { - assert.strictEqual(_pt6.read(5).toString(), 'foogb'); - assert.strictEqual(_pt6.read(5).toString(), 'arkba'); - assert.strictEqual(_pt6.read(5).toString(), 'zykue'); - assert.strictEqual(_pt6.read(5).toString(), 'l'); - })); -} -{ - // Verify asymmetric transform (expand) - var _pt7 = new Transform(); // emit each chunk 2 times. - - - _pt7._transform = function (chunk, encoding, cb) { - setTimeout(function () { - _pt7.push(chunk); - - setTimeout(function () { - _pt7.push(chunk); - - cb(); - }, 10); - }, 10); - }; - - _pt7.write(bufferShim.from('foog')); - - _pt7.write(bufferShim.from('bark')); - - _pt7.write(bufferShim.from('bazy')); - - _pt7.write(bufferShim.from('kuel')); - - _pt7.end(); - - _pt7.on('finish', common.mustCall(function () { - assert.strictEqual(_pt7.read(5).toString(), 'foogf'); - assert.strictEqual(_pt7.read(5).toString(), 'oogba'); - assert.strictEqual(_pt7.read(5).toString(), 'rkbar'); - assert.strictEqual(_pt7.read(5).toString(), 'kbazy'); - assert.strictEqual(_pt7.read(5).toString(), 'bazyk'); - assert.strictEqual(_pt7.read(5).toString(), 'uelku'); - assert.strictEqual(_pt7.read(5).toString(), 'el'); - })); -} -{ - // Verify asymmetric transform (compress) - var _pt8 = new Transform(); // each output is the first char of 3 consecutive chunks, - // or whatever's left. - - - _pt8.state = ''; - - _pt8._transform = function (chunk, encoding, cb) { - var _this = this; - - if (!chunk) chunk = ''; - var s = chunk.toString(); - setTimeout(function () { - _this.state += s.charAt(0); - - if (_this.state.length === 3) { - _pt8.push(bufferShim.from(_this.state)); - - _this.state = ''; - } - - cb(); - }, 10); - }; - - _pt8._flush = function (cb) { - // just output whatever we have. 
- _pt8.push(bufferShim.from(this.state)); - - this.state = ''; - cb(); - }; - - _pt8.write(bufferShim.from('aaaa')); - - _pt8.write(bufferShim.from('bbbb')); - - _pt8.write(bufferShim.from('cccc')); - - _pt8.write(bufferShim.from('dddd')); - - _pt8.write(bufferShim.from('eeee')); - - _pt8.write(bufferShim.from('aaaa')); - - _pt8.write(bufferShim.from('bbbb')); - - _pt8.write(bufferShim.from('cccc')); - - _pt8.write(bufferShim.from('dddd')); - - _pt8.write(bufferShim.from('eeee')); - - _pt8.write(bufferShim.from('aaaa')); - - _pt8.write(bufferShim.from('bbbb')); - - _pt8.write(bufferShim.from('cccc')); - - _pt8.write(bufferShim.from('dddd')); - - _pt8.end(); // 'abcdeabcdeabcd' - - - _pt8.on('finish', common.mustCall(function () { - assert.strictEqual(_pt8.read(5).toString(), 'abcde'); - assert.strictEqual(_pt8.read(5).toString(), 'abcde'); - assert.strictEqual(_pt8.read(5).toString(), 'abcd'); - })); -} // this tests for a stall when data is written to a full stream -// that has empty transforms. - -{ - // Verify complex transform behavior - var count = 0; - var saved = null; - - var _pt9 = new Transform({ - highWaterMark: 3 - }); - - _pt9._transform = function (c, e, cb) { - if (count++ === 1) saved = c;else { - if (saved) { - _pt9.push(saved); - - saved = null; - } - - _pt9.push(c); - } - cb(); - }; - - _pt9.once('readable', function () { - process.nextTick(function () { - _pt9.write(bufferShim.from('d')); - - _pt9.write(bufferShim.from('ef'), common.mustCall(function () { - _pt9.end(); - })); - - assert.strictEqual(_pt9.read().toString(), 'abcdef'); - assert.strictEqual(_pt9.read(), null); - }); - }); - - _pt9.write(bufferShim.from('abc')); -} -{ - // Verify passthrough event emission - var _pt10 = new PassThrough(); - - var emits = 0; - - _pt10.on('readable', function () { - emits++; - }); - - _pt10.write(bufferShim.from('foog')); - - _pt10.write(bufferShim.from('bark')); - - assert.strictEqual(emits, 0); - assert.strictEqual(_pt10.read(5).toString(), 'foogb'); - assert.strictEqual(String(_pt10.read(5)), 'null'); - assert.strictEqual(emits, 0); - - _pt10.write(bufferShim.from('bazy')); - - _pt10.write(bufferShim.from('kuel')); - - assert.strictEqual(emits, 0); - assert.strictEqual(_pt10.read(5).toString(), 'arkba'); - assert.strictEqual(_pt10.read(5).toString(), 'zykue'); - assert.strictEqual(_pt10.read(5), null); - - _pt10.end(); - - assert.strictEqual(emits, 1); - assert.strictEqual(_pt10.read(5).toString(), 'l'); - assert.strictEqual(_pt10.read(5), null); - assert.strictEqual(emits, 1); -} -{ - // Verify passthrough event emission reordering - var _pt11 = new PassThrough(); - - var _emits = 0; - - _pt11.on('readable', function () { - _emits++; - }); - - _pt11.write(bufferShim.from('foog')); - - _pt11.write(bufferShim.from('bark')); - - assert.strictEqual(_emits, 0); - assert.strictEqual(_pt11.read(5).toString(), 'foogb'); - assert.strictEqual(_pt11.read(5), null); - - _pt11.once('readable', common.mustCall(function () { - assert.strictEqual(_pt11.read(5).toString(), 'arkba'); - assert.strictEqual(_pt11.read(5), null); - - _pt11.once('readable', common.mustCall(function () { - assert.strictEqual(_pt11.read(5).toString(), 'zykue'); - assert.strictEqual(_pt11.read(5), null); - - _pt11.once('readable', common.mustCall(function () { - assert.strictEqual(_pt11.read(5).toString(), 'l'); - assert.strictEqual(_pt11.read(5), null); - assert.strictEqual(_emits, 3); - })); - - _pt11.end(); - })); - - _pt11.write(bufferShim.from('kuel')); - })); - - _pt11.write(bufferShim.from('bazy')); -} -{ - 
// Verify passthrough facade - var _pt12 = new PassThrough(); - - var datas = []; - - _pt12.on('data', function (chunk) { - datas.push(chunk.toString()); - }); - - _pt12.on('end', common.mustCall(function () { - assert.deepStrictEqual(datas, ['foog', 'bark', 'bazy', 'kuel']); - })); - - _pt12.write(bufferShim.from('foog')); - - setTimeout(function () { - _pt12.write(bufferShim.from('bark')); - - setTimeout(function () { - _pt12.write(bufferShim.from('bazy')); - - setTimeout(function () { - _pt12.write(bufferShim.from('kuel')); - - setTimeout(function () { - _pt12.end(); - }, 10); - }, 10); - }, 10); - }, 10); -} -{ - // Verify object transform (JSON parse) - var jp = new Transform({ - objectMode: true - }); - - jp._transform = function (data, encoding, cb) { - try { - jp.push(JSON.parse(data)); - cb(); - } catch (er) { - cb(er); - } - }; // anything except null/undefined is fine. - // those are "magic" in the stream API, because they signal EOF. - - - var objects = [{ - foo: 'bar' - }, 100, 'string', { - nested: { - things: [{ - foo: 'bar' - }, 100, 'string'] - } - }]; - var ended = false; - jp.on('end', function () { - ended = true; - }); - forEach(objects, function (obj) { - jp.write(JSON.stringify(obj)); - var res = jp.read(); - assert.deepStrictEqual(res, obj); - }); - jp.end(); // read one more time to get the 'end' event - - jp.read(); - process.nextTick(common.mustCall(function () { - assert.strictEqual(ended, true); - })); -} -{ - // Verify object transform (JSON stringify) - var js = new Transform({ - objectMode: true - }); - - js._transform = function (data, encoding, cb) { - try { - js.push(JSON.stringify(data)); - cb(); - } catch (er) { - cb(er); - } - }; // anything except null/undefined is fine. - // those are "magic" in the stream API, because they signal EOF. 
- - - var _objects = [{ - foo: 'bar' - }, 100, 'string', { - nested: { - things: [{ - foo: 'bar' - }, 100, 'string'] - } - }]; - var _ended = false; - js.on('end', function () { - _ended = true; - }); - forEach(_objects, function (obj) { - js.write(obj); - var res = js.read(); - assert.strictEqual(res, JSON.stringify(obj)); - }); - js.end(); // read one more time to get the 'end' event - - js.read(); - process.nextTick(common.mustCall(function () { - assert.strictEqual(_ended, true); - })); -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-unpipe-drain.js b/test/parallel/test-stream2-unpipe-drain.js deleted file mode 100644 index a2e8166cf6..0000000000 --- a/test/parallel/test-stream2-unpipe-drain.js +++ /dev/null @@ -1,134 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -(function () { - // Copyright Joyent, Inc. and other Node contributors. 
- // - // Permission is hereby granted, free of charge, to any person obtaining a - // copy of this software and associated documentation files (the - // "Software"), to deal in the Software without restriction, including - // without limitation the rights to use, copy, modify, merge, publish, - // distribute, sublicense, and/or sell copies of the Software, and to permit - // persons to whom the Software is furnished to do so, subject to the - // following conditions: - // - // The above copyright notice and this permission notice shall be included - // in all copies or substantial portions of the Software. - // - // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS - // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN - // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, - // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR - // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE - // USE OR OTHER DEALINGS IN THE SOFTWARE. - - /**/ - var bufferShim = require('safe-buffer').Buffer; - /**/ - - - require('../common'); - - var assert = require('assert/'); - - var stream = require('../../'); - - var TestWriter = - /*#__PURE__*/ - function (_stream$Writable) { - _inherits(TestWriter, _stream$Writable); - - function TestWriter() { - _classCallCheck(this, TestWriter); - - return _possibleConstructorReturn(this, _getPrototypeOf(TestWriter).apply(this, arguments)); - } - - _createClass(TestWriter, [{ - key: "_write", - value: function _write(buffer, encoding, callback) { - console.log('write called'); // super slow write stream (callback never called) - } - }]); - - return TestWriter; - }(stream.Writable); - - var dest = new TestWriter(); - - var TestReader = - /*#__PURE__*/ - function (_stream$Readable) { - _inherits(TestReader, _stream$Readable); - - function TestReader() { - var _this; - - _classCallCheck(this, TestReader); - - _this = _possibleConstructorReturn(this, _getPrototypeOf(TestReader).call(this)); - _this.reads = 0; - return _this; - } - - _createClass(TestReader, [{ - key: "_read", - value: function _read(size) { - this.reads += 1; - this.push(bufferShim.alloc(size)); - } - }]); - - return TestReader; - }(stream.Readable); - - var src1 = new TestReader(); - var src2 = new TestReader(); - src1.pipe(dest); - src1.once('readable', function () { - process.nextTick(function () { - src2.pipe(dest); - src2.once('readable', function () { - process.nextTick(function () { - src1.unpipe(dest); - }); - }); - }); - }); - process.on('exit', function () { - assert.strictEqual(src1.reads, 2); - assert.strictEqual(src2.reads, 2); - }); -})(); - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-unpipe-leak.js b/test/parallel/test-stream2-unpipe-leak.js deleted file mode 100644 index 93e26015d9..0000000000 --- a/test/parallel/test-stream2-unpipe-leak.js +++ /dev/null @@ -1,138 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) 
{ var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var chunk = bufferShim.from('hallo'); - -var TestWriter = -/*#__PURE__*/ -function (_stream$Writable) { - _inherits(TestWriter, _stream$Writable); - - function TestWriter() { - _classCallCheck(this, TestWriter); - - return _possibleConstructorReturn(this, _getPrototypeOf(TestWriter).apply(this, arguments)); - } - - _createClass(TestWriter, [{ - key: "_write", - value: function _write(buffer, encoding, callback) { - callback(null); - } - }]); - - return TestWriter; -}(stream.Writable); - -var dest = new TestWriter(); // Set this high so that we'd trigger a nextTick warning -// and/or RangeError if we do maybeReadMore wrong. 
- -var TestReader = -/*#__PURE__*/ -function (_stream$Readable) { - _inherits(TestReader, _stream$Readable); - - function TestReader() { - _classCallCheck(this, TestReader); - - return _possibleConstructorReturn(this, _getPrototypeOf(TestReader).call(this, { - highWaterMark: 0x10000 - })); - } - - _createClass(TestReader, [{ - key: "_read", - value: function _read(size) { - this.push(chunk); - } - }]); - - return TestReader; -}(stream.Readable); - -var src = new TestReader(); - -for (var i = 0; i < 10; i++) { - src.pipe(dest); - src.unpipe(dest); -} - -assert.strictEqual(src.listeners('end').length, 0); -assert.strictEqual(src.listeners('readable').length, 0); -assert.strictEqual(dest.listeners('unpipe').length, 0); -assert.strictEqual(dest.listeners('drain').length, 0); -assert.strictEqual(dest.listeners('error').length, 0); -assert.strictEqual(dest.listeners('close').length, 0); -assert.strictEqual(dest.listeners('finish').length, 0); -console.error(src._readableState); -process.on('exit', function () { - src.readableBuffer.length = 0; - console.error(src._readableState); - assert(src.readableLength >= src.readableHighWaterMark); - - require('tap').pass(); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream2-writable.js b/test/parallel/test-stream2-writable.js deleted file mode 100644 index 4d38254e8d..0000000000 --- a/test/parallel/test-stream2-writable.js +++ /dev/null @@ -1,465 +0,0 @@ -"use strict"; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -function _possibleConstructorReturn(self, call) { if (call && (typeof call === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } - -function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } - -function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } - -function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } - -function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var W = require('../../lib/_stream_writable'); - -var D = require('../../lib/_stream_duplex'); - -var assert = require('assert/'); - -var TestWriter = -/*#__PURE__*/ -function (_W) { - _inherits(TestWriter, _W); - - function TestWriter(opts) { - var _this; - - _classCallCheck(this, TestWriter); - - _this = _possibleConstructorReturn(this, _getPrototypeOf(TestWriter).call(this, opts)); - _this.buffer = []; - _this.written = 0; - return _this; - } - - _createClass(TestWriter, [{ - key: "_write", - value: function _write(chunk, encoding, cb) { - var _this2 = this; - - // simulate a small unpredictable latency - setTimeout(function () { - _this2.buffer.push(chunk.toString()); - - _this2.written += chunk.length; - cb(); - }, Math.floor(Math.random() * 10)); - } - }]); - - return TestWriter; -}(W); - -var chunks = new Array(50); - -for (var i = 0; i < chunks.length; i++) { - chunks[i] = 'x'.repeat(i); -} - -{ - // Verify fast writing - var tw = new TestWriter({ - highWaterMark: 100 - }); - tw.on('finish', common.mustCall(function () { - // got chunks in the right order - assert.deepStrictEqual(tw.buffer, chunks); - })); - forEach(chunks, function (chunk) { - // Ignore backpressure. Just buffer it all up. 
- tw.write(chunk); - }); - tw.end(); -} -{ - // Verify slow writing - var _tw = new TestWriter({ - highWaterMark: 100 - }); - - _tw.on('finish', common.mustCall(function () { - // got chunks in the right order - assert.deepStrictEqual(_tw.buffer, chunks); - })); - - var _i = 0; - - (function W() { - _tw.write(chunks[_i++]); - - if (_i < chunks.length) setTimeout(W, 10);else _tw.end(); - })(); -} -{ - // Verify write backpressure - var _tw2 = new TestWriter({ - highWaterMark: 50 - }); - - var drains = 0; - - _tw2.on('finish', common.mustCall(function () { - // got chunks in the right order - assert.deepStrictEqual(_tw2.buffer, chunks); - assert.strictEqual(drains, 17); - })); - - _tw2.on('drain', function () { - drains++; - }); - - var _i2 = 0; - - (function W() { - var ret; - - do { - ret = _tw2.write(chunks[_i2++]); - } while (ret !== false && _i2 < chunks.length); - - if (_i2 < chunks.length) { - assert(_tw2.writableLength >= 50); - - _tw2.once('drain', W); - } else { - _tw2.end(); - } - })(); -} -{ - // Verify write buffersize - var _tw3 = new TestWriter({ - highWaterMark: 100 - }); - - var encodings = ['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', undefined]; - - _tw3.on('finish', function () { - // got the expected chunks - assert.deepStrictEqual(_tw3.buffer, chunks); - }); - - forEach(chunks, function (chunk, i) { - var enc = encodings[i % encodings.length]; - chunk = bufferShim.from(chunk); - - _tw3.write(chunk.toString(enc), enc); - }); -} -{ - // Verify write with no buffersize - var _tw4 = new TestWriter({ - highWaterMark: 100, - decodeStrings: false - }); - - _tw4._write = function (chunk, encoding, cb) { - assert.strictEqual(typeof chunk, 'string'); - chunk = bufferShim.from(chunk, encoding); - return TestWriter.prototype._write.call(this, chunk, encoding, cb); - }; - - var _encodings = ['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', undefined]; - - _tw4.on('finish', function () { - // got the expected chunks - assert.deepStrictEqual(_tw4.buffer, chunks); - }); - - forEach(chunks, function (chunk, i) { - var enc = _encodings[i % _encodings.length]; - chunk = bufferShim.from(chunk); - - _tw4.write(chunk.toString(enc), enc); - }); -} -{ - // Verify write callbacks - var callbacks = chunks.map(function (chunk, i) { - return [i, function () { - callbacks._called[i] = chunk; - }]; - }).reduce(function (set, x) { - set["callback-".concat(x[0])] = x[1]; - return set; - }, {}); - callbacks._called = []; - - var _tw5 = new TestWriter({ - highWaterMark: 100 - }); - - _tw5.on('finish', common.mustCall(function () { - process.nextTick(common.mustCall(function () { - // got chunks in the right order - assert.deepStrictEqual(_tw5.buffer, chunks); // called all callbacks - - assert.deepStrictEqual(callbacks._called, chunks); - })); - })); - - forEach(chunks, function (chunk, i) { - _tw5.write(chunk, callbacks["callback-".concat(i)]); - }); - - _tw5.end(); -} -{ - // Verify end() callback - var _tw6 = new TestWriter(); - - _tw6.end(common.mustCall()); -} -{ - // Verify end() callback with chunk - var _tw7 = new TestWriter(); - - _tw7.end(bufferShim.from('hello world'), common.mustCall()); -} -{ - // Verify end() callback with chunk and encoding - var _tw8 = new TestWriter(); - - _tw8.end('hello world', 'ascii', common.mustCall()); -} -{ - // Verify end() callback after write() call - var _tw9 = new TestWriter(); - - _tw9.write(bufferShim.from('hello world')); - - 
_tw9.end(common.mustCall()); -} -{ - // Verify end() callback after write() callback - var _tw10 = new TestWriter(); - - var writeCalledback = false; - - _tw10.write(bufferShim.from('hello world'), function () { - writeCalledback = true; - }); - - _tw10.end(common.mustCall(function () { - assert.strictEqual(writeCalledback, true); - })); -} -{ - // Verify encoding is ignored for buffers - var _tw11 = new W(); - - var hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb'; - _tw11._write = common.mustCall(function (chunk) { - assert.strictEqual(chunk.toString('hex'), hex); - }); - var buf = bufferShim.from(hex, 'hex'); - - _tw11.write(buf, 'latin1'); -} -{ - // Verify writables cannot be piped - var w = new W(); - w._write = common.mustNotCall(); - var gotError = false; - w.on('error', function () { - gotError = true; - }); - w.pipe(process.stdout); - assert.strictEqual(gotError, true); -} -{ - // Verify that duplex streams cannot be piped - var d = new D(); - d._read = common.mustCall(); - d._write = common.mustNotCall(); - var _gotError = false; - d.on('error', function () { - _gotError = true; - }); - d.pipe(process.stdout); - assert.strictEqual(_gotError, false); -} -{ - // Verify that end(chunk) twice is an error - var _w = new W(); - - _w._write = common.mustCall(function (msg) { - assert.strictEqual(msg.toString(), 'this is the end'); - }); - var _gotError2 = false; - - _w.on('error', function (er) { - _gotError2 = true; - assert.strictEqual(er.message, 'write after end'); - }); - - _w.end('this is the end'); - - _w.end('and so is this'); - - process.nextTick(common.mustCall(function () { - assert.strictEqual(_gotError2, true); - })); -} -{ - // Verify stream doesn't end while writing - var _w2 = new W(); - - var wrote = false; - - _w2._write = function (chunk, e, cb) { - assert.strictEqual(this.writing, undefined); - wrote = true; - this.writing = true; - setTimeout(function () { - this.writing = false; - cb(); - }, 1); - }; - - _w2.on('finish', common.mustCall(function () { - assert.strictEqual(wrote, true); - })); - - _w2.write(bufferShim.alloc(0)); - - _w2.end(); -} -{ - // Verify finish does not come before write() callback - var _w3 = new W(); - - var writeCb = false; - - _w3._write = function (chunk, e, cb) { - setTimeout(function () { - writeCb = true; - cb(); - }, 10); - }; - - _w3.on('finish', common.mustCall(function () { - assert.strictEqual(writeCb, true); - })); - - _w3.write(bufferShim.alloc(0)); - - _w3.end(); -} -{ - // Verify finish does not come before synchronous _write() callback - var _w4 = new W(); - - var _writeCb = false; - - _w4._write = function (chunk, e, cb) { - cb(); - }; - - _w4.on('finish', common.mustCall(function () { - assert.strictEqual(_writeCb, true); - })); - - _w4.write(bufferShim.alloc(0), function () { - _writeCb = true; - }); - - _w4.end(); -} -{ - // Verify finish is emitted if the last chunk is empty - var _w5 = new W(); - - _w5._write = function (chunk, e, cb) { - process.nextTick(cb); - }; - - _w5.on('finish', common.mustCall()); - - _w5.write(bufferShim.allocUnsafe(1)); - - _w5.end(bufferShim.alloc(0)); -} -{ - // Verify that finish is emitted after shutdown - var _w6 = new W(); - - var shutdown = false; - _w6._final = common.mustCall(function (cb) { - assert.strictEqual(this, _w6); - setTimeout(function () { - shutdown = true; - cb(); - }, 100); - }); - - _w6._write = function (chunk, e, cb) { - process.nextTick(cb); - }; - - _w6.on('finish', common.mustCall(function () { - assert.strictEqual(shutdown, true); - })); - - 
_w6.write(bufferShim.allocUnsafe(1)); - - _w6.end(bufferShim.allocUnsafe(0)); -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream3-cork-end.js b/test/parallel/test-stream3-cork-end.js deleted file mode 100644 index 78a3fd8cd2..0000000000 --- a/test/parallel/test-stream3-cork-end.js +++ /dev/null @@ -1,106 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var Writable = stream.Writable; // Test the buffering behavior of Writable streams. -// -// The call to cork() triggers storing chunks which are flushed -// on calling end() and the stream subsequently ended. -// -// node version target: 0.12 - -var expectedChunks = ['please', 'buffer', 'me', 'kindly']; -var inputChunks = expectedChunks.slice(0); -var seenChunks = []; -var seenEnd = false; -var w = new Writable(); // lets arrange to store the chunks - -w._write = function (chunk, encoding, cb) { - // stream end event is not seen before the last write - assert.ok(!seenEnd); // default encoding given none was specified - - assert.strictEqual(encoding, 'buffer'); - seenChunks.push(chunk); - cb(); -}; // lets record the stream end event - - -w.on('finish', function () { - seenEnd = true; -}); - -function writeChunks(remainingChunks, callback) { - var writeChunk = remainingChunks.shift(); - var writeState; - - if (writeChunk) { - setImmediate(function () { - writeState = w.write(writeChunk); // we were not told to stop writing - - assert.ok(writeState); - writeChunks(remainingChunks, callback); - }); - } else { - callback(); - } -} // do an initial write - - -w.write('stuff'); // the write was immediate - -assert.strictEqual(seenChunks.length, 1); // reset the seen chunks - -seenChunks = []; // trigger stream buffering - -w.cork(); // write the bufferedChunks - -writeChunks(inputChunks, function () { - // should not have seen anything yet - assert.strictEqual(seenChunks.length, 0); // trigger flush and ending the stream - - w.end(); // stream should not ended in current tick - - assert.ok(!seenEnd); // buffered bytes should be seen in current tick - - assert.strictEqual(seenChunks.length, 4); // did the chunks match - - for (var i = 0, l = expectedChunks.length; i < l; i++) { - var seen = seenChunks[i]; // there was a chunk - - assert.ok(seen); - var expected = bufferShim.from(expectedChunks[i]); // it was what we expected - - assert.deepEqual(seen, expected); - } - - setImmediate(function () { - // stream should have ended in next tick - assert.ok(seenEnd); - }); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream3-cork-uncork.js b/test/parallel/test-stream3-cork-uncork.js deleted file mode 100644 index 48875fff2e..0000000000 --- a/test/parallel/test-stream3-cork-uncork.js +++ /dev/null @@ -1,102 +0,0 @@ -"use strict"; 
- -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var Writable = stream.Writable; // Test the buffering behavior of Writable streams. -// -// The call to cork() triggers storing chunks which are flushed -// on calling uncork() in the same tick. -// -// node version target: 0.12 - -var expectedChunks = ['please', 'buffer', 'me', 'kindly']; -var inputChunks = expectedChunks.slice(0); -var seenChunks = []; -var seenEnd = false; -var w = new Writable(); // lets arrange to store the chunks - -w._write = function (chunk, encoding, cb) { - // default encoding given none was specified - assert.strictEqual(encoding, 'buffer'); - seenChunks.push(chunk); - cb(); -}; // lets record the stream end event - - -w.on('finish', function () { - seenEnd = true; -}); - -function writeChunks(remainingChunks, callback) { - var writeChunk = remainingChunks.shift(); - var writeState; - - if (writeChunk) { - setImmediate(function () { - writeState = w.write(writeChunk); // we were not told to stop writing - - assert.ok(writeState); - writeChunks(remainingChunks, callback); - }); - } else { - callback(); - } -} // do an initial write - - -w.write('stuff'); // the write was immediate - -assert.strictEqual(seenChunks.length, 1); // reset the chunks seen so far - -seenChunks = []; // trigger stream buffering - -w.cork(); // write the bufferedChunks - -writeChunks(inputChunks, function () { - // should not have seen anything yet - assert.strictEqual(seenChunks.length, 0); // trigger writing out the buffer - - w.uncork(); // buffered bytes should be seen in current tick - - assert.strictEqual(seenChunks.length, 4); // did the chunks match - - for (var i = 0, l = expectedChunks.length; i < l; i++) { - var seen = seenChunks[i]; // there was a chunk - - assert.ok(seen); - var expected = bufferShim.from(expectedChunks[i]); // it was what we expected - - assert.deepEqual(seen, expected); - } - - setImmediate(function () { - // the stream should not have been ended - assert.ok(!seenEnd); - }); -}); -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-stream3-pause-then-read.js b/test/parallel/test-stream3-pause-then-read.js deleted file mode 100644 index 66a48c91df..0000000000 --- a/test/parallel/test-stream3-pause-then-read.js +++ /dev/null @@ -1,201 +0,0 @@ -"use strict"; - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -var Readable = stream.Readable; -var Writable = stream.Writable; -var totalChunks = 100; -var chunkSize = 99; -var expectTotalData = totalChunks * chunkSize; -var expectEndingData = expectTotalData; -var r = new Readable({ - highWaterMark: 1000 -}); -var chunks = totalChunks; - -r._read = function (n) { - console.log('_read called', chunks); - if (!(chunks % 2)) setImmediate(push);else if (!(chunks % 3)) process.nextTick(push);else push(); -}; - -var totalPushed = 0; - -function push() { - var chunk = chunks-- > 0 ? bufferShim.alloc(chunkSize, 'x') : null; - - if (chunk) { - totalPushed += chunk.length; - } - - console.log('chunks', chunks); - r.push(chunk); -} - -read100(); // first we read 100 bytes - -function read100() { - readn(100, onData); -} - -function readn(n, then) { - console.error("read ".concat(n)); - expectEndingData -= n; - - (function read() { - var c = r.read(n); - console.error('c', c); - if (!c) r.once('readable', read);else { - assert.strictEqual(c.length, n); - assert(!r.readableFlowing); - then(); - } - })(); -} // then we listen to some data events - - -function onData() { - expectEndingData -= 100; - console.error('onData'); - var seen = 0; - r.on('data', function od(c) { - seen += c.length; - - if (seen >= 100) { - // seen enough - r.removeListener('data', od); - r.pause(); - - if (seen > 100) { - // oh no, seen too much! - // put the extra back. 
- var diff = seen - 100; - r.unshift(c.slice(c.length - diff)); - console.error('seen too much', seen, diff); - } // Nothing should be lost in between - - - setImmediate(pipeLittle); - } - }); -} // Just pipe 200 bytes, then unshift the extra and unpipe - - -function pipeLittle() { - expectEndingData -= 200; - console.error('pipe a little'); - var w = new Writable(); - var written = 0; - w.on('finish', function () { - assert.strictEqual(written, 200); - setImmediate(read1234); - }); - - w._write = function (chunk, encoding, cb) { - written += chunk.length; - - if (written >= 200) { - r.unpipe(w); - w.end(); - cb(); - - if (written > 200) { - var diff = written - 200; - written -= diff; - r.unshift(chunk.slice(chunk.length - diff)); - } - } else { - setImmediate(cb); - } - }; - - r.pipe(w); -} // now read 1234 more bytes - - -function read1234() { - readn(1234, resumePause); -} - -function resumePause() { - console.error('resumePause'); // don't read anything, just resume and re-pause a whole bunch - - r.resume(); - r.pause(); - r.resume(); - r.pause(); - r.resume(); - r.pause(); - r.resume(); - r.pause(); - r.resume(); - r.pause(); - setImmediate(pipe); -} - -function pipe() { - console.error('pipe the rest'); - var w = new Writable(); - var written = 0; - - w._write = function (chunk, encoding, cb) { - written += chunk.length; - cb(); - }; - - w.on('finish', function () { - console.error('written', written, totalPushed); - assert.strictEqual(written, expectEndingData); - assert.strictEqual(totalPushed, expectTotalData); - - require('tap').pass(); - }); - r.pipe(w); -} - -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file diff --git a/test/parallel/test-streams-highwatermark.js b/test/parallel/test-streams-highwatermark.js deleted file mode 100644 index b6231c5ba7..0000000000 --- a/test/parallel/test-streams-highwatermark.js +++ /dev/null @@ -1,97 +0,0 @@ -"use strict"; - -/**/ -var bufferShim = require('safe-buffer').Buffer; -/**/ - - -var common = require('../common'); - -var assert = require('assert/'); - -var stream = require('../../'); - -{ - // This test ensures that the stream implementation correctly handles values - // for highWaterMark which exceed the range of signed 32 bit integers and - // rejects invalid values. - // This number exceeds the range of 32 bit integer arithmetic but should still - // be handled correctly. 
- var ovfl = Number.MAX_SAFE_INTEGER; - var readable = stream.Readable({ - highWaterMark: ovfl - }); - assert.strictEqual(readable._readableState.highWaterMark, ovfl); - var writable = stream.Writable({ - highWaterMark: ovfl - }); - assert.strictEqual(writable._writableState.highWaterMark, ovfl); - - var _loop = function _loop() { - var invalidHwm = _arr[_i]; - - var _loop2 = function _loop2() { - var type = _arr2[_i2]; - common.expectsError(function () { - type({ - highWaterMark: invalidHwm - }); - }, { - type: TypeError, - code: 'ERR_INVALID_OPT_VALUE', - message: "The value \"".concat(invalidHwm, "\" is invalid for option \"highWaterMark\"") - }); - }; - - for (var _i2 = 0, _arr2 = [stream.Readable, stream.Writable]; _i2 < _arr2.length; _i2++) { - _loop2(); - } - }; - - for (var _i = 0, _arr = [true, false, '5', {}, -5, NaN]; _i < _arr.length; _i++) { - _loop(); - } -} -{ - // This test ensures that the push method's implementation - // correctly handles the edge case where the highWaterMark and - // the state.length are both zero - var _readable = stream.Readable({ - highWaterMark: 0 - }); - - for (var i = 0; i < 3; i++) { - var needMoreData = _readable.push(); - - assert.strictEqual(needMoreData, true); - } -} -{ - // This test ensures that the read(n) method's implementation - // correctly handles the edge case where the highWaterMark, state.length - // and n are all zero - var _readable2 = stream.Readable({ - highWaterMark: 0 - }); - - _readable2._read = common.mustCall(); - - _readable2.read(0); -} -; - -(function () { - var t = require('tap'); - - t.pass('sync run'); -})(); - -var _list = process.listeners('uncaughtException'); - -process.removeAllListeners('uncaughtException'); - -_list.pop(); - -_list.forEach(function (e) { - return process.on('uncaughtException', e); -}); \ No newline at end of file From 90c25a5bef9b65cd87ec2d5ea01d49c3f0261959 Mon Sep 17 00:00:00 2001 From: Shogun Date: Wed, 13 Apr 2022 14:46:16 +0200 Subject: [PATCH 02/19] feat: Update to Node 17.9.0. 
--- .airtap.yml | 6 +- .github/workflows/ci.yml | 6 +- .github/workflows/sauce.yml | 8 +- .gitignore | 5 +- README.md | 4 +- build/build.mjs | 38 +- build/files.mjs | 34 +- build/footers.mjs | 73 +- build/replacements.mjs | 411 ++- c8.json | 10 +- examples/capslock-type.cjs | 2 + examples/typer.mjs | 3 +- lib/_stream_duplex.js | 5 + lib/_stream_passthrough.js | 5 + lib/_stream_readable.js | 5 + lib/_stream_transform.js | 5 + lib/_stream_wrap.js | 5 + lib/_stream_writable.js | 5 + lib/browser.js | 38 + lib/index.js | 71 + lib/internal/errors.js | 1663 ++++++++++++ lib/internal/inspect-browser.js | 2299 +++++++++++++++++ lib/internal/inspect.js | 2299 +++++++++++++++++ lib/internal/js_stream_socket.js | 242 ++ lib/internal/primordials.js | 446 ++++ lib/internal/streams/add-abort-signal.js | 46 + lib/internal/streams/buffer_list.js | 181 ++ lib/internal/streams/compose.js | 174 ++ lib/internal/streams/destroy.js | 368 +++ lib/internal/streams/duplex.js | 146 ++ lib/internal/streams/duplexify.js | 388 +++ lib/internal/streams/end-of-stream.js | 252 ++ lib/internal/streams/from.js | 113 + lib/internal/streams/lazy_transform.js | 63 + lib/internal/streams/legacy.js | 114 + lib/internal/streams/operators.js | 409 +++ lib/internal/streams/passthrough.js | 47 + lib/internal/streams/pipeline.js | 357 +++ lib/internal/streams/readable.js | 1407 ++++++++++ lib/internal/streams/state.js | 36 + lib/internal/streams/transform.js | 247 ++ lib/internal/streams/utils.js | 260 ++ lib/internal/streams/writable.js | 897 +++++++ lib/internal/validators.js | 276 ++ lib/stream.js | 137 + lib/stream/promises.js | 41 + lib/util.js | 78 + package.json | 14 +- src/browser.js | 42 +- src/index.js | 66 +- .../test-stream-pipe-error-handling.js | 16 +- .../browser/test-stream-unshift-read-race.js | 29 +- .../test-stream2-pipe-error-handling.js | 16 +- .../test-stream2-readable-legacy-drain.js | 7 - src/test/browser/test-stream2-transform.js | 5 +- src/test/browser/test-stream2-writable.js | 2 +- src/test/ours/test-errors.js | 20 +- ...lex-fake-timers.js => test-fake-timers.js} | 4 +- src/util.js | 79 +- src/uv-browser.js | 93 - tap.yml | 3 +- test/browser/test-stream-big-packet.js | 68 + test/browser/test-stream-big-push.js | 70 + test/browser/test-stream-duplex.js | 36 + test/browser/test-stream-end-paused.js | 30 + test/browser/test-stream-finished.js | 65 + test/browser/test-stream-ispaused.js | 27 + test/browser/test-stream-pipe-after-end.js | 67 + .../browser/test-stream-pipe-cleanup-pause.js | 46 + test/browser/test-stream-pipe-cleanup.js | 115 + .../test-stream-pipe-error-handling.js | 105 + test/browser/test-stream-pipe-event.js | 34 + .../test-stream-pipe-without-listenerCount.js | 20 + test/browser/test-stream-pipeline.js | 109 + test/browser/test-stream-push-order.js | 32 + test/browser/test-stream-push-strings.js | 55 + ...stream-readable-constructor-set-methods.js | 23 + test/browser/test-stream-readable-event.js | 105 + test/browser/test-stream-sync-write.js | 46 + ...tream-transform-constructor-set-methods.js | 35 + ...tream-transform-objectmode-falsey-value.js | 35 + .../test-stream-transform-split-objectmode.js | 57 + .../test-stream-unshift-empty-chunk.js | 62 + test/browser/test-stream-unshift-read-race.js | 121 + ...stream-writable-change-default-encoding.js | 69 + ...stream-writable-constructor-set-methods.js | 38 + .../test-stream-writable-decoded-encoding.js | 49 + test/browser/test-stream-writev.js | 101 + ...est-stream2-base64-single-char-read-end.js | 39 + 
test/browser/test-stream2-compatibility.js | 34 + test/browser/test-stream2-large-read-stall.js | 60 + test/browser/test-stream2-objects.js | 304 +++ .../test-stream2-pipe-error-handling.js | 89 + .../test-stream2-pipe-error-once-listener.js | 39 + test/browser/test-stream2-push.js | 117 + ...st-stream2-readable-empty-buffer-no-eof.js | 93 + .../test-stream2-readable-from-list.js | 65 + .../test-stream2-readable-legacy-drain.js | 45 + .../test-stream2-readable-non-empty-end.js | 58 + .../test-stream2-readable-wrap-empty.js | 23 + test/browser/test-stream2-readable-wrap.js | 94 + test/browser/test-stream2-set-encoding.js | 335 +++ test/browser/test-stream2-transform.js | 484 ++++ test/browser/test-stream2-unpipe-drain.js | 63 + test/browser/test-stream2-writable.js | 430 +++ test/browser/test-stream3-pause-then-read.js | 147 ++ test/common/fixtures.js | 38 + test/common/fixtures.mjs | 17 + test/common/index.js | 952 +++++++ test/common/index.mjs | 97 + test/common/tmpdir.js | 61 + test/fixtures/elipses.txt | 1 + test/fixtures/empty-with-bom.txt | 1 + test/fixtures/empty.txt | 0 test/fixtures/file-to-read-with-bom.txt | 3 + test/fixtures/file-to-read-without-bom.txt | 3 + test/fixtures/outside.txt | 2 + test/fixtures/readfile_pipe_test.txt | 5 + test/fixtures/tls-session-ticket.txt | 23 + test/fixtures/x.txt | 1 + test/fixtures/x1024.txt | 1 + test/ours/test-errors.js | 132 + test/ours/test-fake-timers.js | 40 + test/ours/test-stream-sync-write.js | 44 + .../test-readable-from-iterator-closing.js | 212 ++ test/parallel/test-readable-from.js | 238 ++ test/parallel/test-readable-large-hwm.js | 42 + test/parallel/test-readable-single-end.js | 31 + test/parallel/test-stream-add-abort-signal.js | 42 + test/parallel/test-stream-aliases-legacy.js | 29 + test/parallel/test-stream-asIndexedPairs.mjs | 64 + test/parallel/test-stream-auto-destroy.js | 127 + ...riters-in-synchronously-recursion-write.js | 43 + test/parallel/test-stream-backpressure.js | 54 + ...-base-prototype-accessors-enumerability.js | 36 + .../parallel/test-stream-base-typechecking.js | 33 + test/parallel/test-stream-big-packet.js | 80 + test/parallel/test-stream-big-push.js | 89 + test/parallel/test-stream-buffer-list.js | 99 + test/parallel/test-stream-catch-rejections.js | 66 + test/parallel/test-stream-compose.js | 440 ++++ .../test-stream-construct-async-error.js | 255 ++ test/parallel/test-stream-construct.js | 295 +++ .../test-stream-decoder-objectmode.js | 35 + .../test-stream-destroy-event-order.js | 39 + test/parallel/test-stream-drop-take.js | 117 + test/parallel/test-stream-duplex-destroy.js | 272 ++ test/parallel/test-stream-duplex-end.js | 56 + test/parallel/test-stream-duplex-from.js | 295 +++ test/parallel/test-stream-duplex-props.js | 46 + .../test-stream-duplex-readable-end.js | 44 + .../test-stream-duplex-readable-writable.js | 61 + .../test-stream-duplex-writable-finished.js | 45 + test/parallel/test-stream-duplex.js | 70 + test/parallel/test-stream-end-paused.js | 65 + test/parallel/test-stream-error-once.js | 34 + test/parallel/test-stream-events-prepend.js | 41 + test/parallel/test-stream-filter.js | 199 ++ test/parallel/test-stream-finished.js | 673 +++++ test/parallel/test-stream-flatMap.js | 154 ++ test/parallel/test-stream-forEach.js | 154 ++ test/parallel/test-stream-inheritance.js | 78 + test/parallel/test-stream-ispaused.js | 59 + ...-stream-iterator-helpers-test262-tests.mjs | 179 ++ .../test-stream-objectmode-undefined.js | 59 + .../test-stream-once-readable-pipe.js | 76 + 
.../parallel/test-stream-passthrough-drain.js | 23 + test/parallel/test-stream-pipe-after-end.js | 84 + ...t-stream-pipe-await-drain-manual-resume.js | 90 + ...tream-pipe-await-drain-push-while-write.js | 51 + test/parallel/test-stream-pipe-await-drain.js | 82 + .../test-stream-pipe-cleanup-pause.js | 52 + test/parallel/test-stream-pipe-cleanup.js | 140 + .../test-stream-pipe-error-handling.js | 139 + .../test-stream-pipe-error-unhandled.js | 36 + test/parallel/test-stream-pipe-event.js | 66 + .../test-stream-pipe-flow-after-unpipe.js | 44 + test/parallel/test-stream-pipe-flow.js | 105 + .../test-stream-pipe-manual-resume.js | 50 + .../test-stream-pipe-multiple-pipes.js | 66 + test/parallel/test-stream-pipe-needDrain.js | 46 + ...test-stream-pipe-same-destination-twice.js | 93 + .../test-stream-pipe-unpipe-streams.js | 111 + .../test-stream-pipe-without-listenerCount.js | 32 + .../test-stream-pipeline-async-iterator.js | 46 + test/parallel/test-stream-pipeline-http2.js | 51 + test/parallel/test-stream-pipeline-process.js | 41 + ...t-stream-pipeline-queued-end-in-destroy.js | 54 + .../parallel/test-stream-pipeline-uncaught.js | 37 + .../test-stream-pipeline-with-empty-string.js | 33 + test/parallel/test-stream-preprocess.js | 75 + test/parallel/test-stream-promises.js | 118 + test/parallel/test-stream-push-order.js | 67 + test/parallel/test-stream-push-strings.js | 82 + test/parallel/test-stream-readable-aborted.js | 81 + ...t-stream-readable-add-chunk-during-data.js | 36 + ...stream-readable-constructor-set-methods.js | 26 + test/parallel/test-stream-readable-data.js | 34 + test/parallel/test-stream-readable-destroy.js | 418 +++ test/parallel/test-stream-readable-didRead.js | 126 + ...eam-readable-emit-readable-short-stream.js | 161 ++ .../test-stream-readable-emittedReadable.js | 88 + .../test-stream-readable-end-destroyed.js | 32 + test/parallel/test-stream-readable-ended.js | 61 + .../test-stream-readable-error-end.js | 30 + test/parallel/test-stream-readable-event.js | 143 + .../test-stream-readable-flow-recursion.js | 92 + .../test-stream-readable-hwm-0-async.js | 42 + ...test-stream-readable-hwm-0-no-flow-data.js | 119 + test/parallel/test-stream-readable-hwm-0.js | 45 + .../test-stream-readable-infinite-read.js | 47 + .../test-stream-readable-invalid-chunk.js | 49 + .../test-stream-readable-needReadable.js | 114 + .../test-stream-readable-next-no-null.js | 34 + ...st-stream-readable-no-unneeded-readable.js | 77 + ...stream-readable-object-multi-push-async.js | 198 ++ .../test-stream-readable-pause-and-resume.js | 89 + ...st-stream-readable-readable-then-resume.js | 46 + .../parallel/test-stream-readable-readable.js | 60 + ...est-stream-readable-reading-readingMore.js | 186 ++ .../test-stream-readable-resume-hwm.js | 36 + .../test-stream-readable-resumeScheduled.js | 80 + ...m-readable-setEncoding-existing-buffers.js | 75 + .../test-stream-readable-setEncoding-null.js | 30 + .../test-stream-readable-unpipe-resume.js | 35 + test/parallel/test-stream-readable-unshift.js | 185 ++ ...tream-readable-with-unimplemented-_read.js | 28 + .../test-stream-readableListening-state.js | 49 + test/parallel/test-stream-reduce.js | 147 ++ test/parallel/test-stream-some-find-every.mjs | 183 ++ test/parallel/test-stream-toArray.js | 108 + .../test-stream-transform-callback-twice.js | 29 + ...tream-transform-constructor-set-methods.js | 58 + .../parallel/test-stream-transform-destroy.js | 158 ++ .../test-stream-transform-final-sync.js | 125 + test/parallel/test-stream-transform-final.js | 127 + 
.../test-stream-transform-flush-data.js | 43 + ...tream-transform-objectmode-falsey-value.js | 66 + ...st-stream-transform-split-highwatermark.js | 107 + .../test-stream-transform-split-objectmode.js | 96 + test/parallel/test-stream-uint8array.js | 116 + test/parallel/test-stream-unpipe-event.js | 100 + .../test-stream-unshift-empty-chunk.js | 95 + .../parallel/test-stream-unshift-read-race.js | 143 + test/parallel/test-stream-wrap-drain.js | 65 + test/parallel/test-stream-wrap-encoding.js | 58 + test/parallel/test-stream-wrap.js | 48 + .../test-stream-writable-callback-twice.js | 29 + ...stream-writable-change-default-encoding.js | 93 + .../test-stream-writable-clear-buffer.js | 50 + ...stream-writable-constructor-set-methods.js | 56 + .../test-stream-writable-decoded-encoding.js | 73 + test/parallel/test-stream-writable-destroy.js | 506 ++++ .../test-stream-writable-end-cb-error.js | 93 + .../test-stream-writable-end-cb-uncaught.js | 39 + .../test-stream-writable-end-multiple.js | 37 + .../test-stream-writable-ended-state.js | 47 + .../test-stream-writable-final-async.js | 48 + .../test-stream-writable-final-destroy.js | 36 + .../test-stream-writable-final-throw.js | 38 + .../test-stream-writable-finish-destroyed.js | 48 + .../test-stream-writable-finished-state.js | 37 + .../parallel/test-stream-writable-finished.js | 114 + .../test-stream-writable-invalid-chunk.js | 51 + .../test-stream-writable-needdrain-state.js | 40 + test/parallel/test-stream-writable-null.js | 62 + .../test-stream-writable-properties.js | 37 + .../test-stream-writable-samecb-singletick.js | 51 + .../parallel/test-stream-writable-writable.js | 63 + .../test-stream-writable-write-cb-error.js | 73 + .../test-stream-writable-write-cb-twice.js | 67 + .../test-stream-writable-write-error.js | 90 + ...est-stream-writable-write-writev-finish.js | 167 ++ .../test-stream-writableState-ending.js | 52 + ...ableState-uncorked-bufferedRequestCount.js | 72 + test/parallel/test-stream-write-destroy.js | 83 + test/parallel/test-stream-write-drain.js | 31 + test/parallel/test-stream-write-final.js | 39 + test/parallel/test-stream-writev.js | 145 ++ ...est-stream2-base64-single-char-read-end.js | 71 + test/parallel/test-stream2-basic.js | 460 ++++ test/parallel/test-stream2-compatibility.js | 85 + test/parallel/test-stream2-decode-partial.js | 38 + .../test-stream2-finish-pipe-error.js | 35 + test/parallel/test-stream2-finish-pipe.js | 59 + .../test-stream2-httpclient-response-end.js | 40 + .../parallel/test-stream2-large-read-stall.js | 89 + test/parallel/test-stream2-objects.js | 312 +++ .../test-stream2-pipe-error-handling.js | 121 + .../test-stream2-pipe-error-once-listener.js | 68 + test/parallel/test-stream2-push.js | 151 ++ test/parallel/test-stream2-read-sync-stack.js | 61 + ...st-stream2-readable-empty-buffer-no-eof.js | 132 + .../test-stream2-readable-from-list.js | 112 + .../test-stream2-readable-legacy-drain.js | 70 + .../test-stream2-readable-non-empty-end.js | 87 + .../test-stream2-readable-wrap-destroy.js | 42 + .../test-stream2-readable-wrap-empty.js | 53 + .../test-stream2-readable-wrap-error.js | 52 + test/parallel/test-stream2-readable-wrap.js | 115 + test/parallel/test-stream2-set-encoding.js | 338 +++ test/parallel/test-stream2-transform.js | 485 ++++ test/parallel/test-stream2-unpipe-drain.js | 87 + test/parallel/test-stream2-unpipe-leak.js | 88 + test/parallel/test-stream2-writable.js | 474 ++++ test/parallel/test-stream3-cork-end.js | 106 + test/parallel/test-stream3-cork-uncork.js | 101 + 
test/parallel/test-stream3-pause-then-read.js | 185 ++ test/parallel/test-streams-highwatermark.js | 102 + test/test-browser.js | 84 + 310 files changed, 37922 insertions(+), 393 deletions(-) create mode 100644 lib/_stream_duplex.js create mode 100644 lib/_stream_passthrough.js create mode 100644 lib/_stream_readable.js create mode 100644 lib/_stream_transform.js create mode 100644 lib/_stream_wrap.js create mode 100644 lib/_stream_writable.js create mode 100644 lib/browser.js create mode 100644 lib/index.js create mode 100644 lib/internal/errors.js create mode 100644 lib/internal/inspect-browser.js create mode 100644 lib/internal/inspect.js create mode 100644 lib/internal/js_stream_socket.js create mode 100644 lib/internal/primordials.js create mode 100644 lib/internal/streams/add-abort-signal.js create mode 100644 lib/internal/streams/buffer_list.js create mode 100644 lib/internal/streams/compose.js create mode 100644 lib/internal/streams/destroy.js create mode 100644 lib/internal/streams/duplex.js create mode 100644 lib/internal/streams/duplexify.js create mode 100644 lib/internal/streams/end-of-stream.js create mode 100644 lib/internal/streams/from.js create mode 100644 lib/internal/streams/lazy_transform.js create mode 100644 lib/internal/streams/legacy.js create mode 100644 lib/internal/streams/operators.js create mode 100644 lib/internal/streams/passthrough.js create mode 100644 lib/internal/streams/pipeline.js create mode 100644 lib/internal/streams/readable.js create mode 100644 lib/internal/streams/state.js create mode 100644 lib/internal/streams/transform.js create mode 100644 lib/internal/streams/utils.js create mode 100644 lib/internal/streams/writable.js create mode 100644 lib/internal/validators.js create mode 100644 lib/stream.js create mode 100644 lib/stream/promises.js create mode 100644 lib/util.js rename src/test/ours/{test-lolex-fake-timers.js => test-fake-timers.js} (85%) delete mode 100644 src/uv-browser.js create mode 100644 test/browser/test-stream-big-packet.js create mode 100644 test/browser/test-stream-big-push.js create mode 100644 test/browser/test-stream-duplex.js create mode 100644 test/browser/test-stream-end-paused.js create mode 100644 test/browser/test-stream-finished.js create mode 100644 test/browser/test-stream-ispaused.js create mode 100644 test/browser/test-stream-pipe-after-end.js create mode 100644 test/browser/test-stream-pipe-cleanup-pause.js create mode 100644 test/browser/test-stream-pipe-cleanup.js create mode 100644 test/browser/test-stream-pipe-error-handling.js create mode 100644 test/browser/test-stream-pipe-event.js create mode 100644 test/browser/test-stream-pipe-without-listenerCount.js create mode 100644 test/browser/test-stream-pipeline.js create mode 100644 test/browser/test-stream-push-order.js create mode 100644 test/browser/test-stream-push-strings.js create mode 100644 test/browser/test-stream-readable-constructor-set-methods.js create mode 100644 test/browser/test-stream-readable-event.js create mode 100644 test/browser/test-stream-sync-write.js create mode 100644 test/browser/test-stream-transform-constructor-set-methods.js create mode 100644 test/browser/test-stream-transform-objectmode-falsey-value.js create mode 100644 test/browser/test-stream-transform-split-objectmode.js create mode 100644 test/browser/test-stream-unshift-empty-chunk.js create mode 100644 test/browser/test-stream-unshift-read-race.js create mode 100644 test/browser/test-stream-writable-change-default-encoding.js create mode 100644 
test/browser/test-stream-writable-constructor-set-methods.js create mode 100644 test/browser/test-stream-writable-decoded-encoding.js create mode 100644 test/browser/test-stream-writev.js create mode 100644 test/browser/test-stream2-base64-single-char-read-end.js create mode 100644 test/browser/test-stream2-compatibility.js create mode 100644 test/browser/test-stream2-large-read-stall.js create mode 100644 test/browser/test-stream2-objects.js create mode 100644 test/browser/test-stream2-pipe-error-handling.js create mode 100644 test/browser/test-stream2-pipe-error-once-listener.js create mode 100644 test/browser/test-stream2-push.js create mode 100644 test/browser/test-stream2-readable-empty-buffer-no-eof.js create mode 100644 test/browser/test-stream2-readable-from-list.js create mode 100644 test/browser/test-stream2-readable-legacy-drain.js create mode 100644 test/browser/test-stream2-readable-non-empty-end.js create mode 100644 test/browser/test-stream2-readable-wrap-empty.js create mode 100644 test/browser/test-stream2-readable-wrap.js create mode 100644 test/browser/test-stream2-set-encoding.js create mode 100644 test/browser/test-stream2-transform.js create mode 100644 test/browser/test-stream2-unpipe-drain.js create mode 100644 test/browser/test-stream2-writable.js create mode 100644 test/browser/test-stream3-pause-then-read.js create mode 100644 test/common/fixtures.js create mode 100644 test/common/fixtures.mjs create mode 100644 test/common/index.js create mode 100644 test/common/index.mjs create mode 100644 test/common/tmpdir.js create mode 100644 test/fixtures/elipses.txt create mode 100644 test/fixtures/empty-with-bom.txt create mode 100644 test/fixtures/empty.txt create mode 100644 test/fixtures/file-to-read-with-bom.txt create mode 100644 test/fixtures/file-to-read-without-bom.txt create mode 100644 test/fixtures/outside.txt create mode 100644 test/fixtures/readfile_pipe_test.txt create mode 100644 test/fixtures/tls-session-ticket.txt create mode 100644 test/fixtures/x.txt create mode 100644 test/fixtures/x1024.txt create mode 100644 test/ours/test-errors.js create mode 100644 test/ours/test-fake-timers.js create mode 100644 test/ours/test-stream-sync-write.js create mode 100644 test/parallel/test-readable-from-iterator-closing.js create mode 100644 test/parallel/test-readable-from.js create mode 100644 test/parallel/test-readable-large-hwm.js create mode 100644 test/parallel/test-readable-single-end.js create mode 100644 test/parallel/test-stream-add-abort-signal.js create mode 100644 test/parallel/test-stream-aliases-legacy.js create mode 100644 test/parallel/test-stream-asIndexedPairs.mjs create mode 100644 test/parallel/test-stream-auto-destroy.js create mode 100644 test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js create mode 100644 test/parallel/test-stream-backpressure.js create mode 100644 test/parallel/test-stream-base-prototype-accessors-enumerability.js create mode 100644 test/parallel/test-stream-base-typechecking.js create mode 100644 test/parallel/test-stream-big-packet.js create mode 100644 test/parallel/test-stream-big-push.js create mode 100644 test/parallel/test-stream-buffer-list.js create mode 100644 test/parallel/test-stream-catch-rejections.js create mode 100644 test/parallel/test-stream-compose.js create mode 100644 test/parallel/test-stream-construct-async-error.js create mode 100644 test/parallel/test-stream-construct.js create mode 100644 test/parallel/test-stream-decoder-objectmode.js create mode 100644 
test/parallel/test-stream-destroy-event-order.js create mode 100644 test/parallel/test-stream-drop-take.js create mode 100644 test/parallel/test-stream-duplex-destroy.js create mode 100644 test/parallel/test-stream-duplex-end.js create mode 100644 test/parallel/test-stream-duplex-from.js create mode 100644 test/parallel/test-stream-duplex-props.js create mode 100644 test/parallel/test-stream-duplex-readable-end.js create mode 100644 test/parallel/test-stream-duplex-readable-writable.js create mode 100644 test/parallel/test-stream-duplex-writable-finished.js create mode 100644 test/parallel/test-stream-duplex.js create mode 100644 test/parallel/test-stream-end-paused.js create mode 100644 test/parallel/test-stream-error-once.js create mode 100644 test/parallel/test-stream-events-prepend.js create mode 100644 test/parallel/test-stream-filter.js create mode 100644 test/parallel/test-stream-finished.js create mode 100644 test/parallel/test-stream-flatMap.js create mode 100644 test/parallel/test-stream-forEach.js create mode 100644 test/parallel/test-stream-inheritance.js create mode 100644 test/parallel/test-stream-ispaused.js create mode 100644 test/parallel/test-stream-iterator-helpers-test262-tests.mjs create mode 100644 test/parallel/test-stream-objectmode-undefined.js create mode 100644 test/parallel/test-stream-once-readable-pipe.js create mode 100644 test/parallel/test-stream-passthrough-drain.js create mode 100644 test/parallel/test-stream-pipe-after-end.js create mode 100644 test/parallel/test-stream-pipe-await-drain-manual-resume.js create mode 100644 test/parallel/test-stream-pipe-await-drain-push-while-write.js create mode 100644 test/parallel/test-stream-pipe-await-drain.js create mode 100644 test/parallel/test-stream-pipe-cleanup-pause.js create mode 100644 test/parallel/test-stream-pipe-cleanup.js create mode 100644 test/parallel/test-stream-pipe-error-handling.js create mode 100644 test/parallel/test-stream-pipe-error-unhandled.js create mode 100644 test/parallel/test-stream-pipe-event.js create mode 100644 test/parallel/test-stream-pipe-flow-after-unpipe.js create mode 100644 test/parallel/test-stream-pipe-flow.js create mode 100644 test/parallel/test-stream-pipe-manual-resume.js create mode 100644 test/parallel/test-stream-pipe-multiple-pipes.js create mode 100644 test/parallel/test-stream-pipe-needDrain.js create mode 100644 test/parallel/test-stream-pipe-same-destination-twice.js create mode 100644 test/parallel/test-stream-pipe-unpipe-streams.js create mode 100644 test/parallel/test-stream-pipe-without-listenerCount.js create mode 100644 test/parallel/test-stream-pipeline-async-iterator.js create mode 100644 test/parallel/test-stream-pipeline-http2.js create mode 100644 test/parallel/test-stream-pipeline-process.js create mode 100644 test/parallel/test-stream-pipeline-queued-end-in-destroy.js create mode 100644 test/parallel/test-stream-pipeline-uncaught.js create mode 100644 test/parallel/test-stream-pipeline-with-empty-string.js create mode 100644 test/parallel/test-stream-preprocess.js create mode 100644 test/parallel/test-stream-promises.js create mode 100644 test/parallel/test-stream-push-order.js create mode 100644 test/parallel/test-stream-push-strings.js create mode 100644 test/parallel/test-stream-readable-aborted.js create mode 100644 test/parallel/test-stream-readable-add-chunk-during-data.js create mode 100644 test/parallel/test-stream-readable-constructor-set-methods.js create mode 100644 test/parallel/test-stream-readable-data.js create mode 100644 
test/parallel/test-stream-readable-destroy.js create mode 100644 test/parallel/test-stream-readable-didRead.js create mode 100644 test/parallel/test-stream-readable-emit-readable-short-stream.js create mode 100644 test/parallel/test-stream-readable-emittedReadable.js create mode 100644 test/parallel/test-stream-readable-end-destroyed.js create mode 100644 test/parallel/test-stream-readable-ended.js create mode 100644 test/parallel/test-stream-readable-error-end.js create mode 100644 test/parallel/test-stream-readable-event.js create mode 100644 test/parallel/test-stream-readable-flow-recursion.js create mode 100644 test/parallel/test-stream-readable-hwm-0-async.js create mode 100644 test/parallel/test-stream-readable-hwm-0-no-flow-data.js create mode 100644 test/parallel/test-stream-readable-hwm-0.js create mode 100644 test/parallel/test-stream-readable-infinite-read.js create mode 100644 test/parallel/test-stream-readable-invalid-chunk.js create mode 100644 test/parallel/test-stream-readable-needReadable.js create mode 100644 test/parallel/test-stream-readable-next-no-null.js create mode 100644 test/parallel/test-stream-readable-no-unneeded-readable.js create mode 100644 test/parallel/test-stream-readable-object-multi-push-async.js create mode 100644 test/parallel/test-stream-readable-pause-and-resume.js create mode 100644 test/parallel/test-stream-readable-readable-then-resume.js create mode 100644 test/parallel/test-stream-readable-readable.js create mode 100644 test/parallel/test-stream-readable-reading-readingMore.js create mode 100644 test/parallel/test-stream-readable-resume-hwm.js create mode 100644 test/parallel/test-stream-readable-resumeScheduled.js create mode 100644 test/parallel/test-stream-readable-setEncoding-existing-buffers.js create mode 100644 test/parallel/test-stream-readable-setEncoding-null.js create mode 100644 test/parallel/test-stream-readable-unpipe-resume.js create mode 100644 test/parallel/test-stream-readable-unshift.js create mode 100644 test/parallel/test-stream-readable-with-unimplemented-_read.js create mode 100644 test/parallel/test-stream-readableListening-state.js create mode 100644 test/parallel/test-stream-reduce.js create mode 100644 test/parallel/test-stream-some-find-every.mjs create mode 100644 test/parallel/test-stream-toArray.js create mode 100644 test/parallel/test-stream-transform-callback-twice.js create mode 100644 test/parallel/test-stream-transform-constructor-set-methods.js create mode 100644 test/parallel/test-stream-transform-destroy.js create mode 100644 test/parallel/test-stream-transform-final-sync.js create mode 100644 test/parallel/test-stream-transform-final.js create mode 100644 test/parallel/test-stream-transform-flush-data.js create mode 100644 test/parallel/test-stream-transform-objectmode-falsey-value.js create mode 100644 test/parallel/test-stream-transform-split-highwatermark.js create mode 100644 test/parallel/test-stream-transform-split-objectmode.js create mode 100644 test/parallel/test-stream-uint8array.js create mode 100644 test/parallel/test-stream-unpipe-event.js create mode 100644 test/parallel/test-stream-unshift-empty-chunk.js create mode 100644 test/parallel/test-stream-unshift-read-race.js create mode 100644 test/parallel/test-stream-wrap-drain.js create mode 100644 test/parallel/test-stream-wrap-encoding.js create mode 100644 test/parallel/test-stream-wrap.js create mode 100644 test/parallel/test-stream-writable-callback-twice.js create mode 100644 test/parallel/test-stream-writable-change-default-encoding.js 
create mode 100644 test/parallel/test-stream-writable-clear-buffer.js create mode 100644 test/parallel/test-stream-writable-constructor-set-methods.js create mode 100644 test/parallel/test-stream-writable-decoded-encoding.js create mode 100644 test/parallel/test-stream-writable-destroy.js create mode 100644 test/parallel/test-stream-writable-end-cb-error.js create mode 100644 test/parallel/test-stream-writable-end-cb-uncaught.js create mode 100644 test/parallel/test-stream-writable-end-multiple.js create mode 100644 test/parallel/test-stream-writable-ended-state.js create mode 100644 test/parallel/test-stream-writable-final-async.js create mode 100644 test/parallel/test-stream-writable-final-destroy.js create mode 100644 test/parallel/test-stream-writable-final-throw.js create mode 100644 test/parallel/test-stream-writable-finish-destroyed.js create mode 100644 test/parallel/test-stream-writable-finished-state.js create mode 100644 test/parallel/test-stream-writable-finished.js create mode 100644 test/parallel/test-stream-writable-invalid-chunk.js create mode 100644 test/parallel/test-stream-writable-needdrain-state.js create mode 100644 test/parallel/test-stream-writable-null.js create mode 100644 test/parallel/test-stream-writable-properties.js create mode 100644 test/parallel/test-stream-writable-samecb-singletick.js create mode 100644 test/parallel/test-stream-writable-writable.js create mode 100644 test/parallel/test-stream-writable-write-cb-error.js create mode 100644 test/parallel/test-stream-writable-write-cb-twice.js create mode 100644 test/parallel/test-stream-writable-write-error.js create mode 100644 test/parallel/test-stream-writable-write-writev-finish.js create mode 100644 test/parallel/test-stream-writableState-ending.js create mode 100644 test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js create mode 100644 test/parallel/test-stream-write-destroy.js create mode 100644 test/parallel/test-stream-write-drain.js create mode 100644 test/parallel/test-stream-write-final.js create mode 100644 test/parallel/test-stream-writev.js create mode 100644 test/parallel/test-stream2-base64-single-char-read-end.js create mode 100644 test/parallel/test-stream2-basic.js create mode 100644 test/parallel/test-stream2-compatibility.js create mode 100644 test/parallel/test-stream2-decode-partial.js create mode 100644 test/parallel/test-stream2-finish-pipe-error.js create mode 100644 test/parallel/test-stream2-finish-pipe.js create mode 100644 test/parallel/test-stream2-httpclient-response-end.js create mode 100644 test/parallel/test-stream2-large-read-stall.js create mode 100644 test/parallel/test-stream2-objects.js create mode 100644 test/parallel/test-stream2-pipe-error-handling.js create mode 100644 test/parallel/test-stream2-pipe-error-once-listener.js create mode 100644 test/parallel/test-stream2-push.js create mode 100644 test/parallel/test-stream2-read-sync-stack.js create mode 100644 test/parallel/test-stream2-readable-empty-buffer-no-eof.js create mode 100644 test/parallel/test-stream2-readable-from-list.js create mode 100644 test/parallel/test-stream2-readable-legacy-drain.js create mode 100644 test/parallel/test-stream2-readable-non-empty-end.js create mode 100644 test/parallel/test-stream2-readable-wrap-destroy.js create mode 100644 test/parallel/test-stream2-readable-wrap-empty.js create mode 100644 test/parallel/test-stream2-readable-wrap-error.js create mode 100644 test/parallel/test-stream2-readable-wrap.js create mode 100644 
test/parallel/test-stream2-set-encoding.js create mode 100644 test/parallel/test-stream2-transform.js create mode 100644 test/parallel/test-stream2-unpipe-drain.js create mode 100644 test/parallel/test-stream2-unpipe-leak.js create mode 100644 test/parallel/test-stream2-writable.js create mode 100644 test/parallel/test-stream3-cork-end.js create mode 100644 test/parallel/test-stream3-cork-uncork.js create mode 100644 test/parallel/test-stream3-pause-then-read.js create mode 100644 test/parallel/test-streams-highwatermark.js create mode 100644 test/test-browser.js diff --git a/.airtap.yml b/.airtap.yml index 56fcbc7a04..6fb5d5e957 100644 --- a/.airtap.yml +++ b/.airtap.yml @@ -1,6 +1,6 @@ providers: - airtap-sauce - +sauce_connect: true browsers: - name: chrome - name: firefox @@ -15,3 +15,7 @@ presets: - name: chromium - name: firefox - name: webkit + - name: chromium + options: + launch: + channel: msedge diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f643098724..81a27407a1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,11 +9,11 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macOS-latest] - node-version: [6.x, 8.x, 10.x, 12.x, 14.x] + node-version: [14.x, 16.x, 17.x] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v3 - name: Use Node.js ${{ matrix.node-version }} on ${{ matrix.os }} - uses: actions/setup-node@v1 + uses: actions/setup-node@v3 with: node-version: ${{ matrix.node-version }} - name: npm install diff --git a/.github/workflows/sauce.yml b/.github/workflows/sauce.yml index 07d7d65586..5b63a693d1 100644 --- a/.github/workflows/sauce.yml +++ b/.github/workflows/sauce.yml @@ -5,11 +5,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v1 + uses: actions/checkout@v3 - name: Set up node - uses: actions/setup-node@v1 + uses: actions/setup-node@v3 with: - node-version: 14 + node-version: 16 - name: Install run: npm install env: @@ -18,7 +18,7 @@ jobs: - name: Add host run: echo "127.0.0.1 airtap.local" | sudo tee -a /etc/hosts - name: Test - run: npm run test-browsers + run: npm run test:browsers env: SAUCE_USERNAME: ${{ secrets.SAUCE_USERNAME }} SAUCE_ACCESS_KEY: ${{ secrets.SAUCE_ACCESS_KEY }} diff --git a/.gitignore b/.gitignore index 2cb19adc14..0b6469a5c9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,4 @@ -./lib/ +coverage/ node_modules/ node-*.tar.gz -./test/ -package-lock.json \ No newline at end of file +package-lock.json diff --git a/README.md b/README.md index 6f132562cd..f166259574 100644 --- a/README.md +++ b/README.md @@ -13,9 +13,9 @@ npm install --save readable-stream ``` -This package is a mirror of the streams implementations in Node.js. +This package is a mirror of the streams implementations in Node.js 17.9.0. -Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.19.0/docs/api/stream.html). +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v17.9.0/docs/api/stream.html). If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** _only_ and avoid the _"stream"_ module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). 
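A minimal sketch, assuming plain Node.js, of the `baseMatcher` regex change that appears in the build/build.mjs hunk below: in `/^lib|test/` the `^` anchor binds only to the `lib` alternative, so any path merely containing `test` matches, whereas `/^(?:lib|test)/` requires the path to start with either prefix.

// Illustration of the two matchers shown in the build/build.mjs hunk below (hypothetical paths).
const loose = /^lib|test/        // parsed as (^lib) | (test)
const anchored = /^(?:lib|test)/ // both alternatives anchored to the start of the path

console.log(loose.test('src/test/util.js'))        // true  - 'test' matches anywhere in the path
console.log(anchored.test('src/test/util.js'))     // false - path does not start with lib/ or test/
console.log(anchored.test('test/common/index.js')) // true
console.log(anchored.test('lib/stream.js'))        // true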
diff --git a/build/build.mjs b/build/build.mjs index b4b3245075..3080c7ecb0 100644 --- a/build/build.mjs +++ b/build/build.mjs @@ -1,6 +1,7 @@ import { createReadStream } from 'node:fs' -import { copyFile, cp, mkdir, rm, writeFile } from 'node:fs/promises' +import { copyFile, cp, mkdir, readFile, rm, writeFile } from 'node:fs/promises' import { dirname, resolve } from 'node:path' +import process from 'node:process' import { finished } from 'node:stream/promises' import { Parse } from 'tar' import { request } from 'undici' @@ -8,7 +9,7 @@ import { aliases, skippedSources, sources } from './files.mjs' import { footers } from './footers.mjs' import { replacements } from './replacements.mjs' -const baseMatcher = /^lib|test/ +const baseMatcher = /^(?:lib|test)/ function highlightFile(file, color) { return `\x1b[${color}m${file.replace(process.cwd() + '/', '')}\x1b[0m` @@ -82,7 +83,7 @@ async function processFiles(contents) { } // Append trailers - if (matchingReplacements.length) { + if (matchingFooters.length) { modifications.push(highlightFile('footers', 33)) for (const footerKey of matchingFooters) { @@ -144,13 +145,25 @@ async function main() { tarFile = await downloadNode(nodeVersion) } - // Extract and process contents + // Extract contents const contents = await extract(nodeVersion, tarFile) - await mkdir('lib/internal/streams', { recursive: true, force: true }) - await mkdir('test/common', { recursive: true, force: true }) - await mkdir('test/parallel', { recursive: true, force: true }) + // Update Node version in README.md + replacements['README.md'][0][1] = replacements['README.md'][0][1].replace('$2', nodeVersion) + replacements['README.md'][1][1] = replacements['README.md'][1][1].replace('$2', nodeVersion) + + contents.push(['README.md', await readFile('./README.md', 'utf-8')]) + + // Create paths + const paths = new Set(contents.map((c) => dirname(c[0]))) + paths.delete('.') + for (const path of paths.values()) { + console.log(`Creating directory ${highlightFile(path, 32)} ...`) + await mkdir(path, { recursive: true, force: true }) + } + + // Perform replacements await processFiles(contents) // Copy template files @@ -163,9 +176,6 @@ async function main() { console.log(`Copying template to file ${highlightFile('lib/util.js', 32)} ...`) await copyFile('src/util.js', 'lib/util.js') - console.log(`Copying template to file ${highlightFile('lib/internal/uv-browser.js', 32)} ...`) - await copyFile('src/uv-browser.js', 'lib/internal/uv-browser.js') - console.log(`Copying template to file ${highlightFile('test/test-browser.js', 32)} ...`) await copyFile('src/test/test-browser.js', 'test/test-browser.js') @@ -175,11 +185,9 @@ async function main() { console.log(`Copying template to file ${highlightFile('test/ours', 32)} ...`) await cp('src/test/ours', 'test/ours', { recursive: true }) - // TODO@PI - // // Update Node version in README - // // processFile(readmePath, readmePath, [ - // // [readmeVersionRegex, "$1" + nodeVersion] - // // ]) + // Remove some unwanted directories + await rm('lib/internal/per_context', { recursive: true, force: true }) + await rm('lib/internal/util', { recursive: true, force: true }) } await main() diff --git a/build/files.mjs b/build/files.mjs index 8dbc03b392..45f9f14ab9 100644 --- a/build/files.mjs +++ b/build/files.mjs @@ -1,26 +1,32 @@ export const sources = [ 'lib/_stream_.+', 'lib/internal/errors.js', + 'lib/internal/per_context/primordials.js', 'lib/internal/streams/.+', - 'lib/internal/wrap_js_stream.js', - 'test/parallel/test-stream.+', - 
'test/parallel/test-readable.+', + 'lib/internal/util/inspect.js', + 'lib/internal/validators.js', + 'lib/internal/js_stream_socket.js', + 'lib/stream.js', + 'lib/stream/promises.js', + 'test/common/fixtures.js', + 'test/common/fixtures.mjs', 'test/common/index.js', - 'test/common/tmpdir.js' + 'test/common/index.mjs', + 'test/common/tmpdir.js', + 'test/fixtures/[^/]+.txt', + 'test/parallel/test-readable.+', + 'test/parallel/test-stream.+' ] export const skippedSources = [ - 'test/parallel/test-stream2-httpclient-response-end.js', - 'test/parallel/test-stream-base-no-abort.js', - 'test/parallel/test-stream-preprocess.js', - 'test/parallel/test-stream-inheritance.js', - 'test/parallel/test-stream-base-prototype-accessors.js', - 'test/parallel/test-stream-base-prototype-accessors-enumerability.js', - 'test/parallel/test-stream-wrap-drain.js', - 'test/parallel/test-stream-pipeline-http2.js', - 'test/parallel/test-stream-base-typechecking.js' + 'test/parallel/test-stream-consumers.js', + 'test/parallel/test-stream-destroy.js', + 'test/parallel/test-stream-map.js', + 'test/parallel/test-stream-pipeline.js', + 'test/parallel/test-stream-readable-async-iterators.js' ] export const aliases = { - 'lib/internal/errors.js': ['lib/internal/errors-browser.js'] + 'lib/internal/per_context/primordials.js': ['lib/internal/primordials.js'], + 'lib/internal/util/inspect.js': ['lib/internal/inspect.js', 'lib/internal/inspect-browser.js'] } diff --git a/build/footers.mjs b/build/footers.mjs index ed717848ab..9e3b314f49 100644 --- a/build/footers.mjs +++ b/build/footers.mjs @@ -1,47 +1,48 @@ -const streamWritable = ` -/* replacement start */ -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} +const testPolyfills = ` + /* replacement start */ + if (typeof EventTarget === 'undefined') { + globalThis.EventTarget = require('event-target-shim').EventTarget; + } -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - this.next = null; - this.entry = null; - this.finish = () => { onCorkedFinish(this, state) }; -} -/* replacement end */ + if (typeof AbortController === 'undefined') { + globalThis.AbortController = require('abort-controller').AbortController; + } + + if (typeof AbortSignal === 'undefined') { + globalThis.AbortSignal = require('abort-controller').AbortSignal; + + globalThis.AbortSignal.abort = function() { + const controller = new AbortController(); + controller.abort(); + + return controller.signal; + } + } + /* replacement end */ ` -const streamLegacy = ` -/* replacement start */ -Stream._uint8ArrayToBuffer = function(chunk) { - return Buffer.from(chunk); -} -Stream._isUint8Array = function(obj) { - return Buffer.isBuffer(obj) || obj instanceof Uint8Array; -} -/* replacement end */ +const testTicksDisableHook = ` + /* replacement start */ + process.on('beforeExit', (code) => { + hook.disable(); + }); + /* replacement end */ ` const testParallel = ` -/* replacement start */ -process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(\`test failed - exited code \${code}\`); - } -}); -/* replacement end */ + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(\`test failed - exited code \${code}\`); + } + }); + /* replacement end */ ` export const footers = { - 'lib/_stream_writable.js': streamWritable, - 
'lib/internal/streams/legacy.js': streamLegacy, + 'test/common/index.js': testPolyfills, + 'test/parallel/test-stream-writable-samecb-singletick.js': testTicksDisableHook, 'test/parallel/.+': testParallel } diff --git a/build/replacements.mjs b/build/replacements.mjs index 1fd8c4ed13..b48d97714e 100644 --- a/build/replacements.mjs +++ b/build/replacements.mjs @@ -1,179 +1,372 @@ -const streamsInternalsRequireRelativeUtil = ["require\\('util'\\)", "require('../../util')"] +const legacyStreamsRequireStream = ["require\\('stream'\\)", "require('./stream')"] + +const streamsInternalsPrimordials = ['= primordials', "= require('../primordials')"] + +const streamsInternalsInspect = [ + "const { inspect } = require\\('internal/util/inspect'\\);", + "const inspect = { custom: Symbol('nodejs.util.inspect.custom') };" +] + +const streamsInternalsRequireStreams = ["require\\('internal/streams/([^']+)'\\)", "require('./$1')"] + +const streamsInternalsRequireRelativeUtilDebuglog = ["require\\('internal/util/debuglog'\\)", "require('../../util')"] const streamsInternalsRequireRelativeInternalUtil = ["require\\('internal/util'\\)", "require('../../util')"] const streamsInternalsRequireInternal = ["require\\('internal/([^']+)'\\)", "require('../$1')"] -const inspectCustom = ['inspect.custom', '"custom"'] +const streamsInternalsRequireRelativeDuplex = ['instanceof Stream.Duplex', "instanceof require('./duplex')"] -const internalRequireRelativeUtil = ["require\\('util'\\)", "require('../util')"] +const streamsInternalsRequireWebStream = ["require\\('../webstreams/adapters'\\)", '{}'] -const errorsGetSystemErrorName = [ - "require\\('internal/util'\\)", +const streamsInternalNoRequireAbortController = [ + 'const \\{ AbortController \\} = .+', ` -{ - getSystemErrorName(err) { - const entry = errmap.get(err); - return entry ? 
entry[0] : \`Unknown system error \${err}\`; - } -} + if (typeof AbortController === 'undefined') { + globalThis.AbortController = require('abort-controller').AbortController; + } ` ] -const errorsRequireInternal = ["require\\('internal/([^']+)'\\)", "require('../$1')"] - -const errorsRequireInternalUtil = ["const (.+) = require\\('\\.\\./util'\\);", "const $1 = require('../../util');"] +const streamsInternalWeakHandler = [ + "const \\{ kWeakHandler \\} = require\\('../event_target'\\);", + "const kWeakHandler = require('../primordials').Symbol('kWeak');" +] -const errorsBinding = ["process.binding\\('uv'\\)", "require('./uv-browser')"] +const streamsInternalBlob = [ + "require\\('../blob'\\);", + ` + { + isBlob(b) { + return b instanceof Blob + } + } -const errorsBufferMaxLength = [ - "const \\{ kMaxLength \\} = process\\.binding\\('buffer'\\);", - 'const kMaxLength = 4294967296;' + const { Blob } = require('buffer'); + ` ] -const wrapBinding = [/internalBinding/, 'process.binding'] +const errorsRequireRelativeInspect = ["require\\('internal/util/inspect'\\)", "require('./inspect')"] + +const errorsRequireTty = ["require\\('internal/tty'\\).hasColors\\(\\)", 'false'] -const streamsRequireRelative = ["require\\('(_stream.+)'\\);", "require('./$1');"] +const errorsRequireCheckCaptureStackTrace = [ + 'ErrorCaptureStackTrace\\(err\\);', + ` + if (typeof ErrorCaptureStackTrace === 'function') { + ErrorCaptureStackTrace(err); + } + ` +] -const streamsRequireDeprecate = ["require\\('internal/util'\\);", "{ deprecate: require('util-deprecate') };"] +const inspectSequencesRegExp = [ + 'const strEscapeSequencesRegExp = .+', + 'const strEscapeSequencesRegExp = /[\\x00-\\x1f\\x27\\x5c\\x7f-\\x9f]/;' +] -const streamsRequireRelativeInternal = ["require\\('(internal/[^']+)'\\)", "require('./$1')"] +const inspectSequencesReplacer = [ + 'const strEscapeSequencesReplacer = .+', + 'const strEscapeSequencesReplacer = /[\\x00-\\x1f\\x27\\x5c\\x7f-\\x9f]/g' +] -const streamsRequireLegacy = [ - "const Stream = require\\('stream'\\);", - "const Stream = require('./internal/streams/legacy');" +const inspectSequencesRegExpSingle = [ + 'const strEscapeSequencesRegExpSingle = .+', + 'const strEscapeSequencesRegExpSingle = /[\\x00-\\x1f\\x5c\\x7f-\\x9f]/;' ] -const streamsRequireRelativeUtil = ["const util = require\\('util'\\);", "const util = require('./util');"] +const inspectSequencesReplacerSingle = [ + 'const strEscapeSequencesReplacerSingle = .+', + 'const strEscapeSequencesReplacerSingle = /[\\x00-\\x1f\\x5c\\x7f-\\x9f]/g;' +] -const streamsRequireRelativeDuplex = ['instanceof Stream.Duplex', "instanceof require('./_stream_duplex')"] +const inspectLookBehind = ['\\(\\?[<=]', '(?:'] -const streamsWritableIsBuffer = ['Object\\.getPrototypeOf\\((chunk)\\) !== Buffer\\.prototype', '!Buffer.isBuffer($1)'] +const internalsRequireAssert = ["require\\('internal/assert'\\)", "require('assert')"] -const streamsWritableWriteRequest = [ - 'state\\.lastBufferedRequest = \\{[^}]+\\}', - 'state.lastBufferedRequest = new WriteReq(chunk, encoding, cb)' +const inspectNativeModule = [ + "require\\('internal/bootstrap/loaders'\\);", + ` + { + NativeModule: { + exists() { + return false; + } + } + } + ` ] -const streamsWritableCorkedRequest = [ - 'var corkReq = [\\s\\S]+?(\\S+?)\\.corkedRequestsFree = corkReq', - '$1.corkedRequestsFree = new CorkedRequest($1)' +const inspectIntl = ["internalBinding\\('config'\\)\\.hasIntl", 'false'] + +const inspectIcuBinding = ["internalBinding\\('icu'\\)", '{}'] + +const 
streamSocketInspectBinding = ['internalBinding', 'process.binding'] + +const streamSocketDebugLog = ["require\\('internal/util/debuglog'\\)", "require('../util')"] + +const inspectRequireUtil = ["internalBinding\\('util'\\)", "require('../util')"] + +const internalRequireRelativeInternalUtil = ["require\\('internal/util'\\)", "require('../util')"] + +const internalRequireRelativeInternal = ["require\\('internal/([^']+)'\\)", "require('./$1')"] + +const internalRequireAsyncHooks = ["require\\('./async_hooks'\\)", "require('internal/async_hooks')"] + +const internalPrimordials = ['= primordials', "= require('./primordials')"] + +const internalRequireRelativeTypes = ["require\\('internal/util/types'\\)", "require('../util')"] + +const internalNoCoalesceAssignment = [ + '\\s*(.+) \\?\\?= (.+)', + ` + if (typeof $1 === 'undefined') { + $1 = $2 + } + ` ] -const testCommonAsyncHooksDisableStart = ["(require\\('async_hooks'\\))", '/* $1'] +const primordialsDefine = [ + "('use strict';)", + ` + $1 -const testCommonAsyncHooksDisableEnd = ['(\\}\\).enable\\(\\);)', '$1 */'] + const primordials = module.exports = {} + ` +] -const testCommonTimer = ["process\\.binding\\('timer_wrap'\\)\\.Timer;", '{ now: function (){} };'] +const primordialsAggregateError = [ + '(= Reflect;)', + ` + $1 -const testCommonLeakedGlobals = [ - '(function leakedGlobals\\(\\) \\{)', + if (typeof AggregateError === 'undefined') { + globalThis.AggregateError = require('aggregate-error'); + } ` -/* replacement start */ -if (typeof constructor == 'function') { - knownGlobals.push(constructor); -} +] -if (typeof DTRACE_NET_SOCKET_READ == 'function') { - knownGlobals.push(DTRACE_NET_SOCKET_READ); -} +const validatorSignals = ["const \\{ signals \\} = internalBinding\\('constants'\\).os;", 'const signals = {};'] -if (typeof DTRACE_NET_SOCKET_WRITE == 'function') { - knownGlobals.push(DTRACE_NET_SOCKET_WRITE); -} +const streamIndexPrimordials = ['= primordials', "= require('./internal/primordials')"] -if (global.__coverage__ == 'function') { - knownGlobals.push(global.__coverage__); -} +const streamIndexRequireUtil = ["require\\('internal/util'\\)", "require('./util')"] -for (const item of ['queueMicrotask', 'performance']) { - if (typeof global[item] !== undefined) { - knownGlobals.push(global[item]); - } -} -/* replacement end */ +const streamIndexRequireInternalBuffer = ["require\\('internal/buffer'\\)", '{}'] -$1 -` +const streamIndexIsUint8Array = [ + "Stream._isUint8Array = require\\('internal/util/types'\\).isUint8Array;", + ` + Stream._isUint8Array = function isUint8Array(value) { + return value instanceof Uint8Array + }; + ` ] -// Following replacements on this file are for browser tests -// const testCommonHasCrypto = ['const hasCrypto = Boolean\\(process.versions.openssl\\);', 'const hasCrypto = true;'] +const streamIndexRequireInternal = ["require\\('internal/([^']+)'\\)", "require('./internal/$1')"] + +const streamIndexRequirePromises = ["require\\('stream/promises'\\);", "require('./stream/promises');"] + +const streamIndexUint8ArrayToBuffer = ['new internalBuffer.FastBuffer', 'Buffer.from'] -// const testCommonWorkerThreads = ["require\\('module'\\)\\.builtinModules\\.includes\\('worker_threads'\\)", 'false'] +const streamsPrimordials = ['= primordials', "= require('../internal/primordials')"] -// const testCommonArgv = ['process.argv.length === 2', 'false'] +const streamsRequireInternal = ["require\\('internal/(.+)'\\)", "require('../internal/$1')"] -// const testCommonCpus = ['os.cpus()', 'os.cpus().length === 0 ? 
[{ speed: 1000 }] : os.cpus()'] +const streamsConsumerTextDecoder = ["const \\{\\n\\s+TextDecoder,\\n\\} = require\\('../internal/encoding'\\);\\n", ''] -// const testCommonBuildType = [ -// 'const buildType = process.config.target_defaults.default_configuration;', -// "const buildType = 'readable-stream';" -// ] +const streamsConsumerNoRequireBlob = ["const \\{\\n\\s+Blob,\\n\\} = require\\('../internal/blob'\\);\\n", ''] + +const streamsConsumerRequireBlobFromBuffer = ['(\\s+Buffer,)', '$1 Blob,'] + +const webstreamPrimordials = ['= primordials', "= require('../primordials')"] + +const webstreamsRequireRelative = ["require\\('internal/webstreams/([^']+)'\\)", "require('./$1')"] + +const webstreamsRequireStreams = ["require\\('internal/streams/([^']+)'\\)", "require('../streams/$1')"] + +const webstreamsRequireStream = ["require\\('stream'\\)", "require('../../stream')"] + +const webstreamsRequireUtil = ["require\\('internal/util'\\)", "require('../../util')"] + +const webstreamsRequireErrorsOrValidators = ["require\\('internal/(errors|validators)'\\)", "require('../$1')"] + +const webstreamsConsumerNoRequireTextAPI = [ + "const \\{\\n\\s+TextDecoder,\\n\\s+TextEncoder,\\n\\} = require\\('internal/encoding'\\);\\n", + '' +] const testParallelIncludeTap = [ "('use strict')", + ` + $1 - `$1 - -const tap = require('tap'); -const silentConsole = { log() {}, error() {} }; -` + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ` ] const testParallelRequireStream = ["require\\('stream'\\)", "require('../../lib')"] -const testParallelRequireStreamInternals = ["require\\('(_stream_\\w+)'\\)", "require('../../lib/$1')"] +const testParallelRequireStreamPromises = ["require\\('stream/promises'\\)", "require('../../lib/stream/promises')"] + +const testParallelRequireStreamConsumer = ["require\\('stream/consumer'\\)", "require('../../lib/stream/consumer')"] + +const testParallelRequireStreamWeb = ["require\\('stream/web'\\)", "require('../../lib/stream/web')"] + +const testParallelRequireStreamInternalsLegacy = ["require\\('(_stream_\\w+)'\\)", "require('../../lib/$1')"] + +const testParallelRequireStreamInternals = ["require\\('(internal/.+)'\\)", "require('../../lib/$1')"] + +const testParallelImportStreamInMjs = [" from 'stream';", "from '../../lib/index.js';"] -const testParallelRequireStreamClasses = [ - 'Stream.(Readable|Writable|Duplex|Transform|PassThrough)', - "require('../../lib').$1" +const testParallelImportTapInMjs = ["(from 'assert';)", "$1\nimport tap from 'tap';"] + +const testParallelSilentConsole = ['console.(log|error)', 'silentConsole.$1'] + +const testParallelHasOwn = ['Object.hasOwn\\(', 'Reflect.has('] + +const testParallelBindings = [ + "const \\{ internalBinding \\} = require\\('../../lib/internal/test/binding'\\);", + 'const internalBinding = process.binding' ] -const testParallelPromisify = [ - "const \\{ promisify \\} = require\\('util'\\);", - "const promisify = require('util-promisify');" +const testParallelTimersPromises = [ + "const { setTimeout } = require\\('timers/promises'\\);", + ` + const st = require('timers').setTimeout; + + function setTimeout(ms) { + return new Promise(resolve => { + st(resolve, ms); + }); + } + ` ] -const testParallelSilentConsole = ['console.(log|error)', 'silentConsole.$1'] +const testKnownGlobals = [ + 'let knownGlobals = \\[(\\n\\s+)', + ` + let knownGlobals = [\n + typeof AggregateError !== 'undefined' ? AggregateError : require('aggregate-error'), + typeof AbortController !== 'undefined' ? 
AbortController : require('abort-controller').AbortController, + typeof AbortSignal !== 'undefined' ? AbortSignal : require('abort-controller').AbortSignal, + typeof EventTarget !== 'undefined' ? EventTarget : require('event-target-shim').EventTarget, + ` +] + +const testTicksReenableConsoleLog = ['silentConsole.log\\(i\\);', 'console.log(i);'] + +const testTickSaveHook = ['async_hooks.createHook\\(\\{', 'const hook = async_hooks.createHook({'] + +const testReadableBufferListInspect = [ + 'assert.strictEqual\\(\\n\\s+util.inspect\\(\\[ list \\], \\{ compact: false \\}\\),\\n\\s+`\\[\\n\\s+BufferList \\{\\n\\s+head: \\[Object\\],\\n\\s+tail: \\[Object\\],\\n\\s+length: 4\\n\\s+\\}\\n\\]`\\);', + ` + assert.strictEqual(typeof list.head, 'object'); + assert.strictEqual(typeof list.tail, 'object'); + assert.strictEqual(list.length, 4); + ` +] + +const testFinishedEvent = ["res.on\\('close", "res.on('finish"] +const testPreprocessWinLineSeparator = [ + 'assert.strictEqual\\(streamedData, modelData\\);', + "assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\\r\\n/g, '\\n') : modelData);" +] + +const testFlatMapWinLineSeparator = [ + "'xyz\\\\n'\\.repeat\\(5\\)", + "(process.platform === 'win32' ? 'xyz\\r\\n' : 'xyz\\n').repeat(5)" +] + +const readmeInfo = ['(This package is a mirror of the streams implementations in Node.js) (\\d+.\\d+.\\d+).', '$1 $2.'] + +const readmeLink = ['(\\[Node.js website\\]\\(https://nodejs.org/dist/v)(\\d+.\\d+.\\d+)', '$1$2'] export const replacements = { + 'lib/_stream.+': [legacyStreamsRequireStream], 'lib/internal/streams/.+': [ - streamsInternalsRequireRelativeUtil, + streamsInternalsPrimordials, + streamsInternalsInspect, + streamsInternalsRequireStreams, + streamsInternalsRequireRelativeUtilDebuglog, streamsInternalsRequireRelativeInternalUtil, streamsInternalsRequireInternal, - inspectCustom + streamsInternalsRequireRelativeDuplex, + streamsInternalsRequireWebStream, + streamsInternalNoRequireAbortController, + streamsInternalWeakHandler, + streamsInternalBlob + ], + 'lib/internal/errors': [errorsRequireRelativeInspect, errorsRequireTty, errorsRequireCheckCaptureStackTrace], + 'lib/internal/inspect.js': [inspectNativeModule, inspectIntl, inspectIcuBinding, inspectRequireUtil], + 'lib/internal/inspect-browser.js': [ + inspectNativeModule, + inspectIntl, + inspectIcuBinding, + inspectRequireUtil, + inspectSequencesRegExp, + inspectSequencesReplacer, + inspectSequencesRegExpSingle, + inspectSequencesReplacerSingle, + inspectLookBehind ], - 'lib/internal/[^/]+': [internalRequireRelativeUtil], - 'lib/internal/errors-browser.js': [ - errorsGetSystemErrorName, - errorsRequireInternal, - errorsRequireInternalUtil, - errorsBinding, - errorsBufferMaxLength + 'lib/internal/js_stream_socket.js': [streamSocketInspectBinding, streamSocketDebugLog], + 'lib/internal/primordials.js': [primordialsDefine, primordialsAggregateError], + 'lib/internal/validators.js': [validatorSignals], + 'lib/internal/webstreams/.+': [ + webstreamPrimordials, + webstreamsRequireRelative, + webstreamsRequireStreams, + webstreamsRequireStream, + webstreamsRequireUtil, + webstreamsRequireErrorsOrValidators, + webstreamsConsumerNoRequireTextAPI ], - 'lib/internal/wrap_js_stream.js': [wrapBinding], - 'lib/_stream_.+': [ - streamsRequireRelative, - streamsRequireDeprecate, - streamsRequireRelativeInternal, - streamsRequireLegacy, - streamsRequireRelativeUtil + // Keep this after all the rest in the same folder + 
'lib/internal/(?:errors|inspect|inspect-browser|js_stream_socket|primordials|validators).js': [ + internalsRequireAssert, + internalRequireRelativeTypes, + internalRequireRelativeInternalUtil, + internalRequireRelativeInternal, + internalRequireAsyncHooks, + internalPrimordials, + internalNoCoalesceAssignment ], - 'lib/_stream_(readable|writable).js': [streamsRequireRelativeDuplex], - 'lib/_stream_writable.js': [streamsWritableIsBuffer, streamsWritableWriteRequest, streamsWritableCorkedRequest], - 'test/common/index.js': [ - testCommonAsyncHooksDisableStart, - testCommonAsyncHooksDisableEnd, - testCommonTimer, - testCommonLeakedGlobals + 'lib/stream.js': [ + streamIndexPrimordials, + streamIndexRequireInternalBuffer, + streamIndexIsUint8Array, + streamIndexUint8ArrayToBuffer, + streamIndexRequireUtil, + streamIndexRequireInternal, + streamIndexRequirePromises ], + 'lib/stream/.+': [streamsPrimordials, streamsRequireInternal], + 'lib/stream/consumers.js': [ + streamsConsumerTextDecoder, + streamsConsumerNoRequireBlob, + streamsConsumerRequireBlobFromBuffer + ], + 'test/common/index.js': [testKnownGlobals], 'test/parallel/.+': [ testParallelIncludeTap, testParallelRequireStream, + testParallelRequireStreamPromises, + testParallelRequireStreamConsumer, + testParallelRequireStreamWeb, + testParallelRequireStreamInternalsLegacy, testParallelRequireStreamInternals, - testParallelRequireStreamClasses, - testParallelPromisify, - testParallelSilentConsole - ] + testParallelImportTapInMjs, + testParallelImportStreamInMjs, + testParallelSilentConsole, + testParallelHasOwn, + testParallelBindings, + testParallelTimersPromises + ], + 'test/parallel/test-stream-finished.js': [testFinishedEvent], + 'test/parallel/test-stream-flatMap.js': [testFlatMapWinLineSeparator], + 'test/parallel/test-stream-preprocess.js': [testPreprocessWinLineSeparator], + 'test/parallel/test-stream-writable-samecb-singletick.js': [testTicksReenableConsoleLog, testTickSaveHook], + 'test/parallel/test-stream2-readable-from-list.js': [testReadableBufferListInspect], + 'README.md': [readmeInfo, readmeLink] } diff --git a/c8.json b/c8.json index 553b624ef3..ea07a2272a 100644 --- a/c8.json +++ b/c8.json @@ -1,9 +1,9 @@ { - "include": ["dist/lib"], + "include": ["lib"], "reporter": ["text", "html"], "check-coverage": true, - "branches": 75, - "functions": 75, - "lines": 75, - "statements": 75 + "branches": 50, + "functions": 50, + "lines": 50, + "statements": 50 } \ No newline at end of file diff --git a/examples/capslock-type.cjs b/examples/capslock-type.cjs index 1125e699c1..9ea5342ae8 100644 --- a/examples/capslock-type.cjs +++ b/examples/capslock-type.cjs @@ -1,3 +1,5 @@ +'use strict' + const { Transform } = require('../lib') class MyStream extends Transform { diff --git a/examples/typer.mjs b/examples/typer.mjs index 4d8f4bc0f0..e9bed80c38 100644 --- a/examples/typer.mjs +++ b/examples/typer.mjs @@ -1,4 +1,5 @@ import { createReadStream } from 'node:fs' +import process from 'node:process' import { Readable } from '../lib/index.js' const fst = createReadStream(new URL(import.meta.url).pathname) @@ -13,7 +14,7 @@ rst.on('end', function () { console.log("Every time you press a key, you will see more contents of the source file. 
Let's begin!\n\n") process.stdin.setRawMode(true) process.stdin.on('data', function () { - const c = rst.read(25) + const c = rst.read(100) if (!c) { return setTimeout(process.exit, 500) } diff --git a/lib/_stream_duplex.js b/lib/_stream_duplex.js new file mode 100644 index 0000000000..ae4d1e0b9c --- /dev/null +++ b/lib/_stream_duplex.js @@ -0,0 +1,5 @@ +'use strict'; + +// Keep this file as an alias for the full stream module. + +module.exports = require('./stream').Duplex; diff --git a/lib/_stream_passthrough.js b/lib/_stream_passthrough.js new file mode 100644 index 0000000000..31b74aeb3d --- /dev/null +++ b/lib/_stream_passthrough.js @@ -0,0 +1,5 @@ +'use strict'; + +// Keep this file as an alias for the full stream module. + +module.exports = require('./stream').PassThrough; diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js new file mode 100644 index 0000000000..5db2154a12 --- /dev/null +++ b/lib/_stream_readable.js @@ -0,0 +1,5 @@ +'use strict'; + +// Keep this file as an alias for the full stream module. + +module.exports = require('./stream').Readable; diff --git a/lib/_stream_transform.js b/lib/_stream_transform.js new file mode 100644 index 0000000000..f8e475fa6e --- /dev/null +++ b/lib/_stream_transform.js @@ -0,0 +1,5 @@ +'use strict'; + +// Keep this file as an alias for the full stream module. + +module.exports = require('./stream').Transform; diff --git a/lib/_stream_wrap.js b/lib/_stream_wrap.js new file mode 100644 index 0000000000..904128a382 --- /dev/null +++ b/lib/_stream_wrap.js @@ -0,0 +1,5 @@ +'use strict'; + +module.exports = require('internal/js_stream_socket'); +process.emitWarning('The _stream_wrap module is deprecated.', + 'DeprecationWarning', 'DEP0125'); diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js new file mode 100644 index 0000000000..70cfe234dd --- /dev/null +++ b/lib/_stream_writable.js @@ -0,0 +1,5 @@ +'use strict'; + +// Keep this file as an alias for the full stream module. 
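// --- Editorial sketch, not part of the patch ---
// The new lib/_stream_*.js files above are thin aliases that re-export the
// matching class from ./stream, so legacy deep requires keep resolving to the
// same constructors. A minimal check of that equivalence, assuming it is run
// from the repository root after the patch is applied:
//
//   const { Duplex, Writable } = require('./lib/stream')
//   console.assert(require('./lib/_stream_duplex') === Duplex)
//   console.assert(require('./lib/_stream_writable') === Writable)
// --- End editorial sketch ---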
+ +module.exports = require('./stream').Writable; diff --git a/lib/browser.js b/lib/browser.js new file mode 100644 index 0000000000..c38850733a --- /dev/null +++ b/lib/browser.js @@ -0,0 +1,38 @@ +'use strict' + +const CustomStream = require('./stream') +const promises = require('./stream/promises') +const originalDestroy = CustomStream.Readable.destroy + +module.exports = CustomStream.Readable + +// Explicit export naming is needed for ESM +module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer +module.exports._isUint8Array = CustomStream._isUint8Array +module.exports.isDisturbed = CustomStream.isDisturbed +module.exports.isErrored = CustomStream.isErrored +module.exports.isReadable = CustomStream.isReadable +module.exports.Readable = CustomStream.Readable +module.exports.Writable = CustomStream.Writable +module.exports.Duplex = CustomStream.Duplex +module.exports.Transform = CustomStream.Transform +module.exports.PassThrough = CustomStream.PassThrough +module.exports.addAbortSignal = CustomStream.addAbortSignal +module.exports.finished = CustomStream.finished +module.exports.destroy = CustomStream.destroy +module.exports.destroy = originalDestroy +module.exports.pipeline = CustomStream.pipeline +module.exports.compose = CustomStream.compose + +Object.defineProperty(CustomStream, 'promises', { + configurable: true, + enumerable: true, + get() { + return promises + } +}) + +module.exports.Stream = CustomStream.Stream + +// Allow default importing +module.exports.default = module.exports diff --git a/lib/index.js b/lib/index.js new file mode 100644 index 0000000000..e9b59ded0e --- /dev/null +++ b/lib/index.js @@ -0,0 +1,71 @@ +'use strict' + +const Stream = require('stream') + +if (Stream && process.env.READABLE_STREAM === 'disable') { + const promises = require('stream/promises') + + // Explicit export naming is needed for ESM + module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer + module.exports._isUint8Array = Stream._isUint8Array + module.exports.isDisturbed = Stream.isDisturbed + module.exports.isErrored = Stream.isErrored + module.exports.isReadable = Stream.isReadable + module.exports.Readable = Stream.Readable + module.exports.Writable = Stream.Writable + module.exports.Duplex = Stream.Duplex + module.exports.Transform = Stream.Transform + module.exports.PassThrough = Stream.PassThrough + module.exports.addAbortSignal = Stream.addAbortSignal + module.exports.finished = Stream.finished + module.exports.destroy = Stream.destroy + module.exports.pipeline = Stream.pipeline + module.exports.compose = Stream.compose + + Object.defineProperty(Stream, 'promises', { + configurable: true, + enumerable: true, + get() { + return promises + } + }) + + module.exports.Stream = Stream.Stream +} else { + const CustomStream = require('./stream') + const promises = require('./stream/promises') + const originalDestroy = CustomStream.Readable.destroy + + module.exports = CustomStream.Readable + + // Explicit export naming is needed for ESM + module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer + module.exports._isUint8Array = CustomStream._isUint8Array + module.exports.isDisturbed = CustomStream.isDisturbed + module.exports.isErrored = CustomStream.isErrored + module.exports.isReadable = CustomStream.isReadable + module.exports.Readable = CustomStream.Readable + module.exports.Writable = CustomStream.Writable + module.exports.Duplex = CustomStream.Duplex + module.exports.Transform = CustomStream.Transform + module.exports.PassThrough = 
CustomStream.PassThrough + module.exports.addAbortSignal = CustomStream.addAbortSignal + module.exports.finished = CustomStream.finished + module.exports.destroy = CustomStream.destroy + module.exports.destroy = originalDestroy + module.exports.pipeline = CustomStream.pipeline + module.exports.compose = CustomStream.compose + + Object.defineProperty(CustomStream, 'promises', { + configurable: true, + enumerable: true, + get() { + return promises + } + }) + + module.exports.Stream = CustomStream.Stream +} + +// Allow default importing +module.exports.default = module.exports diff --git a/lib/internal/errors.js b/lib/internal/errors.js new file mode 100644 index 0000000000..15e6c88a1e --- /dev/null +++ b/lib/internal/errors.js @@ -0,0 +1,1663 @@ +/* eslint node-core/documented-errors: "error" */ +/* eslint node-core/alphabetize-errors: "error" */ +/* eslint node-core/prefer-util-format-errors: "error" */ + +'use strict'; + +// The whole point behind this internal module is to allow Node.js to no +// longer be forced to treat every error message change as a semver-major +// change. The NodeError classes here all expose a `code` property whose +// value statically and permanently identifies the error. While the error +// message may change, the code should not. + +const { + AggregateError, + ArrayFrom, + ArrayIsArray, + ArrayPrototypeFilter, + ArrayPrototypeIncludes, + ArrayPrototypeIndexOf, + ArrayPrototypeJoin, + ArrayPrototypeMap, + ArrayPrototypePop, + ArrayPrototypePush, + ArrayPrototypeSlice, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + Error, + ErrorCaptureStackTrace, + ErrorPrototypeToString, + JSONStringify, + MapPrototypeGet, + MathAbs, + MathMax, + Number, + NumberIsInteger, + ObjectAssign, + ObjectDefineProperty, + ObjectDefineProperties, + ObjectIsExtensible, + ObjectGetOwnPropertyDescriptor, + ObjectKeys, + ObjectPrototypeHasOwnProperty, + RangeError, + ReflectApply, + RegExpPrototypeTest, + SafeArrayIterator, + SafeMap, + SafeWeakMap, + String, + StringPrototypeEndsWith, + StringPrototypeIncludes, + StringPrototypeMatch, + StringPrototypeSlice, + StringPrototypeSplit, + StringPrototypeStartsWith, + StringPrototypeToLowerCase, + Symbol, + SymbolFor, + SyntaxError, + TypeError, + URIError, +} = require('./primordials'); + +const kIsNodeError = Symbol('kIsNodeError'); + +const isWindows = process.platform === 'win32'; + +const messages = new SafeMap(); +const codes = {}; + +const classRegExp = /^([A-Z][a-z0-9]*)+$/; +// Sorted by a rough estimate on most frequently used entries. +const kTypes = [ + 'string', + 'function', + 'number', + 'object', + // Accept 'Function' and 'Object' as alternative to the lower cased version. + 'Function', + 'Object', + 'boolean', + 'bigint', + 'symbol', +]; + +const MainContextError = Error; +const overrideStackTrace = new SafeWeakMap(); +const kNoOverride = Symbol('kNoOverride'); +let userStackTraceLimit; +const nodeInternalPrefix = '__node_internal_'; +const prepareStackTrace = (globalThis, error, trace) => { + // API for node internals to override error stack formatting + // without interfering with userland code. 
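  // --- Editorial note, not part of the patch ---
  // overrideStackTrace is a SafeWeakMap keyed by error instances. Internal
  // code (see hideInternalStackFrames() further down in this file) registers
  // a one-shot formatter via overrideStackTrace.set(error, fn); the branch
  // below consumes and deletes that entry the first time the stack for that
  // error is prepared, so errors without an entry still fall through to any
  // userland Error.prepareStackTrace hook.
  // --- End editorial note ---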
+ if (overrideStackTrace.has(error)) { + const f = overrideStackTrace.get(error); + overrideStackTrace.delete(error); + return f(error, trace); + } + + const firstFrame = trace[0]?.getFunctionName(); + if (firstFrame && StringPrototypeStartsWith(firstFrame, nodeInternalPrefix)) { + for (let l = trace.length - 1; l >= 0; l--) { + const fn = trace[l]?.getFunctionName(); + if (fn && StringPrototypeStartsWith(fn, nodeInternalPrefix)) { + ArrayPrototypeSplice(trace, 0, l + 1); + break; + } + } + // `userStackTraceLimit` is the user value for `Error.stackTraceLimit`, + // it is updated at every new exception in `captureLargerStackTrace`. + if (trace.length > userStackTraceLimit) + ArrayPrototypeSplice(trace, userStackTraceLimit); + } + + const globalOverride = + maybeOverridePrepareStackTrace(globalThis, error, trace); + if (globalOverride !== kNoOverride) return globalOverride; + + // Normal error formatting: + // + // Error: Message + // at function (file) + // at file + let errorString; + if (kIsNodeError in error) { + errorString = `${error.name} [${error.code}]: ${error.message}`; + } else { + errorString = ErrorPrototypeToString(error); + } + if (trace.length === 0) { + return errorString; + } + return `${errorString}\n at ${ArrayPrototypeJoin(trace, '\n at ')}`; +}; + +const maybeOverridePrepareStackTrace = (globalThis, error, trace) => { + // Polyfill of V8's Error.prepareStackTrace API. + // https://crbug.com/v8/7848 + // `globalThis` is the global that contains the constructor which + // created `error`. + if (typeof globalThis.Error?.prepareStackTrace === 'function') { + return globalThis.Error.prepareStackTrace(error, trace); + } + // We still have legacy usage that depends on the main context's `Error` + // being used, even when the error is from a different context. + // TODO(devsnek): evaluate if this can be eventually deprecated/removed. + if (typeof MainContextError.prepareStackTrace === 'function') { + return MainContextError.prepareStackTrace(error, trace); + } + + return kNoOverride; +}; + +const aggregateTwoErrors = hideStackFrames((innerError, outerError) => { + if (innerError && outerError && innerError !== outerError) { + if (ArrayIsArray(outerError.errors)) { + // If `outerError` is already an `AggregateError`. + ArrayPrototypePush(outerError.errors, innerError); + return outerError; + } + // eslint-disable-next-line no-restricted-syntax + const err = new AggregateError(new SafeArrayIterator([ + outerError, + innerError, + ]), outerError.message); + err.code = outerError.code; + return err; + } + return innerError || outerError; +}); + +// Lazily loaded +let util; +let assert; + +let internalUtil = null; +function lazyInternalUtil() { + if (!internalUtil) { + internalUtil = require('../util'); + } + return internalUtil; +} + +let internalUtilInspect = null; +function lazyInternalUtilInspect() { + if (!internalUtilInspect) { + internalUtilInspect = require('./inspect'); + } + return internalUtilInspect; +} + +let buffer; +function lazyBuffer() { + if (buffer === undefined) + buffer = require('buffer').Buffer; + return buffer; +} + +function isErrorStackTraceLimitWritable() { + const desc = ObjectGetOwnPropertyDescriptor(Error, 'stackTraceLimit'); + if (desc === undefined) { + return ObjectIsExtensible(Error); + } + + return ObjectPrototypeHasOwnProperty(desc, 'writable') ? + desc.writable : + desc.set !== undefined; +} + +// A specialized Error that includes an additional info property with +// additional information about the error condition. 
+// It has the properties present in a UVException but with a custom error +// message followed by the uv error code and uv error message. +// It also has its own error code with the original uv error context put into +// `err.info`. +// The context passed into this error must have .code, .syscall and .message, +// and may have .path and .dest. +class SystemError extends Error { + constructor(key, context) { + const limit = Error.stackTraceLimit; + if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; + super(); + // Reset the limit and setting the name property. + if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = limit; + const prefix = getMessage(key, [], this); + let message = `${prefix}: ${context.syscall} returned ` + + `${context.code} (${context.message})`; + + if (context.path !== undefined) + message += ` ${context.path}`; + if (context.dest !== undefined) + message += ` => ${context.dest}`; + + captureLargerStackTrace(this); + + this.code = key; + + ObjectDefineProperties(this, { + [kIsNodeError]: { + value: true, + enumerable: false, + writable: false, + configurable: true, + }, + name: { + value: 'SystemError', + enumerable: false, + writable: true, + configurable: true, + }, + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + info: { + value: context, + enumerable: true, + configurable: true, + writable: false, + }, + errno: { + get() { + return context.errno; + }, + set: (value) => { + context.errno = value; + }, + enumerable: true, + configurable: true, + }, + syscall: { + get() { + return context.syscall; + }, + set: (value) => { + context.syscall = value; + }, + enumerable: true, + configurable: true, + }, + }); + + if (context.path !== undefined) { + // TODO(BridgeAR): Investigate why and when the `.toString()` was + // introduced. The `path` and `dest` properties in the context seem to + // always be of type string. We should probably just remove the + // `.toString()` and `Buffer.from()` operations and set the value on the + // context as the user did. + ObjectDefineProperty(this, 'path', { + get() { + return context.path != null ? + context.path.toString() : context.path; + }, + set: (value) => { + context.path = value ? + lazyBuffer().from(value.toString()) : undefined; + }, + enumerable: true, + configurable: true + }); + } + + if (context.dest !== undefined) { + ObjectDefineProperty(this, 'dest', { + get() { + return context.dest != null ? + context.dest.toString() : context.dest; + }, + set: (value) => { + context.dest = value ? + lazyBuffer().from(value.toString()) : undefined; + }, + enumerable: true, + configurable: true + }); + } + } + + toString() { + return `${this.name} [${this.code}]: ${this.message}`; + } + + [SymbolFor('nodejs.util.inspect.custom')](recurseTimes, ctx) { + return lazyInternalUtilInspect().inspect(this, { + ...ctx, + getters: true, + customInspect: false + }); + } +} + +function makeSystemErrorWithCode(key) { + return class NodeError extends SystemError { + constructor(ctx) { + super(key, ctx); + } + }; +} + +function makeNodeErrorWithCode(Base, key) { + return function NodeError(...args) { + const limit = Error.stackTraceLimit; + if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; + const error = new Base(); + // Reset the limit and setting the name property. 
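    // --- Editorial note, not part of the patch ---
    // As in SystemError above, Error.stackTraceLimit is zeroed around the
    // base-class construction so no stack is captured for the intermediate
    // error object; captureLargerStackTrace() further down recaptures a
    // fresh stack with the limit temporarily raised, after which the
    // internal frames can be trimmed by prepareStackTrace().
    // --- End editorial note ---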
+ if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = limit; + const message = getMessage(key, args, error); + ObjectDefineProperties(error, { + [kIsNodeError]: { + value: true, + enumerable: false, + writable: false, + configurable: true, + }, + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + toString: { + value() { + return `${this.name} [${key}]: ${this.message}`; + }, + enumerable: false, + writable: true, + configurable: true, + }, + }); + captureLargerStackTrace(error); + error.code = key; + return error; + }; +} + +/** + * This function removes unnecessary frames from Node.js core errors. + * @template {(...args: any[]) => any} T + * @type {(fn: T) => T} + */ +function hideStackFrames(fn) { + // We rename the functions that will be hidden to cut off the stacktrace + // at the outermost one + const hidden = nodeInternalPrefix + fn.name; + ObjectDefineProperty(fn, 'name', { value: hidden }); + return fn; +} + +// Utility function for registering the error codes. Only used here. Exported +// *only* to allow for testing. +function E(sym, val, def, ...otherClasses) { + // Special case for SystemError that formats the error message differently + // The SystemErrors only have SystemError as their base classes. + messages.set(sym, val); + if (def === SystemError) { + def = makeSystemErrorWithCode(sym); + } else { + def = makeNodeErrorWithCode(def, sym); + } + + if (otherClasses.length !== 0) { + otherClasses.forEach((clazz) => { + def[clazz.name] = makeNodeErrorWithCode(clazz, sym); + }); + } + codes[sym] = def; +} + +function getMessage(key, args, self) { + const msg = messages.get(key); + + if (assert === undefined) assert = require('assert'); + + if (typeof msg === 'function') { + assert( + msg.length <= args.length, // Default options do not count. + `Code: ${key}; The provided arguments length (${args.length}) does not ` + + `match the required ones (${msg.length}).` + ); + return ReflectApply(msg, self, args); + } + + const expectedLength = + (StringPrototypeMatch(msg, /%[dfijoOs]/g) || []).length; + assert( + expectedLength === args.length, + `Code: ${key}; The provided arguments length (${args.length}) does not ` + + `match the required ones (${expectedLength}).` + ); + if (args.length === 0) + return msg; + + ArrayPrototypeUnshift(args, msg); + return ReflectApply(lazyInternalUtilInspect().format, null, args); +} + +let uvBinding; + +function lazyUv() { + if (!uvBinding) { + uvBinding = internalBinding('uv'); + } + return uvBinding; +} + +const uvUnmappedError = ['UNKNOWN', 'unknown error']; + +function uvErrmapGet(name) { + uvBinding = lazyUv(); + if (!uvBinding.errmap) { + uvBinding.errmap = uvBinding.getErrorMap(); + } + return MapPrototypeGet(uvBinding.errmap, name); +} + +const captureLargerStackTrace = hideStackFrames( + function captureLargerStackTrace(err) { + const stackTraceLimitIsWritable = isErrorStackTraceLimitWritable(); + if (stackTraceLimitIsWritable) { + userStackTraceLimit = Error.stackTraceLimit; + Error.stackTraceLimit = Infinity; + } + + if (typeof ErrorCaptureStackTrace === 'function') { + ErrorCaptureStackTrace(err); + } + + // Reset the limit + if (stackTraceLimitIsWritable) Error.stackTraceLimit = userStackTraceLimit; + + return err; + }); + +/** + * This creates an error compatible with errors produced in the C++ + * function UVException using a context object with data assembled in C++. + * The goal is to migrate them to ERR_* errors later when compatibility is + * not a concern. 
+ * + * @param {object} ctx + * @returns {Error} + */ +const uvException = hideStackFrames(function uvException(ctx) { + const { 0: code, 1: uvmsg } = uvErrmapGet(ctx.errno) || uvUnmappedError; + let message = `${code}: ${ctx.message || uvmsg}, ${ctx.syscall}`; + + let path; + let dest; + if (ctx.path) { + path = ctx.path.toString(); + message += ` '${path}'`; + } + if (ctx.dest) { + dest = ctx.dest.toString(); + message += ` -> '${dest}'`; + } + + // Reducing the limit improves the performance significantly. We do not lose + // the stack frames due to the `captureStackTrace()` function that is called + // later. + const tmpLimit = Error.stackTraceLimit; + if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; + // Pass the message to the constructor instead of setting it on the object + // to make sure it is the same as the one created in C++ + // eslint-disable-next-line no-restricted-syntax + const err = new Error(message); + if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = tmpLimit; + + for (const prop of ObjectKeys(ctx)) { + if (prop === 'message' || prop === 'path' || prop === 'dest') { + continue; + } + err[prop] = ctx[prop]; + } + + err.code = code; + if (path) { + err.path = path; + } + if (dest) { + err.dest = dest; + } + + return captureLargerStackTrace(err); +}); + +/** + * This creates an error compatible with errors produced in the C++ + * This function should replace the deprecated + * `exceptionWithHostPort()` function. + * + * @param {number} err - A libuv error number + * @param {string} syscall + * @param {string} address + * @param {number} [port] + * @returns {Error} + */ +const uvExceptionWithHostPort = hideStackFrames( + function uvExceptionWithHostPort(err, syscall, address, port) { + const { 0: code, 1: uvmsg } = uvErrmapGet(err) || uvUnmappedError; + const message = `${syscall} ${code}: ${uvmsg}`; + let details = ''; + + if (port && port > 0) { + details = ` ${address}:${port}`; + } else if (address) { + details = ` ${address}`; + } + + // Reducing the limit improves the performance significantly. We do not + // lose the stack frames due to the `captureStackTrace()` function that + // is called later. + const tmpLimit = Error.stackTraceLimit; + if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; + // eslint-disable-next-line no-restricted-syntax + const ex = new Error(`${message}${details}`); + if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = tmpLimit; + ex.code = code; + ex.errno = err; + ex.syscall = syscall; + ex.address = address; + if (port) { + ex.port = port; + } + + return captureLargerStackTrace(ex); + }); + +/** + * This used to be util._errnoException(). + * + * @param {number} err - A libuv error number + * @param {string} syscall + * @param {string} [original] + * @returns {Error} + */ +const errnoException = hideStackFrames( + function errnoException(err, syscall, original) { + // TODO(joyeecheung): We have to use the type-checked + // getSystemErrorName(err) to guard against invalid arguments from users. + // This can be replaced with [ code ] = errmap.get(err) when this method + // is no longer exposed to user land. + if (util === undefined) util = require('util'); + const code = util.getSystemErrorName(err); + const message = original ? 
+ `${syscall} ${code} ${original}` : `${syscall} ${code}`; + + const tmpLimit = Error.stackTraceLimit; + if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; + // eslint-disable-next-line no-restricted-syntax + const ex = new Error(message); + if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = tmpLimit; + ex.errno = err; + ex.code = code; + ex.syscall = syscall; + + return captureLargerStackTrace(ex); + }); + +/** + * Deprecated, new function is `uvExceptionWithHostPort()` + * New function added the error description directly + * from C++. this method for backwards compatibility + * @param {number} err - A libuv error number + * @param {string} syscall + * @param {string} address + * @param {number} [port] + * @param {string} [additional] + * @returns {Error} + */ +const exceptionWithHostPort = hideStackFrames( + function exceptionWithHostPort(err, syscall, address, port, additional) { + // TODO(joyeecheung): We have to use the type-checked + // getSystemErrorName(err) to guard against invalid arguments from users. + // This can be replaced with [ code ] = errmap.get(err) when this method + // is no longer exposed to user land. + if (util === undefined) util = require('util'); + const code = util.getSystemErrorName(err); + let details = ''; + if (port && port > 0) { + details = ` ${address}:${port}`; + } else if (address) { + details = ` ${address}`; + } + if (additional) { + details += ` - Local (${additional})`; + } + + // Reducing the limit improves the performance significantly. We do not + // lose the stack frames due to the `captureStackTrace()` function that + // is called later. + const tmpLimit = Error.stackTraceLimit; + if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; + // eslint-disable-next-line no-restricted-syntax + const ex = new Error(`${syscall} ${code}${details}`); + if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = tmpLimit; + ex.errno = err; + ex.code = code; + ex.syscall = syscall; + ex.address = address; + if (port) { + ex.port = port; + } + + return captureLargerStackTrace(ex); + }); + +/** + * @param {number|string} code - A libuv error number or a c-ares error code + * @param {string} syscall + * @param {string} [hostname] + * @returns {Error} + */ +const dnsException = hideStackFrames(function(code, syscall, hostname) { + let errno; + // If `code` is of type number, it is a libuv error number, else it is a + // c-ares error code. + // TODO(joyeecheung): translate c-ares error codes into numeric ones and + // make them available in a property that's not error.errno (since they + // can be in conflict with libuv error codes). Also make sure + // util.getSystemErrorName() can understand them when an being informed that + // the number is a c-ares error code. + if (typeof code === 'number') { + errno = code; + // ENOTFOUND is not a proper POSIX error, but this error has been in place + // long enough that it's not practical to remove it. + if (code === lazyUv().UV_EAI_NODATA || code === lazyUv().UV_EAI_NONAME) { + code = 'ENOTFOUND'; // Fabricated error name. + } else { + code = lazyInternalUtil().getSystemErrorName(code); + } + } + const message = `${syscall} ${code}${hostname ? ` ${hostname}` : ''}`; + // Reducing the limit improves the performance significantly. We do not lose + // the stack frames due to the `captureStackTrace()` function that is called + // later. 
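  // --- Editorial note, not part of the patch ---
  // Rough shape of the errors produced by errnoException() and
  // exceptionWithHostPort() above (the numeric errno and its name are
  // platform dependent; ENOENT / -2 is only an illustration):
  //
  //   errnoException(-2, 'open')
  //     -> Error: open ENOENT
  //        { errno: -2, code: 'ENOENT', syscall: 'open' }
  //   exceptionWithHostPort(-2, 'connect', '127.0.0.1', 8080)
  //     -> Error: connect ENOENT 127.0.0.1:8080
  //        { errno: -2, code: 'ENOENT', syscall: 'connect',
  //          address: '127.0.0.1', port: 8080 }
  // --- End editorial note ---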
+ const tmpLimit = Error.stackTraceLimit; + if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; + // eslint-disable-next-line no-restricted-syntax + const ex = new Error(message); + if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = tmpLimit; + ex.errno = errno; + ex.code = code; + ex.syscall = syscall; + if (hostname) { + ex.hostname = hostname; + } + + return captureLargerStackTrace(ex); +}); + +function connResetException(msg) { + // eslint-disable-next-line no-restricted-syntax + const ex = new Error(msg); + ex.code = 'ECONNRESET'; + return ex; +} + +let maxStack_ErrorName; +let maxStack_ErrorMessage; +/** + * Returns true if `err.name` and `err.message` are equal to engine-specific + * values indicating max call stack size has been exceeded. + * "Maximum call stack size exceeded" in V8. + * + * @param {Error} err + * @returns {boolean} + */ +function isStackOverflowError(err) { + if (maxStack_ErrorMessage === undefined) { + try { + function overflowStack() { overflowStack(); } + overflowStack(); + } catch (err) { + maxStack_ErrorMessage = err.message; + maxStack_ErrorName = err.name; + } + } + + return err && err.name === maxStack_ErrorName && + err.message === maxStack_ErrorMessage; +} + +// Only use this for integers! Decimal numbers do not work with this function. +function addNumericalSeparator(val) { + let res = ''; + let i = val.length; + const start = val[0] === '-' ? 1 : 0; + for (; i >= start + 4; i -= 3) { + res = `_${StringPrototypeSlice(val, i - 3, i)}${res}`; + } + return `${StringPrototypeSlice(val, 0, i)}${res}`; +} + +// Used to enhance the stack that will be picked up by the inspector +const kEnhanceStackBeforeInspector = Symbol('kEnhanceStackBeforeInspector'); + +// These are supposed to be called only on fatal exceptions before +// the process exits. +const fatalExceptionStackEnhancers = { + beforeInspector(error) { + if (typeof error[kEnhanceStackBeforeInspector] !== 'function') { + return error.stack; + } + + try { + // Set the error.stack here so it gets picked up by the + // inspector. + error.stack = error[kEnhanceStackBeforeInspector](); + } catch { + // We are just enhancing the error. If it fails, ignore it. + } + return error.stack; + }, + afterInspector(error) { + const originalStack = error.stack; + let useColors = true; + // Some consoles do not convert ANSI escape sequences to colors, + // rather display them directly to the stdout. On those consoles, + // libuv emulates colors by intercepting stdout stream and calling + // corresponding Windows API functions for setting console colors. + // However, fatal error are handled differently and we cannot easily + // highlight them. On Windows, detecting whether a console supports + // ANSI escape sequences is not reliable. + if (process.platform === 'win32') { + const info = internalBinding('os').getOSInformation(); + const ver = ArrayPrototypeMap(StringPrototypeSplit(info[2], '.'), + Number); + if (ver[0] !== 10 || ver[2] < 14393) { + useColors = false; + } + } + const { + inspect, + inspectDefaultOptions: { + colors: defaultColors + } + } = lazyInternalUtilInspect(); + const colors = useColors && + ((internalBinding('util').guessHandleType(2) === 'TTY' && + false) || + defaultColors); + try { + return inspect(error, { + colors, + customInspect: false, + depth: MathMax(inspect.defaultOptions.depth, 5) + }); + } catch { + return originalStack; + } + } +}; + +// Ensures the printed error line is from user code. 
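// --- Editorial note, not part of the patch ---
// Worked example for addNumericalSeparator() above: it walks the digit string
// from the end in groups of three, so
//
//   addNumericalSeparator('1234567')   // -> '1_234_567'
//   addNumericalSeparator('-1234567')  // -> '-1_234_567'
//
// As its comment says, it is only meant for integer strings.
// --- End editorial note ---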
+let _kArrowMessagePrivateSymbol, _setHiddenValue; +function setArrowMessage(err, arrowMessage) { + if (!_kArrowMessagePrivateSymbol) { + ({ + arrow_message_private_symbol: _kArrowMessagePrivateSymbol, + setHiddenValue: _setHiddenValue, + } = internalBinding('util')); + } + _setHiddenValue(err, _kArrowMessagePrivateSymbol, arrowMessage); +} + +// Hide stack lines before the first user code line. +function hideInternalStackFrames(error) { + overrideStackTrace.set(error, (error, stackFrames) => { + let frames = stackFrames; + if (typeof stackFrames === 'object') { + frames = ArrayPrototypeFilter( + stackFrames, + (frm) => !StringPrototypeStartsWith(frm.getFileName() || '', + 'node:internal') + ); + } + ArrayPrototypeUnshift(frames, error); + return ArrayPrototypeJoin(frames, '\n at '); + }); +} + +// Node uses an AbortError that isn't exactly the same as the DOMException +// to make usage of the error in userland and readable-stream easier. +// It is a regular error with `.code` and `.name`. +class AbortError extends Error { + constructor(message = 'The operation was aborted', options = undefined) { + if (options !== undefined && typeof options !== 'object') { + throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options); + } + super(message, options); + this.code = 'ABORT_ERR'; + this.name = 'AbortError'; + } +} + +/** + * This creates a generic Node.js error. + * + * @param {string} message The error message. + * @param {object} errorProperties Object with additional properties to be added to the error. + * @returns {Error} + */ +const genericNodeError = hideStackFrames(function genericNodeError(message, errorProperties) { + // eslint-disable-next-line no-restricted-syntax + const err = new Error(message); + ObjectAssign(err, errorProperties); + return err; +}); + +module.exports = { + AbortError, + aggregateTwoErrors, + captureLargerStackTrace, + codes, + connResetException, + dnsException, + // This is exported only to facilitate testing. + E, + errnoException, + exceptionWithHostPort, + fatalExceptionStackEnhancers, + genericNodeError, + getMessage, + hideInternalStackFrames, + hideStackFrames, + isErrorStackTraceLimitWritable, + isStackOverflowError, + kEnhanceStackBeforeInspector, + kIsNodeError, + kNoOverride, + maybeOverridePrepareStackTrace, + overrideStackTrace, + prepareStackTrace, + setArrowMessage, + SystemError, + uvErrmapGet, + uvException, + uvExceptionWithHostPort, +}; + +// To declare an error message, use the E(sym, val, def) function above. The sym +// must be an upper case string. The val can be either a function or a string. +// The def must be an error class. +// The return value of the function must be a string. +// Examples: +// E('EXAMPLE_KEY1', 'This is the error value', Error); +// E('EXAMPLE_KEY2', (a, b) => return `${a} ${b}`, RangeError); +// +// Once an error code has been assigned, the code itself MUST NOT change and +// any given error code must never be reused to identify a different error. +// +// Any error code added here should also be added to the documentation +// +// Note: Please try to keep these in alphabetical order +// +// Note: Node.js specific errors must begin with the prefix ERR_ + +E('ERR_AMBIGUOUS_ARGUMENT', 'The "%s" argument is ambiguous. 
%s', TypeError); +E('ERR_ARG_NOT_ITERABLE', '%s must be iterable', TypeError); +E('ERR_ASSERTION', '%s', Error); +E('ERR_ASYNC_CALLBACK', '%s must be a function', TypeError); +E('ERR_ASYNC_TYPE', 'Invalid name for async "type": %s', TypeError); +E('ERR_BROTLI_INVALID_PARAM', '%s is not a valid Brotli parameter', RangeError); +E('ERR_BUFFER_OUT_OF_BOUNDS', + // Using a default argument here is important so the argument is not counted + // towards `Function#length`. + (name = undefined) => { + if (name) { + return `"${name}" is outside of buffer bounds`; + } + return 'Attempt to access memory outside buffer bounds'; + }, RangeError); +E('ERR_BUFFER_TOO_LARGE', + 'Cannot create a Buffer larger than %s bytes', + RangeError); +E('ERR_CANNOT_WATCH_SIGINT', 'Cannot watch for SIGINT signals', Error); +E('ERR_CHILD_CLOSED_BEFORE_REPLY', + 'Child closed before reply received', Error); +E('ERR_CHILD_PROCESS_IPC_REQUIRED', + "Forked processes must have an IPC channel, missing value 'ipc' in %s", + Error); +E('ERR_CHILD_PROCESS_STDIO_MAXBUFFER', '%s maxBuffer length exceeded', + RangeError); +E('ERR_CONSOLE_WRITABLE_STREAM', + 'Console expects a writable stream instance for %s', TypeError); +E('ERR_CONTEXT_NOT_INITIALIZED', 'context used is not initialized', Error); +E('ERR_CRYPTO_CUSTOM_ENGINE_NOT_SUPPORTED', + 'Custom engines not supported by this OpenSSL', Error); +E('ERR_CRYPTO_ECDH_INVALID_FORMAT', 'Invalid ECDH format: %s', TypeError); +E('ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY', + 'Public key is not valid for specified curve', Error); +E('ERR_CRYPTO_ENGINE_UNKNOWN', 'Engine "%s" was not found', Error); +E('ERR_CRYPTO_FIPS_FORCED', + 'Cannot set FIPS mode, it was forced with --force-fips at startup.', Error); +E('ERR_CRYPTO_FIPS_UNAVAILABLE', 'Cannot set FIPS mode in a non-FIPS build.', + Error); +E('ERR_CRYPTO_HASH_FINALIZED', 'Digest already called', Error); +E('ERR_CRYPTO_HASH_UPDATE_FAILED', 'Hash update failed', Error); +E('ERR_CRYPTO_INCOMPATIBLE_KEY', 'Incompatible %s: %s', Error); +E('ERR_CRYPTO_INCOMPATIBLE_KEY_OPTIONS', 'The selected key encoding %s %s.', + Error); +E('ERR_CRYPTO_INVALID_DIGEST', 'Invalid digest: %s', TypeError); +E('ERR_CRYPTO_INVALID_JWK', 'Invalid JWK data', TypeError); +E('ERR_CRYPTO_INVALID_KEY_OBJECT_TYPE', + 'Invalid key object type %s, expected %s.', TypeError); +E('ERR_CRYPTO_INVALID_STATE', 'Invalid state for operation %s', Error); +E('ERR_CRYPTO_PBKDF2_ERROR', 'PBKDF2 error', Error); +E('ERR_CRYPTO_SCRYPT_INVALID_PARAMETER', 'Invalid scrypt parameter', Error); +E('ERR_CRYPTO_SCRYPT_NOT_SUPPORTED', 'Scrypt algorithm not supported', Error); +// Switch to TypeError. The current implementation does not seem right. 
+E('ERR_CRYPTO_SIGN_KEY_REQUIRED', 'No key provided to sign', Error); +E('ERR_DEBUGGER_ERROR', '%s', Error); +E('ERR_DEBUGGER_STARTUP_ERROR', '%s', Error); +E('ERR_DIR_CLOSED', 'Directory handle was closed', Error); +E('ERR_DIR_CONCURRENT_OPERATION', + 'Cannot do synchronous work on directory handle with concurrent ' + + 'asynchronous operations', Error); +E('ERR_DNS_SET_SERVERS_FAILED', 'c-ares failed to set servers: "%s" [%s]', + Error); +E('ERR_DOMAIN_CALLBACK_NOT_AVAILABLE', + 'A callback was registered through ' + + 'process.setUncaughtExceptionCaptureCallback(), which is mutually ' + + 'exclusive with using the `domain` module', + Error); +E('ERR_DOMAIN_CANNOT_SET_UNCAUGHT_EXCEPTION_CAPTURE', + 'The `domain` module is in use, which is mutually exclusive with calling ' + + 'process.setUncaughtExceptionCaptureCallback()', + Error); +E('ERR_ENCODING_INVALID_ENCODED_DATA', function(encoding, ret) { + this.errno = ret; + return `The encoded data was not valid for encoding ${encoding}`; +}, TypeError); +E('ERR_ENCODING_NOT_SUPPORTED', 'The "%s" encoding is not supported', + RangeError); +E('ERR_EVAL_ESM_CANNOT_PRINT', '--print cannot be used with ESM input', Error); +E('ERR_EVENT_RECURSION', 'The event "%s" is already being dispatched', Error); +E('ERR_FALSY_VALUE_REJECTION', function(reason) { + this.reason = reason; + return 'Promise was rejected with falsy value'; +}, Error); +E('ERR_FEATURE_UNAVAILABLE_ON_PLATFORM', + 'The feature %s is unavailable on the current platform' + + ', which is being used to run Node.js', + TypeError); +E('ERR_FS_CP_DIR_TO_NON_DIR', + 'Cannot overwrite directory with non-directory', SystemError); +E('ERR_FS_CP_EEXIST', 'Target already exists', SystemError); +E('ERR_FS_CP_EINVAL', 'Invalid src or dest', SystemError); +E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe', SystemError); +E('ERR_FS_CP_NON_DIR_TO_DIR', + 'Cannot overwrite non-directory with directory', SystemError); +E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file', SystemError); +E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', + 'Cannot overwrite symlink in subdirectory of self', SystemError); +E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type', SystemError); +E('ERR_FS_EISDIR', 'Path is a directory', SystemError); +E('ERR_FS_FILE_TOO_LARGE', 'File size (%s) is greater than 2 GB', RangeError); +E('ERR_FS_INVALID_SYMLINK_TYPE', + 'Symlink type must be one of "dir", "file", or "junction". Received "%s"', + Error); // Switch to TypeError. 
The current implementation does not seem right +E('ERR_HTTP2_ALTSVC_INVALID_ORIGIN', + 'HTTP/2 ALTSVC frames require a valid origin', TypeError); +E('ERR_HTTP2_ALTSVC_LENGTH', + 'HTTP/2 ALTSVC frames are limited to 16382 bytes', TypeError); +E('ERR_HTTP2_CONNECT_AUTHORITY', + ':authority header is required for CONNECT requests', Error); +E('ERR_HTTP2_CONNECT_PATH', + 'The :path header is forbidden for CONNECT requests', Error); +E('ERR_HTTP2_CONNECT_SCHEME', + 'The :scheme header is forbidden for CONNECT requests', Error); +E('ERR_HTTP2_GOAWAY_SESSION', + 'New streams cannot be created after receiving a GOAWAY', Error); +E('ERR_HTTP2_HEADERS_AFTER_RESPOND', + 'Cannot specify additional headers after response initiated', Error); +E('ERR_HTTP2_HEADERS_SENT', 'Response has already been initiated.', Error); +E('ERR_HTTP2_HEADER_SINGLE_VALUE', + 'Header field "%s" must only have a single value', TypeError); +E('ERR_HTTP2_INFO_STATUS_NOT_ALLOWED', + 'Informational status codes cannot be used', RangeError); +E('ERR_HTTP2_INVALID_CONNECTION_HEADERS', + 'HTTP/1 Connection specific headers are forbidden: "%s"', TypeError); +E('ERR_HTTP2_INVALID_HEADER_VALUE', + 'Invalid value "%s" for header "%s"', TypeError); +E('ERR_HTTP2_INVALID_INFO_STATUS', + 'Invalid informational status code: %s', RangeError); +E('ERR_HTTP2_INVALID_ORIGIN', + 'HTTP/2 ORIGIN frames require a valid origin', TypeError); +E('ERR_HTTP2_INVALID_PACKED_SETTINGS_LENGTH', + 'Packed settings length must be a multiple of six', RangeError); +E('ERR_HTTP2_INVALID_PSEUDOHEADER', + '"%s" is an invalid pseudoheader or is used incorrectly', TypeError); +E('ERR_HTTP2_INVALID_SESSION', 'The session has been destroyed', Error); +E('ERR_HTTP2_INVALID_SETTING_VALUE', + // Using default arguments here is important so the arguments are not counted + // towards `Function#length`. + function(name, actual, min = undefined, max = undefined) { + this.actual = actual; + if (min !== undefined) { + this.min = min; + this.max = max; + } + return `Invalid value for setting "${name}": ${actual}`; + }, TypeError, RangeError); +E('ERR_HTTP2_INVALID_STREAM', 'The stream has been destroyed', Error); +E('ERR_HTTP2_MAX_PENDING_SETTINGS_ACK', + 'Maximum number of pending settings acknowledgements', Error); +E('ERR_HTTP2_NESTED_PUSH', + 'A push stream cannot initiate another push stream.', Error); +E('ERR_HTTP2_NO_MEM', 'Out of memory', Error); +E('ERR_HTTP2_NO_SOCKET_MANIPULATION', + 'HTTP/2 sockets should not be directly manipulated (e.g. 
read and written)', + Error); +E('ERR_HTTP2_ORIGIN_LENGTH', + 'HTTP/2 ORIGIN frames are limited to 16382 bytes', TypeError); +E('ERR_HTTP2_OUT_OF_STREAMS', + 'No stream ID is available because maximum stream ID has been reached', + Error); +E('ERR_HTTP2_PAYLOAD_FORBIDDEN', + 'Responses with %s status must not have a payload', Error); +E('ERR_HTTP2_PING_CANCEL', 'HTTP2 ping cancelled', Error); +E('ERR_HTTP2_PING_LENGTH', 'HTTP2 ping payload must be 8 bytes', RangeError); +E('ERR_HTTP2_PSEUDOHEADER_NOT_ALLOWED', + 'Cannot set HTTP/2 pseudo-headers', TypeError); +E('ERR_HTTP2_PUSH_DISABLED', 'HTTP/2 client has disabled push streams', Error); +E('ERR_HTTP2_SEND_FILE', 'Directories cannot be sent', Error); +E('ERR_HTTP2_SEND_FILE_NOSEEK', + 'Offset or length can only be specified for regular files', Error); +E('ERR_HTTP2_SESSION_ERROR', 'Session closed with error code %s', Error); +E('ERR_HTTP2_SETTINGS_CANCEL', 'HTTP2 session settings canceled', Error); +E('ERR_HTTP2_SOCKET_BOUND', + 'The socket is already bound to an Http2Session', Error); +E('ERR_HTTP2_SOCKET_UNBOUND', + 'The socket has been disconnected from the Http2Session', Error); +E('ERR_HTTP2_STATUS_101', + 'HTTP status code 101 (Switching Protocols) is forbidden in HTTP/2', Error); +E('ERR_HTTP2_STATUS_INVALID', 'Invalid status code: %s', RangeError); +E('ERR_HTTP2_STREAM_CANCEL', function(error) { + let msg = 'The pending stream has been canceled'; + if (error) { + this.cause = error; + if (typeof error.message === 'string') + msg += ` (caused by: ${error.message})`; + } + return msg; +}, Error); +E('ERR_HTTP2_STREAM_ERROR', 'Stream closed with error code %s', Error); +E('ERR_HTTP2_STREAM_SELF_DEPENDENCY', + 'A stream cannot depend on itself', Error); +E('ERR_HTTP2_TOO_MANY_INVALID_FRAMES', 'Too many invalid HTTP/2 frames', Error); +E('ERR_HTTP2_TRAILERS_ALREADY_SENT', + 'Trailing headers have already been sent', Error); +E('ERR_HTTP2_TRAILERS_NOT_READY', + 'Trailing headers cannot be sent until after the wantTrailers event is ' + + 'emitted', Error); +E('ERR_HTTP2_UNSUPPORTED_PROTOCOL', 'protocol "%s" is unsupported.', Error); +E('ERR_HTTP_HEADERS_SENT', + 'Cannot %s headers after they are sent to the client', Error); +E('ERR_HTTP_INVALID_HEADER_VALUE', + 'Invalid value "%s" for header "%s"', TypeError); +E('ERR_HTTP_INVALID_STATUS_CODE', 'Invalid status code: %s', RangeError); +E('ERR_HTTP_REQUEST_TIMEOUT', 'Request timeout', Error); +E('ERR_HTTP_SOCKET_ENCODING', + 'Changing the socket encoding is not allowed per RFC7230 Section 3.', Error); +E('ERR_HTTP_TRAILER_INVALID', + 'Trailers are invalid with this transfer encoding', Error); +E('ERR_ILLEGAL_CONSTRUCTOR', 'Illegal constructor', TypeError); +E('ERR_IMPORT_ASSERTION_TYPE_FAILED', + 'Module "%s" is not of type "%s"', TypeError); +E('ERR_IMPORT_ASSERTION_TYPE_MISSING', + 'Module "%s" needs an import assertion of type "%s"', TypeError); +E('ERR_IMPORT_ASSERTION_TYPE_UNSUPPORTED', + 'Import assertion type "%s" is unsupported', TypeError); +E('ERR_INCOMPATIBLE_OPTION_PAIR', + 'Option "%s" cannot be used in combination with option "%s"', TypeError); +E('ERR_INPUT_TYPE_NOT_ALLOWED', '--input-type can only be used with string ' + + 'input via --eval, --print, or STDIN', Error); +E('ERR_INSPECTOR_ALREADY_ACTIVATED', + 'Inspector is already activated. 
Close it with inspector.close() ' + + 'before activating it again.', + Error); +E('ERR_INSPECTOR_ALREADY_CONNECTED', '%s is already connected', Error); +E('ERR_INSPECTOR_CLOSED', 'Session was closed', Error); +E('ERR_INSPECTOR_COMMAND', 'Inspector error %d: %s', Error); +E('ERR_INSPECTOR_NOT_ACTIVE', 'Inspector is not active', Error); +E('ERR_INSPECTOR_NOT_AVAILABLE', 'Inspector is not available', Error); +E('ERR_INSPECTOR_NOT_CONNECTED', 'Session is not connected', Error); +E('ERR_INSPECTOR_NOT_WORKER', 'Current thread is not a worker', Error); +E('ERR_INTERNAL_ASSERTION', (message) => { + const suffix = 'This is caused by either a bug in Node.js ' + + 'or incorrect usage of Node.js internals.\n' + + 'Please open an issue with this stack trace at ' + + 'https://github.com/nodejs/node/issues\n'; + return message === undefined ? suffix : `${message}\n${suffix}`; +}, Error); +E('ERR_INVALID_ADDRESS_FAMILY', function(addressType, host, port) { + this.host = host; + this.port = port; + return `Invalid address family: ${addressType} ${host}:${port}`; +}, RangeError); +E('ERR_INVALID_ARG_TYPE', + (name, expected, actual) => { + assert(typeof name === 'string', "'name' must be a string"); + if (!ArrayIsArray(expected)) { + expected = [expected]; + } + + let msg = 'The '; + if (StringPrototypeEndsWith(name, ' argument')) { + // For cases like 'first argument' + msg += `${name} `; + } else { + const type = StringPrototypeIncludes(name, '.') ? 'property' : 'argument'; + msg += `"${name}" ${type} `; + } + msg += 'must be '; + + const types = []; + const instances = []; + const other = []; + + for (const value of expected) { + assert(typeof value === 'string', + 'All expected entries have to be of type string'); + if (ArrayPrototypeIncludes(kTypes, value)) { + ArrayPrototypePush(types, StringPrototypeToLowerCase(value)); + } else if (RegExpPrototypeTest(classRegExp, value)) { + ArrayPrototypePush(instances, value); + } else { + assert(value !== 'object', + 'The value "object" should be written as "Object"'); + ArrayPrototypePush(other, value); + } + } + + // Special handle `object` in case other instances are allowed to outline + // the differences between each other. + if (instances.length > 0) { + const pos = ArrayPrototypeIndexOf(types, 'object'); + if (pos !== -1) { + ArrayPrototypeSplice(types, pos, 1); + ArrayPrototypePush(instances, 'Object'); + } + } + + if (types.length > 0) { + if (types.length > 2) { + const last = ArrayPrototypePop(types); + msg += `one of type ${ArrayPrototypeJoin(types, ', ')}, or ${last}`; + } else if (types.length === 2) { + msg += `one of type ${types[0]} or ${types[1]}`; + } else { + msg += `of type ${types[0]}`; + } + if (instances.length > 0 || other.length > 0) + msg += ' or '; + } + + if (instances.length > 0) { + if (instances.length > 2) { + const last = ArrayPrototypePop(instances); + msg += + `an instance of ${ArrayPrototypeJoin(instances, ', ')}, or ${last}`; + } else { + msg += `an instance of ${instances[0]}`; + if (instances.length === 2) { + msg += ` or ${instances[1]}`; + } + } + if (other.length > 0) + msg += ' or '; + } + + if (other.length > 0) { + if (other.length > 2) { + const last = ArrayPrototypePop(other); + msg += `one of ${ArrayPrototypeJoin(other, ', ')}, or ${last}`; + } else if (other.length === 2) { + msg += `one of ${other[0]} or ${other[1]}`; + } else { + if (StringPrototypeToLowerCase(other[0]) !== other[0]) + msg += 'an '; + msg += `${other[0]}`; + } + } + + if (actual == null) { + msg += `. 
Received ${actual}`; + } else if (typeof actual === 'function' && actual.name) { + msg += `. Received function ${actual.name}`; + } else if (typeof actual === 'object') { + if (actual.constructor && actual.constructor.name) { + msg += `. Received an instance of ${actual.constructor.name}`; + } else { + const inspected = lazyInternalUtilInspect() + .inspect(actual, { depth: -1 }); + msg += `. Received ${inspected}`; + } + } else { + let inspected = lazyInternalUtilInspect() + .inspect(actual, { colors: false }); + if (inspected.length > 25) + inspected = `${StringPrototypeSlice(inspected, 0, 25)}...`; + msg += `. Received type ${typeof actual} (${inspected})`; + } + return msg; + }, TypeError); +E('ERR_INVALID_ARG_VALUE', (name, value, reason = 'is invalid') => { + let inspected = lazyInternalUtilInspect().inspect(value); + if (inspected.length > 128) { + inspected = `${StringPrototypeSlice(inspected, 0, 128)}...`; + } + const type = StringPrototypeIncludes(name, '.') ? 'property' : 'argument'; + return `The ${type} '${name}' ${reason}. Received ${inspected}`; +}, TypeError, RangeError); +E('ERR_INVALID_ASYNC_ID', 'Invalid %s value: %s', RangeError); +E('ERR_INVALID_BUFFER_SIZE', + 'Buffer size must be a multiple of %s', RangeError); +E('ERR_INVALID_CALLBACK', + 'Callback must be a function. Received %O', TypeError); +E('ERR_INVALID_CHAR', + // Using a default argument here is important so the argument is not counted + // towards `Function#length`. + (name, field = undefined) => { + let msg = `Invalid character in ${name}`; + if (field !== undefined) { + msg += ` ["${field}"]`; + } + return msg; + }, TypeError); +E('ERR_INVALID_CURSOR_POS', + 'Cannot set cursor row without setting its column', TypeError); +E('ERR_INVALID_FD', + '"fd" must be a positive integer: %s', RangeError); +E('ERR_INVALID_FD_TYPE', 'Unsupported fd type: %s', TypeError); +E('ERR_INVALID_FILE_URL_HOST', + 'File URL host must be "localhost" or empty on %s', TypeError); +E('ERR_INVALID_FILE_URL_PATH', 'File URL path %s', TypeError); +E('ERR_INVALID_HANDLE_TYPE', 'This handle type cannot be sent', TypeError); +E('ERR_INVALID_HTTP_TOKEN', '%s must be a valid HTTP token ["%s"]', TypeError); +E('ERR_INVALID_IP_ADDRESS', 'Invalid IP address: %s', TypeError); +E('ERR_INVALID_MODULE_SPECIFIER', (request, reason, base = undefined) => { + return `Invalid module "${request}" ${reason}${base ? + ` imported from ${base}` : ''}`; +}, TypeError); +E('ERR_INVALID_PACKAGE_CONFIG', (path, base, message) => { + return `Invalid package config ${path}${base ? ` while importing ${base}` : + ''}${message ? `. ${message}` : ''}`; +}, Error); +E('ERR_INVALID_PACKAGE_TARGET', + (pkgPath, key, target, isImport = false, base = undefined) => { + const relError = typeof target === 'string' && !isImport && + target.length && !StringPrototypeStartsWith(target, './'); + if (key === '.') { + assert(isImport === false); + return `Invalid "exports" main target ${JSONStringify(target)} defined ` + + `in the package config ${pkgPath}package.json${base ? + ` imported from ${base}` : ''}${relError ? + '; targets must start with "./"' : ''}`; + } + return `Invalid "${isImport ? 'imports' : 'exports'}" target ${ + JSONStringify(target)} defined for '${key}' in the package config ${ + pkgPath}package.json${base ? ` imported from ${base}` : ''}${relError ? + '; targets must start with "./"' : ''}`; + }, Error); +E('ERR_INVALID_PROTOCOL', + 'Protocol "%s" not supported. 
Expected "%s"', + TypeError); +E('ERR_INVALID_REPL_EVAL_CONFIG', + 'Cannot specify both "breakEvalOnSigint" and "eval" for REPL', TypeError); +E('ERR_INVALID_REPL_INPUT', '%s', TypeError); +E('ERR_INVALID_RETURN_PROPERTY', (input, name, prop, value) => { + return `Expected a valid ${input} to be returned for the "${prop}" from the` + + ` "${name}" function but got ${value}.`; +}, TypeError); +E('ERR_INVALID_RETURN_PROPERTY_VALUE', (input, name, prop, value) => { + let type; + if (value && value.constructor && value.constructor.name) { + type = `instance of ${value.constructor.name}`; + } else { + type = `type ${typeof value}`; + } + return `Expected ${input} to be returned for the "${prop}" from the` + + ` "${name}" function but got ${type}.`; +}, TypeError); +E('ERR_INVALID_RETURN_VALUE', (input, name, value) => { + let type; + if (value && value.constructor && value.constructor.name) { + type = `instance of ${value.constructor.name}`; + } else { + type = `type ${typeof value}`; + } + return `Expected ${input} to be returned from the "${name}"` + + ` function but got ${type}.`; +}, TypeError, RangeError); +E('ERR_INVALID_STATE', 'Invalid state: %s', Error, TypeError, RangeError); +E('ERR_INVALID_SYNC_FORK_INPUT', + 'Asynchronous forks do not support ' + + 'Buffer, TypedArray, DataView or string input: %s', + TypeError); +E('ERR_INVALID_THIS', 'Value of "this" must be of type %s', TypeError); +E('ERR_INVALID_TUPLE', '%s must be an iterable %s tuple', TypeError); +E('ERR_INVALID_URI', 'URI malformed', URIError); +E('ERR_INVALID_URL', function(input) { + this.input = input; + // Don't include URL in message. + // (See https://github.com/nodejs/node/pull/38614) + return 'Invalid URL'; +}, TypeError); +E('ERR_INVALID_URL_SCHEME', + (expected) => { + if (typeof expected === 'string') + expected = [expected]; + assert(expected.length <= 2); + const res = expected.length === 2 ? 
+ `one of scheme ${expected[0]} or ${expected[1]}` : + `of scheme ${expected[0]}`; + return `The URL must be ${res}`; + }, TypeError); +E('ERR_IPC_CHANNEL_CLOSED', 'Channel closed', Error); +E('ERR_IPC_DISCONNECTED', 'IPC channel is already disconnected', Error); +E('ERR_IPC_ONE_PIPE', 'Child process can have only one IPC pipe', Error); +E('ERR_IPC_SYNC_FORK', 'IPC cannot be used with synchronous forks', Error); +E('ERR_MANIFEST_ASSERT_INTEGRITY', + (moduleURL, realIntegrities) => { + let msg = `The content of "${ + moduleURL + }" does not match the expected integrity.`; + if (realIntegrities.size) { + const sri = ArrayPrototypeJoin( + ArrayFrom(realIntegrities.entries(), + ({ 0: alg, 1: dgs }) => `${alg}-${dgs}`), + ' ' + ); + msg += ` Integrities found are: ${sri}`; + } else { + msg += ' The resource was not found in the policy.'; + } + return msg; + }, Error); +E('ERR_MANIFEST_DEPENDENCY_MISSING', + 'Manifest resource %s does not list %s as a dependency specifier for ' + + 'conditions: %s', + Error); +E('ERR_MANIFEST_INTEGRITY_MISMATCH', + 'Manifest resource %s has multiple entries but integrity lists do not match', + SyntaxError); +E('ERR_MANIFEST_INVALID_RESOURCE_FIELD', + 'Manifest resource %s has invalid property value for %s', + TypeError); +E('ERR_MANIFEST_INVALID_SPECIFIER', + 'Manifest resource %s has invalid dependency mapping %s', + TypeError); +E('ERR_MANIFEST_TDZ', 'Manifest initialization has not yet run', Error); +E('ERR_MANIFEST_UNKNOWN_ONERROR', + 'Manifest specified unknown error behavior "%s".', + SyntaxError); +E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error); +E('ERR_MISSING_ARGS', + (...args) => { + assert(args.length > 0, 'At least one arg needs to be specified'); + let msg = 'The '; + const len = args.length; + const wrap = (a) => `"${a}"`; + args = ArrayPrototypeMap( + args, + (a) => (ArrayIsArray(a) ? 
+ ArrayPrototypeJoin(ArrayPrototypeMap(a, wrap), ' or ') : + wrap(a)) + ); + switch (len) { + case 1: + msg += `${args[0]} argument`; + break; + case 2: + msg += `${args[0]} and ${args[1]} arguments`; + break; + default: + msg += ArrayPrototypeJoin(ArrayPrototypeSlice(args, 0, len - 1), ', '); + msg += `, and ${args[len - 1]} arguments`; + break; + } + return `${msg} must be specified`; + }, TypeError); +E('ERR_MISSING_OPTION', '%s is required', TypeError); +E('ERR_MODULE_NOT_FOUND', (path, base, type = 'package') => { + return `Cannot find ${type} '${path}' imported from ${base}`; +}, Error); +E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error); +E('ERR_NAPI_CONS_FUNCTION', 'Constructor must be a function', TypeError); +E('ERR_NAPI_INVALID_DATAVIEW_ARGS', + 'byte_offset + byte_length should be less than or equal to the size in ' + + 'bytes of the array passed in', + RangeError); +E('ERR_NAPI_INVALID_TYPEDARRAY_ALIGNMENT', + 'start offset of %s should be a multiple of %s', RangeError); +E('ERR_NAPI_INVALID_TYPEDARRAY_LENGTH', + 'Invalid typed array length', RangeError); +E('ERR_NETWORK_IMPORT_BAD_RESPONSE', + "import '%s' received a bad response: %s", Error); +E('ERR_NETWORK_IMPORT_DISALLOWED', + "import of '%s' by %s is not supported: %s", Error); +E('ERR_NO_CRYPTO', + 'Node.js is not compiled with OpenSSL crypto support', Error); +E('ERR_NO_ICU', + '%s is not supported on Node.js compiled without ICU', TypeError); +E('ERR_OPERATION_FAILED', 'Operation failed: %s', Error, TypeError); +E('ERR_OUT_OF_RANGE', + (str, range, input, replaceDefaultBoolean = false) => { + assert(range, 'Missing "range" argument'); + let msg = replaceDefaultBoolean ? str : + `The value of "${str}" is out of range.`; + let received; + if (NumberIsInteger(input) && MathAbs(input) > 2 ** 32) { + received = addNumericalSeparator(String(input)); + } else if (typeof input === 'bigint') { + received = String(input); + if (input > 2n ** 32n || input < -(2n ** 32n)) { + received = addNumericalSeparator(received); + } + received += 'n'; + } else { + received = lazyInternalUtilInspect().inspect(input); + } + msg += ` It must be ${range}. Received ${received}`; + return msg; + }, RangeError); +E('ERR_PACKAGE_IMPORT_NOT_DEFINED', (specifier, packagePath, base) => { + return `Package import specifier "${specifier}" is not defined${packagePath ? + ` in package ${packagePath}package.json` : ''} imported from ${base}`; +}, TypeError); +E('ERR_PACKAGE_PATH_NOT_EXPORTED', (pkgPath, subpath, base = undefined) => { + if (subpath === '.') + return `No "exports" main defined in ${pkgPath}package.json${base ? + ` imported from ${base}` : ''}`; + return `Package subpath '${subpath}' is not defined by "exports" in ${ + pkgPath}package.json${base ? ` imported from ${base}` : ''}`; +}, Error); +E('ERR_PERFORMANCE_INVALID_TIMESTAMP', + '%d is not a valid timestamp', TypeError); +E('ERR_PERFORMANCE_MEASURE_INVALID_OPTIONS', '%s', TypeError); +E('ERR_REQUIRE_ESM', + function(filename, hasEsmSyntax, parentPath = null, packageJsonPath = null) { + hideInternalStackFrames(this); + let msg = `require() of ES Module ${filename}${parentPath ? ` from ${ + parentPath}` : ''} not supported.`; + if (!packageJsonPath) { + if (StringPrototypeEndsWith(filename, '.mjs')) + msg += `\nInstead change the require of ${filename} to a dynamic ` + + 'import() which is available in all CommonJS modules.'; + return msg; + } + const path = require('path'); + const basename = parentPath && path.basename(filename) === + path.basename(parentPath) ? 
filename : path.basename(filename); + if (hasEsmSyntax) { + msg += `\nInstead change the require of ${basename} in ${parentPath} to` + + ' a dynamic import() which is available in all CommonJS modules.'; + return msg; + } + msg += `\n${basename} is treated as an ES module file as it is a .js ` + + 'file whose nearest parent package.json contains "type": "module" ' + + 'which declares all .js files in that package scope as ES modules.' + + `\nInstead rename ${basename} to end in .cjs, change the requiring ` + + 'code to use dynamic import() which is available in all CommonJS ' + + 'modules, or change "type": "module" to "type": "commonjs" in ' + + `${packageJsonPath} to treat all .js files as CommonJS (using .mjs for ` + + 'all ES modules instead).\n'; + return msg; + }, Error); +E('ERR_SCRIPT_EXECUTION_INTERRUPTED', + 'Script execution was interrupted by `SIGINT`', Error); +E('ERR_SERVER_ALREADY_LISTEN', + 'Listen method has been called more than once without closing.', Error); +E('ERR_SERVER_NOT_RUNNING', 'Server is not running.', Error); +E('ERR_SOCKET_ALREADY_BOUND', 'Socket is already bound', Error); +E('ERR_SOCKET_BAD_BUFFER_SIZE', + 'Buffer size must be a positive integer', TypeError); +E('ERR_SOCKET_BAD_PORT', (name, port, allowZero = true) => { + assert(typeof allowZero === 'boolean', + "The 'allowZero' argument must be of type boolean."); + const operator = allowZero ? '>=' : '>'; + return `${name} should be ${operator} 0 and < 65536. Received ${port}.`; +}, RangeError); +E('ERR_SOCKET_BAD_TYPE', + 'Bad socket type specified. Valid types are: udp4, udp6', TypeError); +E('ERR_SOCKET_BUFFER_SIZE', + 'Could not get or set buffer size', + SystemError); +E('ERR_SOCKET_CLOSED', 'Socket is closed', Error); +E('ERR_SOCKET_DGRAM_IS_CONNECTED', 'Already connected', Error); +E('ERR_SOCKET_DGRAM_NOT_CONNECTED', 'Not connected', Error); +E('ERR_SOCKET_DGRAM_NOT_RUNNING', 'Not running', Error); +E('ERR_SRI_PARSE', + 'Subresource Integrity string %j had an unexpected %j at position %d', + SyntaxError); +E('ERR_STREAM_ALREADY_FINISHED', + 'Cannot call %s after a stream was finished', + Error); +E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error); +E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error); +E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +E('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error); +E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error); +E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', + 'stream.unshift() after end event', Error); +E('ERR_STREAM_WRAP', 'Stream has StringDecoder set or is in objectMode', Error); +E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error); +E('ERR_SYNTHETIC', 'JavaScript Callstack', Error); +E('ERR_SYSTEM_ERROR', 'A system error occurred', SystemError); +E('ERR_TLS_CERT_ALTNAME_FORMAT', 'Invalid subject alternative name string', + SyntaxError); +E('ERR_TLS_CERT_ALTNAME_INVALID', function(reason, host, cert) { + this.reason = reason; + this.host = host; + this.cert = cert; + return `Hostname/IP does not match certificate's altnames: ${reason}`; +}, Error); +E('ERR_TLS_DH_PARAM_SIZE', 'DH parameter size %s is less than 2048', Error); +E('ERR_TLS_HANDSHAKE_TIMEOUT', 'TLS handshake timeout', Error); +E('ERR_TLS_INVALID_CONTEXT', '%s must be a SecureContext', TypeError); +E('ERR_TLS_INVALID_PROTOCOL_VERSION', + '%j is not a valid %s TLS protocol version', TypeError); +E('ERR_TLS_INVALID_STATE', 'TLS socket connection must be securely established', + Error); 
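+// Illustrative note (not part of the upstream errors.js source): assuming this
+// port keeps the upstream Node.js behaviour where each E() call above registers
+// a constructor on the exported `codes` object (the same `codes` object that
+// lib/internal/inspect-browser.js below destructures via require('./errors')),
+// a registered code would be used roughly like this, with the '%s'/'%d'
+// placeholders or the message-builder functions filled in from the constructor
+// arguments:
+//
+//   const { codes } = require('./errors');
+//   // ERR_INVALID_ARG_TYPE's message builder takes (name, expected, actual):
+//   const err = new codes.ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], 42);
+//   // err is a TypeError with err.code === 'ERR_INVALID_ARG_TYPE' and, per the
+//   // builder above, a message of the form:
+//   //   The "chunk" argument must be of type string or an instance of Buffer.
+//   //   Received type number (42)
+//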
+E('ERR_TLS_PROTOCOL_VERSION_CONFLICT', + 'TLS protocol version %j conflicts with secureProtocol %j', TypeError); +E('ERR_TLS_RENEGOTIATION_DISABLED', + 'TLS session renegotiation disabled for this socket', Error); + +// This should probably be a `TypeError`. +E('ERR_TLS_REQUIRED_SERVER_NAME', + '"servername" is required parameter for Server.addContext', Error); +E('ERR_TLS_SESSION_ATTACK', 'TLS session renegotiation attack detected', Error); +E('ERR_TLS_SNI_FROM_SERVER', + 'Cannot issue SNI from a TLS server-side socket', Error); +E('ERR_TRACE_EVENTS_CATEGORY_REQUIRED', + 'At least one category is required', TypeError); +E('ERR_TRACE_EVENTS_UNAVAILABLE', 'Trace events are unavailable', Error); + +// This should probably be a `RangeError`. +E('ERR_TTY_INIT_FAILED', 'TTY initialization failed', SystemError); +E('ERR_UNAVAILABLE_DURING_EXIT', 'Cannot call function in process exit ' + + 'handler', Error); +E('ERR_UNCAUGHT_EXCEPTION_CAPTURE_ALREADY_SET', + '`process.setupUncaughtExceptionCapture()` was called while a capture ' + + 'callback was already active', + Error); +E('ERR_UNESCAPED_CHARACTERS', '%s contains unescaped characters', TypeError); +E('ERR_UNHANDLED_ERROR', + // Using a default argument here is important so the argument is not counted + // towards `Function#length`. + (err = undefined) => { + const msg = 'Unhandled error.'; + if (err === undefined) return msg; + return `${msg} (${err})`; + }, Error); +E('ERR_UNKNOWN_BUILTIN_MODULE', 'No such built-in module: %s', Error); +E('ERR_UNKNOWN_CREDENTIAL', '%s identifier does not exist: %s', Error); +E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError); +E('ERR_UNKNOWN_FILE_EXTENSION', (ext, path, suggestion) => { + let msg = `Unknown file extension "${ext}" for ${path}`; + if (suggestion) { + msg += `. ${suggestion}`; + } + return msg; +}, TypeError); +E('ERR_UNKNOWN_MODULE_FORMAT', 'Unknown module format: %s for URL %s', + RangeError); +E('ERR_UNKNOWN_SIGNAL', 'Unknown signal: %s', TypeError); +E('ERR_UNSUPPORTED_DIR_IMPORT', "Directory import '%s' is not supported " + +'resolving ES modules imported from %s', Error); +E('ERR_UNSUPPORTED_ESM_URL_SCHEME', (url, supported) => { + let msg = `Only URLs with a scheme in: ${ArrayPrototypeJoin(supported, ', ')} are supported by the default ESM loader`; + if (isWindows && url.protocol.length === 2) { + msg += + '. On Windows, absolute paths must be valid file:// URLs'; + } + msg += `. Received protocol '${url.protocol}'`; + return msg; +}, Error); + +// This should probably be a `TypeError`. 
+E('ERR_VALID_PERFORMANCE_ENTRY_TYPE', + 'At least one valid performance entry type is required', Error); +E('ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING', + 'A dynamic import callback was not specified.', TypeError); +E('ERR_VM_MODULE_ALREADY_LINKED', 'Module has already been linked', Error); +E('ERR_VM_MODULE_CANNOT_CREATE_CACHED_DATA', + 'Cached data cannot be created for a module which has been evaluated', Error); +E('ERR_VM_MODULE_DIFFERENT_CONTEXT', + 'Linked modules must use the same context', Error); +E('ERR_VM_MODULE_LINKING_ERRORED', + 'Linking has already failed for the provided module', Error); +E('ERR_VM_MODULE_NOT_MODULE', + 'Provided module is not an instance of Module', Error); +E('ERR_VM_MODULE_STATUS', 'Module status %s', Error); +E('ERR_WASI_ALREADY_STARTED', 'WASI instance has already started', Error); +E('ERR_WORKER_INIT_FAILED', 'Worker initialization failure: %s', Error); +E('ERR_WORKER_INVALID_EXEC_ARGV', (errors, msg = 'invalid execArgv flags') => + `Initiated Worker with ${msg}: ${ArrayPrototypeJoin(errors, ', ')}`, + Error); +E('ERR_WORKER_NOT_RUNNING', 'Worker instance not running', Error); +E('ERR_WORKER_OUT_OF_MEMORY', + 'Worker terminated due to reaching memory limit: %s', Error); +E('ERR_WORKER_PATH', (filename) => + 'The worker script or module filename must be an absolute path or a ' + + 'relative path starting with \'./\' or \'../\'.' + + (StringPrototypeStartsWith(filename, 'file://') ? + ' Wrap file:// URLs with `new URL`.' : '' + ) + + (StringPrototypeStartsWith(filename, 'data:text/javascript') ? + ' Wrap data: URLs with `new URL`.' : '' + ) + + ` Received "${filename}"`, + TypeError); +E('ERR_WORKER_UNSERIALIZABLE_ERROR', + 'Serializing an uncaught exception failed', Error); +E('ERR_WORKER_UNSUPPORTED_OPERATION', + '%s is not supported in workers', TypeError); +E('ERR_ZLIB_INITIALIZATION_FAILED', 'Initialization failed', Error); diff --git a/lib/internal/inspect-browser.js b/lib/internal/inspect-browser.js new file mode 100644 index 0000000000..7eb1bb42c4 --- /dev/null +++ b/lib/internal/inspect-browser.js @@ -0,0 +1,2299 @@ +'use strict'; + +const { + Array, + ArrayIsArray, + ArrayPrototypeFilter, + ArrayPrototypeForEach, + ArrayPrototypePop, + ArrayPrototypePush, + ArrayPrototypePushApply, + ArrayPrototypeSort, + ArrayPrototypeUnshift, + BigIntPrototypeValueOf, + BooleanPrototypeValueOf, + DatePrototypeGetTime, + DatePrototypeToISOString, + DatePrototypeToString, + ErrorPrototypeToString, + FunctionPrototypeCall, + FunctionPrototypeToString, + JSONStringify, + MapPrototypeGetSize, + MapPrototypeEntries, + MathFloor, + MathMax, + MathMin, + MathRound, + MathSqrt, + MathTrunc, + Number, + NumberIsFinite, + NumberIsNaN, + NumberParseFloat, + NumberParseInt, + NumberPrototypeValueOf, + Object, + ObjectAssign, + ObjectCreate, + ObjectDefineProperty, + ObjectGetOwnPropertyDescriptor, + ObjectGetOwnPropertyNames, + ObjectGetOwnPropertySymbols, + ObjectGetPrototypeOf, + ObjectIs, + ObjectKeys, + ObjectPrototypeHasOwnProperty, + ObjectPrototypePropertyIsEnumerable, + ObjectSeal, + ObjectSetPrototypeOf, + ReflectOwnKeys, + RegExp, + RegExpPrototypeTest, + RegExpPrototypeToString, + SafeStringIterator, + SafeMap, + SafeSet, + SetPrototypeGetSize, + SetPrototypeValues, + String, + StringPrototypeCharCodeAt, + StringPrototypeCodePointAt, + StringPrototypeIncludes, + StringPrototypeNormalize, + StringPrototypePadEnd, + StringPrototypePadStart, + StringPrototypeRepeat, + StringPrototypeReplace, + StringPrototypeSlice, + StringPrototypeSplit, + 
StringPrototypeToLowerCase, + StringPrototypeTrim, + StringPrototypeValueOf, + SymbolPrototypeToString, + SymbolPrototypeValueOf, + SymbolIterator, + SymbolToStringTag, + TypedArrayPrototypeGetLength, + TypedArrayPrototypeGetSymbolToStringTag, + Uint8Array, + globalThis, + uncurryThis, +} = require('./primordials'); + +const { + getOwnNonIndexProperties, + getPromiseDetails, + getProxyDetails, + kPending, + kRejected, + previewEntries, + getConstructorName: internalGetConstructorName, + getExternalValue, + propertyFilter: { + ALL_PROPERTIES, + ONLY_ENUMERABLE + } +} = require('../util'); + +const { + customInspectSymbol, + isError, + join, + removeColors +} = require('../util'); + +const { + codes: { + ERR_INVALID_ARG_TYPE + }, + isStackOverflowError +} = require('./errors'); + +const { + isAsyncFunction, + isGeneratorFunction, + isAnyArrayBuffer, + isArrayBuffer, + isArgumentsObject, + isBoxedPrimitive, + isDataView, + isExternal, + isMap, + isMapIterator, + isModuleNamespaceObject, + isNativeError, + isPromise, + isSet, + isSetIterator, + isWeakMap, + isWeakSet, + isRegExp, + isDate, + isTypedArray, + isStringObject, + isNumberObject, + isBooleanObject, + isBigIntObject, +} = require('../util'); + +const assert = require('assert'); + +const { NativeModule } = + { + NativeModule: { + exists() { + return false; + } + } + } + +const { + validateObject, + validateString, +} = require('./validators'); + +let hexSlice; + +const builtInObjects = new SafeSet( + ArrayPrototypeFilter( + ObjectGetOwnPropertyNames(globalThis), + (e) => RegExpPrototypeTest(/^[A-Z][a-zA-Z0-9]+$/, e) + ) +); + +// https://tc39.es/ecma262/#sec-IsHTMLDDA-internal-slot +const isUndetectableObject = (v) => typeof v === 'undefined' && v !== undefined; + +// These options must stay in sync with `getUserOptions`. So if any option will +// be added or removed, `getUserOptions` must also be updated accordingly. +const inspectDefaultOptions = ObjectSeal({ + showHidden: false, + depth: 2, + colors: false, + customInspect: true, + showProxy: false, + maxArrayLength: 100, + maxStringLength: 10000, + breakLength: 80, + compact: 3, + sorted: false, + getters: false, + numericSeparator: false, +}); + +const kObjectType = 0; +const kArrayType = 1; +const kArrayExtrasType = 2; + +/* eslint-disable no-control-regex */ +const strEscapeSequencesRegExp = /[\x00-\x1f\x27\x5c\x7f-\x9f]/; +const strEscapeSequencesReplacer = /[\x00-\x1f\x27\x5c\x7f-\x9f]/g +const strEscapeSequencesRegExpSingle = /[\x00-\x1f\x5c\x7f-\x9f]/; +const strEscapeSequencesReplacerSingle = /[\x00-\x1f\x5c\x7f-\x9f]/g; +/* eslint-enable no-control-regex */ + +const keyStrRegExp = /^[a-zA-Z_][a-zA-Z_0-9]*$/; +const numberRegExp = /^(0|[1-9][0-9]*)$/; + +const coreModuleRegExp = /^ {4}at (?:[^/\\(]+ \(|)node:(.+):\d+:\d+\)?$/; +const nodeModulesRegExp = /[/\\]node_modules[/\\](.+?)(?:[/\\])/g; + +const classRegExp = /^(\s+[^(]*?)\s*{/; +// eslint-disable-next-line node-core/no-unescaped-regexp-dot +const stripCommentsRegExp = /(\/\/.*?\n)|(\/\*(.|\n)*?\*\/)/g; + +const kMinLineLength = 16; + +// Constants to map the iterator state. +const kWeak = 0; +const kIterator = 1; +const kMapEntries = 2; + +// Escaped control characters (plus the single quote and the backslash). Use +// empty strings to fill up unused entries. 
+const meta = [ + '\\x00', '\\x01', '\\x02', '\\x03', '\\x04', '\\x05', '\\x06', '\\x07', // x07 + '\\b', '\\t', '\\n', '\\x0B', '\\f', '\\r', '\\x0E', '\\x0F', // x0F + '\\x10', '\\x11', '\\x12', '\\x13', '\\x14', '\\x15', '\\x16', '\\x17', // x17 + '\\x18', '\\x19', '\\x1A', '\\x1B', '\\x1C', '\\x1D', '\\x1E', '\\x1F', // x1F + '', '', '', '', '', '', '', "\\'", '', '', '', '', '', '', '', '', // x2F + '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x3F + '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x4F + '', '', '', '', '', '', '', '', '', '', '', '', '\\\\', '', '', '', // x5F + '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x6F + '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '\\x7F', // x7F + '\\x80', '\\x81', '\\x82', '\\x83', '\\x84', '\\x85', '\\x86', '\\x87', // x87 + '\\x88', '\\x89', '\\x8A', '\\x8B', '\\x8C', '\\x8D', '\\x8E', '\\x8F', // x8F + '\\x90', '\\x91', '\\x92', '\\x93', '\\x94', '\\x95', '\\x96', '\\x97', // x97 + '\\x98', '\\x99', '\\x9A', '\\x9B', '\\x9C', '\\x9D', '\\x9E', '\\x9F', // x9F +]; + +// Regex used for ansi escape code splitting +// Adopted from https://github.com/chalk/ansi-regex/blob/HEAD/index.js +// License: MIT, authors: @sindresorhus, Qix-, arjunmehta and LitoMore +// Matches all ansi escape code sequences in a string +const ansiPattern = '[\\u001B\\u009B][[\\]()#;?]*' + + '(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*' + + '|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)' + + '|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'; +const ansi = new RegExp(ansiPattern, 'g'); + +let getStringWidth; + +function getUserOptions(ctx, isCrossContext) { + const ret = { + stylize: ctx.stylize, + showHidden: ctx.showHidden, + depth: ctx.depth, + colors: ctx.colors, + customInspect: ctx.customInspect, + showProxy: ctx.showProxy, + maxArrayLength: ctx.maxArrayLength, + maxStringLength: ctx.maxStringLength, + breakLength: ctx.breakLength, + compact: ctx.compact, + sorted: ctx.sorted, + getters: ctx.getters, + numericSeparator: ctx.numericSeparator, + ...ctx.userOptions + }; + + // Typically, the target value will be an instance of `Object`. If that is + // *not* the case, the object may come from another vm.Context, and we want + // to avoid passing it objects from this Context in that case, so we remove + // the prototype from the returned object itself + the `stylize()` function, + // and remove all other non-primitives, including non-primitive user options. + if (isCrossContext) { + ObjectSetPrototypeOf(ret, null); + for (const key of ObjectKeys(ret)) { + if ((typeof ret[key] === 'object' || typeof ret[key] === 'function') && + ret[key] !== null) { + delete ret[key]; + } + } + ret.stylize = ObjectSetPrototypeOf((value, flavour) => { + let stylized; + try { + stylized = `${ctx.stylize(value, flavour)}`; + } catch { + // Continue regardless of error. + } + + if (typeof stylized !== 'string') return value; + // `stylized` is a string as it should be, which is safe to pass along. + return stylized; + }, null); + } + + return ret; +} + +/** + * Echos the value of any input. Tries to print the value out + * in the best way possible given the different types. + * + * @param {any} value The value to print out. + * @param {object} opts Optional options object that alters the output. 
+ */ +/* Legacy: value, showHidden, depth, colors */ +function inspect(value, opts) { + // Default options + const ctx = { + budget: {}, + indentationLvl: 0, + seen: [], + currentDepth: 0, + stylize: stylizeNoColor, + showHidden: inspectDefaultOptions.showHidden, + depth: inspectDefaultOptions.depth, + colors: inspectDefaultOptions.colors, + customInspect: inspectDefaultOptions.customInspect, + showProxy: inspectDefaultOptions.showProxy, + maxArrayLength: inspectDefaultOptions.maxArrayLength, + maxStringLength: inspectDefaultOptions.maxStringLength, + breakLength: inspectDefaultOptions.breakLength, + compact: inspectDefaultOptions.compact, + sorted: inspectDefaultOptions.sorted, + getters: inspectDefaultOptions.getters, + numericSeparator: inspectDefaultOptions.numericSeparator, + }; + if (arguments.length > 1) { + // Legacy... + if (arguments.length > 2) { + if (arguments[2] !== undefined) { + ctx.depth = arguments[2]; + } + if (arguments.length > 3 && arguments[3] !== undefined) { + ctx.colors = arguments[3]; + } + } + // Set user-specified options + if (typeof opts === 'boolean') { + ctx.showHidden = opts; + } else if (opts) { + const optKeys = ObjectKeys(opts); + for (let i = 0; i < optKeys.length; ++i) { + const key = optKeys[i]; + // TODO(BridgeAR): Find a solution what to do about stylize. Either make + // this function public or add a new API with a similar or better + // functionality. + if ( + ObjectPrototypeHasOwnProperty(inspectDefaultOptions, key) || + key === 'stylize') { + ctx[key] = opts[key]; + } else if (ctx.userOptions === undefined) { + // This is required to pass through the actual user input. + ctx.userOptions = opts; + } + } + } + } + if (ctx.colors) ctx.stylize = stylizeWithColor; + if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; + if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; + return formatValue(ctx, value, 0); +} +inspect.custom = customInspectSymbol; + +ObjectDefineProperty(inspect, 'defaultOptions', { + get() { + return inspectDefaultOptions; + }, + set(options) { + validateObject(options, 'options'); + return ObjectAssign(inspectDefaultOptions, options); + } +}); + +// Set Graphics Rendition https://en.wikipedia.org/wiki/ANSI_escape_code#graphics +// Each color consists of an array with the color code as first entry and the +// reset code as second entry. 
+const defaultFG = 39; +const defaultBG = 49; +inspect.colors = ObjectAssign(ObjectCreate(null), { + reset: [0, 0], + bold: [1, 22], + dim: [2, 22], // Alias: faint + italic: [3, 23], + underline: [4, 24], + blink: [5, 25], + // Swap foreground and background colors + inverse: [7, 27], // Alias: swapcolors, swapColors + hidden: [8, 28], // Alias: conceal + strikethrough: [9, 29], // Alias: strikeThrough, crossedout, crossedOut + doubleunderline: [21, 24], // Alias: doubleUnderline + black: [30, defaultFG], + red: [31, defaultFG], + green: [32, defaultFG], + yellow: [33, defaultFG], + blue: [34, defaultFG], + magenta: [35, defaultFG], + cyan: [36, defaultFG], + white: [37, defaultFG], + bgBlack: [40, defaultBG], + bgRed: [41, defaultBG], + bgGreen: [42, defaultBG], + bgYellow: [43, defaultBG], + bgBlue: [44, defaultBG], + bgMagenta: [45, defaultBG], + bgCyan: [46, defaultBG], + bgWhite: [47, defaultBG], + framed: [51, 54], + overlined: [53, 55], + gray: [90, defaultFG], // Alias: grey, blackBright + redBright: [91, defaultFG], + greenBright: [92, defaultFG], + yellowBright: [93, defaultFG], + blueBright: [94, defaultFG], + magentaBright: [95, defaultFG], + cyanBright: [96, defaultFG], + whiteBright: [97, defaultFG], + bgGray: [100, defaultBG], // Alias: bgGrey, bgBlackBright + bgRedBright: [101, defaultBG], + bgGreenBright: [102, defaultBG], + bgYellowBright: [103, defaultBG], + bgBlueBright: [104, defaultBG], + bgMagentaBright: [105, defaultBG], + bgCyanBright: [106, defaultBG], + bgWhiteBright: [107, defaultBG], +}); + +function defineColorAlias(target, alias) { + ObjectDefineProperty(inspect.colors, alias, { + get() { + return this[target]; + }, + set(value) { + this[target] = value; + }, + configurable: true, + enumerable: false + }); +} + +defineColorAlias('gray', 'grey'); +defineColorAlias('gray', 'blackBright'); +defineColorAlias('bgGray', 'bgGrey'); +defineColorAlias('bgGray', 'bgBlackBright'); +defineColorAlias('dim', 'faint'); +defineColorAlias('strikethrough', 'crossedout'); +defineColorAlias('strikethrough', 'strikeThrough'); +defineColorAlias('strikethrough', 'crossedOut'); +defineColorAlias('hidden', 'conceal'); +defineColorAlias('inverse', 'swapColors'); +defineColorAlias('inverse', 'swapcolors'); +defineColorAlias('doubleunderline', 'doubleUnderline'); + +// TODO(BridgeAR): Add function style support for more complex styles. +// Don't use 'blue' not visible on cmd.exe +inspect.styles = ObjectAssign(ObjectCreate(null), { + special: 'cyan', + number: 'yellow', + bigint: 'yellow', + boolean: 'yellow', + undefined: 'grey', + null: 'bold', + string: 'green', + symbol: 'green', + date: 'magenta', + // "name": intentionally not styling + // TODO(BridgeAR): Highlight regular expressions properly. + regexp: 'red', + module: 'underline' +}); + +function addQuotes(str, quotes) { + if (quotes === -1) { + return `"${str}"`; + } + if (quotes === -2) { + return `\`${str}\``; + } + return `'${str}'`; +} + +function escapeFn(str) { + const charCode = StringPrototypeCharCodeAt(str); + return meta.length > charCode ? meta[charCode] : `\\u${charCode.toString(16)}`; +} + +// Escape control characters, single quotes and the backslash. +// This is similar to JSON stringify escaping. +function strEscape(str) { + let escapeTest = strEscapeSequencesRegExp; + let escapeReplace = strEscapeSequencesReplacer; + let singleQuote = 39; + + // Check for double quotes. If not present, do not escape single quotes and + // instead wrap the text in double quotes. If double quotes exist, check for + // backticks. 
If they do not exist, use those as fallback instead of the + // double quotes. + if (StringPrototypeIncludes(str, "'")) { + // This invalidates the charCode and therefore can not be matched for + // anymore. + if (!StringPrototypeIncludes(str, '"')) { + singleQuote = -1; + } else if (!StringPrototypeIncludes(str, '`') && + !StringPrototypeIncludes(str, '${')) { + singleQuote = -2; + } + if (singleQuote !== 39) { + escapeTest = strEscapeSequencesRegExpSingle; + escapeReplace = strEscapeSequencesReplacerSingle; + } + } + + // Some magic numbers that worked out fine while benchmarking with v8 6.0 + if (str.length < 5000 && !RegExpPrototypeTest(escapeTest, str)) + return addQuotes(str, singleQuote); + if (str.length > 100) { + str = StringPrototypeReplace(str, escapeReplace, escapeFn); + return addQuotes(str, singleQuote); + } + + let result = ''; + let last = 0; + for (let i = 0; i < str.length; i++) { + const point = StringPrototypeCharCodeAt(str, i); + if (point === singleQuote || + point === 92 || + point < 32 || + (point > 126 && point < 160)) { + if (last === i) { + result += meta[point]; + } else { + result += `${StringPrototypeSlice(str, last, i)}${meta[point]}`; + } + last = i + 1; + } else if (point >= 0xd800 && point <= 0xdfff) { + if (point <= 0xdbff && i + 1 < str.length) { + const point = StringPrototypeCharCodeAt(str, i + 1); + if (point >= 0xdc00 && point <= 0xdfff) { + i++; + continue; + } + } + result += `${StringPrototypeSlice(str, last, i)}${`\\u${point.toString(16)}`}`; + last = i + 1; + } + } + + if (last !== str.length) { + result += StringPrototypeSlice(str, last); + } + return addQuotes(result, singleQuote); +} + +function stylizeWithColor(str, styleType) { + const style = inspect.styles[styleType]; + if (style !== undefined) { + const color = inspect.colors[style]; + if (color !== undefined) + return `\u001b[${color[0]}m${str}\u001b[${color[1]}m`; + } + return str; +} + +function stylizeNoColor(str) { + return str; +} + +// Return a new empty array to push in the results of the default formatter. +function getEmptyFormatArray() { + return []; +} + +function isInstanceof(object, proto) { + try { + return object instanceof proto; + } catch { + return false; + } +} + +function getConstructorName(obj, ctx, recurseTimes, protoProps) { + let firstProto; + const tmp = obj; + while (obj || isUndetectableObject(obj)) { + const descriptor = ObjectGetOwnPropertyDescriptor(obj, 'constructor'); + if (descriptor !== undefined && + typeof descriptor.value === 'function' && + descriptor.value.name !== '' && + isInstanceof(tmp, descriptor.value)) { + if (protoProps !== undefined && + (firstProto !== obj || + !builtInObjects.has(descriptor.value.name))) { + addPrototypeProperties( + ctx, tmp, firstProto || tmp, recurseTimes, protoProps); + } + return descriptor.value.name; + } + + obj = ObjectGetPrototypeOf(obj); + if (firstProto === undefined) { + firstProto = obj; + } + } + + if (firstProto === null) { + return null; + } + + const res = internalGetConstructorName(tmp); + + if (recurseTimes > ctx.depth && ctx.depth !== null) { + return `${res} `; + } + + const protoConstr = getConstructorName( + firstProto, ctx, recurseTimes + 1, protoProps); + + if (protoConstr === null) { + return `${res} <${inspect(firstProto, { + ...ctx, + customInspect: false, + depth: -1 + })}>`; + } + + return `${res} <${protoConstr}>`; +} + +// This function has the side effect of adding prototype properties to the +// `output` argument (which is an array). 
This is intended to highlight user +// defined prototype properties. +function addPrototypeProperties(ctx, main, obj, recurseTimes, output) { + let depth = 0; + let keys; + let keySet; + do { + if (depth !== 0 || main === obj) { + obj = ObjectGetPrototypeOf(obj); + // Stop as soon as a null prototype is encountered. + if (obj === null) { + return; + } + // Stop as soon as a built-in object type is detected. + const descriptor = ObjectGetOwnPropertyDescriptor(obj, 'constructor'); + if (descriptor !== undefined && + typeof descriptor.value === 'function' && + builtInObjects.has(descriptor.value.name)) { + return; + } + } + + if (depth === 0) { + keySet = new SafeSet(); + } else { + ArrayPrototypeForEach(keys, (key) => keySet.add(key)); + } + // Get all own property names and symbols. + keys = ReflectOwnKeys(obj); + ArrayPrototypePush(ctx.seen, main); + for (const key of keys) { + // Ignore the `constructor` property and keys that exist on layers above. + if (key === 'constructor' || + ObjectPrototypeHasOwnProperty(main, key) || + (depth !== 0 && keySet.has(key))) { + continue; + } + const desc = ObjectGetOwnPropertyDescriptor(obj, key); + if (typeof desc.value === 'function') { + continue; + } + const value = formatProperty( + ctx, obj, recurseTimes, key, kObjectType, desc, main); + if (ctx.colors) { + // Faint! + ArrayPrototypePush(output, `\u001b[2m${value}\u001b[22m`); + } else { + ArrayPrototypePush(output, value); + } + } + ArrayPrototypePop(ctx.seen); + // Limit the inspection to up to three prototype layers. Using `recurseTimes` + // is not a good choice here, because it's as if the properties are declared + // on the current object from the users perspective. + } while (++depth !== 3); +} + +function getPrefix(constructor, tag, fallback, size = '') { + if (constructor === null) { + if (tag !== '' && fallback !== tag) { + return `[${fallback}${size}: null prototype] [${tag}] `; + } + return `[${fallback}${size}: null prototype] `; + } + + if (tag !== '' && constructor !== tag) { + return `${constructor}${size} [${tag}] `; + } + return `${constructor}${size} `; +} + +// Look up the keys of the object. +function getKeys(value, showHidden) { + let keys; + const symbols = ObjectGetOwnPropertySymbols(value); + if (showHidden) { + keys = ObjectGetOwnPropertyNames(value); + if (symbols.length !== 0) + ArrayPrototypePushApply(keys, symbols); + } else { + // This might throw if `value` is a Module Namespace Object from an + // unevaluated module, but we don't want to perform the actual type + // check because it's expensive. + // TODO(devsnek): track https://github.com/tc39/ecma262/issues/1209 + // and modify this logic as needed. 
+ try { + keys = ObjectKeys(value); + } catch (err) { + assert(isNativeError(err) && err.name === 'ReferenceError' && + isModuleNamespaceObject(value)); + keys = ObjectGetOwnPropertyNames(value); + } + if (symbols.length !== 0) { + const filter = (key) => ObjectPrototypePropertyIsEnumerable(value, key); + ArrayPrototypePushApply(keys, ArrayPrototypeFilter(symbols, filter)); + } + } + return keys; +} + +function getCtxStyle(value, constructor, tag) { + let fallback = ''; + if (constructor === null) { + fallback = internalGetConstructorName(value); + if (fallback === tag) { + fallback = 'Object'; + } + } + return getPrefix(constructor, tag, fallback); +} + +function formatProxy(ctx, proxy, recurseTimes) { + if (recurseTimes > ctx.depth && ctx.depth !== null) { + return ctx.stylize('Proxy [Array]', 'special'); + } + recurseTimes += 1; + ctx.indentationLvl += 2; + const res = [ + formatValue(ctx, proxy[0], recurseTimes), + formatValue(ctx, proxy[1], recurseTimes), + ]; + ctx.indentationLvl -= 2; + return reduceToSingleString( + ctx, res, '', ['Proxy [', ']'], kArrayExtrasType, recurseTimes); +} + +// Note: using `formatValue` directly requires the indentation level to be +// corrected by setting `ctx.indentationLvL += diff` and then to decrease the +// value afterwards again. +function formatValue(ctx, value, recurseTimes, typedArray) { + // Primitive types cannot have properties. + if (typeof value !== 'object' && + typeof value !== 'function' && + !isUndetectableObject(value)) { + return formatPrimitive(ctx.stylize, value, ctx); + } + if (value === null) { + return ctx.stylize('null', 'null'); + } + + // Memorize the context for custom inspection on proxies. + const context = value; + // Always check for proxies to prevent side effects and to prevent triggering + // any proxy handlers. + const proxy = getProxyDetails(value, !!ctx.showProxy); + if (proxy !== undefined) { + if (ctx.showProxy) { + return formatProxy(ctx, proxy, recurseTimes); + } + value = proxy; + } + + // Provide a hook for user-specified inspect functions. + // Check that value is an object with an inspect function on it. + if (ctx.customInspect) { + const maybeCustom = value[customInspectSymbol]; + if (typeof maybeCustom === 'function' && + // Filter out the util module, its inspect function is special. + maybeCustom !== inspect && + // Also filter out any prototype objects using the circular check. + !(value.constructor && value.constructor.prototype === value)) { + // This makes sure the recurseTimes are reported as before while using + // a counter internally. + const depth = ctx.depth === null ? null : ctx.depth - recurseTimes; + const isCrossContext = + proxy !== undefined || !(context instanceof Object); + const ret = FunctionPrototypeCall( + maybeCustom, + context, + depth, + getUserOptions(ctx, isCrossContext), + inspect + ); + // If the custom inspection method returned `this`, don't go into + // infinite recursion. + if (ret !== context) { + if (typeof ret !== 'string') { + return formatValue(ctx, ret, recurseTimes); + } + return ret.replace(/\n/g, `\n${' '.repeat(ctx.indentationLvl)}`); + } + } + } + + // Using an array here is actually better for the average case than using + // a Set. `seen` will only check for the depth and will never grow too large. 
+ if (ctx.seen.includes(value)) { + let index = 1; + if (ctx.circular === undefined) { + ctx.circular = new SafeMap(); + ctx.circular.set(value, index); + } else { + index = ctx.circular.get(value); + if (index === undefined) { + index = ctx.circular.size + 1; + ctx.circular.set(value, index); + } + } + return ctx.stylize(`[Circular *${index}]`, 'special'); + } + + return formatRaw(ctx, value, recurseTimes, typedArray); +} + +function formatRaw(ctx, value, recurseTimes, typedArray) { + let keys; + let protoProps; + if (ctx.showHidden && (recurseTimes <= ctx.depth || ctx.depth === null)) { + protoProps = []; + } + + const constructor = getConstructorName(value, ctx, recurseTimes, protoProps); + // Reset the variable to check for this later on. + if (protoProps !== undefined && protoProps.length === 0) { + protoProps = undefined; + } + + let tag = value[SymbolToStringTag]; + // Only list the tag in case it's non-enumerable / not an own property. + // Otherwise we'd print this twice. + if (typeof tag !== 'string' || + (tag !== '' && + (ctx.showHidden ? + ObjectPrototypeHasOwnProperty : + ObjectPrototypePropertyIsEnumerable)( + value, SymbolToStringTag + ))) { + tag = ''; + } + let base = ''; + let formatter = getEmptyFormatArray; + let braces; + let noIterator = true; + let i = 0; + const filter = ctx.showHidden ? ALL_PROPERTIES : ONLY_ENUMERABLE; + + let extrasType = kObjectType; + + // Iterators and the rest are split to reduce checks. + // We have to check all values in case the constructor is set to null. + // Otherwise it would not possible to identify all types properly. + if (value[SymbolIterator] || constructor === null) { + noIterator = false; + if (ArrayIsArray(value)) { + // Only set the constructor for non ordinary ("Array [...]") arrays. + const prefix = (constructor !== 'Array' || tag !== '') ? + getPrefix(constructor, tag, 'Array', `(${value.length})`) : + ''; + keys = getOwnNonIndexProperties(value, filter); + braces = [`${prefix}[`, ']']; + if (value.length === 0 && keys.length === 0 && protoProps === undefined) + return `${braces[0]}]`; + extrasType = kArrayExtrasType; + formatter = formatArray; + } else if (isSet(value)) { + const size = SetPrototypeGetSize(value); + const prefix = getPrefix(constructor, tag, 'Set', `(${size})`); + keys = getKeys(value, ctx.showHidden); + formatter = constructor !== null ? + formatSet.bind(null, value) : + formatSet.bind(null, SetPrototypeValues(value)); + if (size === 0 && keys.length === 0 && protoProps === undefined) + return `${prefix}{}`; + braces = [`${prefix}{`, '}']; + } else if (isMap(value)) { + const size = MapPrototypeGetSize(value); + const prefix = getPrefix(constructor, tag, 'Map', `(${size})`); + keys = getKeys(value, ctx.showHidden); + formatter = constructor !== null ? + formatMap.bind(null, value) : + formatMap.bind(null, MapPrototypeEntries(value)); + if (size === 0 && keys.length === 0 && protoProps === undefined) + return `${prefix}{}`; + braces = [`${prefix}{`, '}']; + } else if (isTypedArray(value)) { + keys = getOwnNonIndexProperties(value, filter); + let bound = value; + let fallback = ''; + if (constructor === null) { + fallback = TypedArrayPrototypeGetSymbolToStringTag(value); + // Reconstruct the array information. 
+ bound = new primordials[fallback](value); + } + const size = TypedArrayPrototypeGetLength(value); + const prefix = getPrefix(constructor, tag, fallback, `(${size})`); + braces = [`${prefix}[`, ']']; + if (value.length === 0 && keys.length === 0 && !ctx.showHidden) + return `${braces[0]}]`; + // Special handle the value. The original value is required below. The + // bound function is required to reconstruct missing information. + formatter = formatTypedArray.bind(null, bound, size); + extrasType = kArrayExtrasType; + } else if (isMapIterator(value)) { + keys = getKeys(value, ctx.showHidden); + braces = getIteratorBraces('Map', tag); + // Add braces to the formatter parameters. + formatter = formatIterator.bind(null, braces); + } else if (isSetIterator(value)) { + keys = getKeys(value, ctx.showHidden); + braces = getIteratorBraces('Set', tag); + // Add braces to the formatter parameters. + formatter = formatIterator.bind(null, braces); + } else { + noIterator = true; + } + } + if (noIterator) { + keys = getKeys(value, ctx.showHidden); + braces = ['{', '}']; + if (constructor === 'Object') { + if (isArgumentsObject(value)) { + braces[0] = '[Arguments] {'; + } else if (tag !== '') { + braces[0] = `${getPrefix(constructor, tag, 'Object')}{`; + } + if (keys.length === 0 && protoProps === undefined) { + return `${braces[0]}}`; + } + } else if (typeof value === 'function') { + base = getFunctionBase(value, constructor, tag); + if (keys.length === 0 && protoProps === undefined) + return ctx.stylize(base, 'special'); + } else if (isRegExp(value)) { + // Make RegExps say that they are RegExps + base = RegExpPrototypeToString( + constructor !== null ? value : new RegExp(value) + ); + const prefix = getPrefix(constructor, tag, 'RegExp'); + if (prefix !== 'RegExp ') + base = `${prefix}${base}`; + if ((keys.length === 0 && protoProps === undefined) || + (recurseTimes > ctx.depth && ctx.depth !== null)) { + return ctx.stylize(base, 'regexp'); + } + } else if (isDate(value)) { + // Make dates with properties first say the date + base = NumberIsNaN(DatePrototypeGetTime(value)) ? + DatePrototypeToString(value) : + DatePrototypeToISOString(value); + const prefix = getPrefix(constructor, tag, 'Date'); + if (prefix !== 'Date ') + base = `${prefix}${base}`; + if (keys.length === 0 && protoProps === undefined) { + return ctx.stylize(base, 'date'); + } + } else if (isError(value)) { + base = formatError(value, constructor, tag, ctx, keys); + if (keys.length === 0 && protoProps === undefined) + return base; + } else if (isAnyArrayBuffer(value)) { + // Fast path for ArrayBuffer and SharedArrayBuffer. + // Can't do the same for DataView because it has a non-primitive + // .buffer property that we need to recurse for. + const arrayType = isArrayBuffer(value) ? 'ArrayBuffer' : + 'SharedArrayBuffer'; + const prefix = getPrefix(constructor, tag, arrayType); + if (typedArray === undefined) { + formatter = formatArrayBuffer; + } else if (keys.length === 0 && protoProps === undefined) { + return prefix + + `{ byteLength: ${formatNumber(ctx.stylize, value.byteLength, false)} }`; + } + braces[0] = `${prefix}{`; + ArrayPrototypeUnshift(keys, 'byteLength'); + } else if (isDataView(value)) { + braces[0] = `${getPrefix(constructor, tag, 'DataView')}{`; + // .buffer goes last, it's not a primitive like the others. 
+ ArrayPrototypeUnshift(keys, 'byteLength', 'byteOffset', 'buffer'); + } else if (isPromise(value)) { + braces[0] = `${getPrefix(constructor, tag, 'Promise')}{`; + formatter = formatPromise; + } else if (isWeakSet(value)) { + braces[0] = `${getPrefix(constructor, tag, 'WeakSet')}{`; + formatter = ctx.showHidden ? formatWeakSet : formatWeakCollection; + } else if (isWeakMap(value)) { + braces[0] = `${getPrefix(constructor, tag, 'WeakMap')}{`; + formatter = ctx.showHidden ? formatWeakMap : formatWeakCollection; + } else if (isModuleNamespaceObject(value)) { + braces[0] = `${getPrefix(constructor, tag, 'Module')}{`; + // Special handle keys for namespace objects. + formatter = formatNamespaceObject.bind(null, keys); + } else if (isBoxedPrimitive(value)) { + base = getBoxedBase(value, ctx, keys, constructor, tag); + if (keys.length === 0 && protoProps === undefined) { + return base; + } + } else { + if (keys.length === 0 && protoProps === undefined) { + if (isExternal(value)) { + const address = getExternalValue(value).toString(16); + return ctx.stylize(`[External: ${address}]`, 'special'); + } + return `${getCtxStyle(value, constructor, tag)}{}`; + } + braces[0] = `${getCtxStyle(value, constructor, tag)}{`; + } + } + + if (recurseTimes > ctx.depth && ctx.depth !== null) { + let constructorName = getCtxStyle(value, constructor, tag).slice(0, -1); + if (constructor !== null) + constructorName = `[${constructorName}]`; + return ctx.stylize(constructorName, 'special'); + } + recurseTimes += 1; + + ctx.seen.push(value); + ctx.currentDepth = recurseTimes; + let output; + const indentationLvl = ctx.indentationLvl; + try { + output = formatter(ctx, value, recurseTimes); + for (i = 0; i < keys.length; i++) { + output.push( + formatProperty(ctx, value, recurseTimes, keys[i], extrasType)); + } + if (protoProps !== undefined) { + output.push(...protoProps); + } + } catch (err) { + const constructorName = getCtxStyle(value, constructor, tag).slice(0, -1); + return handleMaxCallStackSize(ctx, err, constructorName, indentationLvl); + } + if (ctx.circular !== undefined) { + const index = ctx.circular.get(value); + if (index !== undefined) { + const reference = ctx.stylize(``, 'special'); + // Add reference always to the very beginning of the output. + if (ctx.compact !== true) { + base = base === '' ? reference : `${reference} ${base}`; + } else { + braces[0] = `${reference} ${braces[0]}`; + } + } + } + ctx.seen.pop(); + + if (ctx.sorted) { + const comparator = ctx.sorted === true ? undefined : ctx.sorted; + if (extrasType === kObjectType) { + output = output.sort(comparator); + } else if (keys.length > 1) { + const sorted = output.slice(output.length - keys.length).sort(comparator); + output.splice(output.length - keys.length, keys.length, ...sorted); + } + } + + const res = reduceToSingleString( + ctx, output, base, braces, extrasType, recurseTimes, value); + const budget = ctx.budget[ctx.indentationLvl] || 0; + const newLength = budget + res.length; + ctx.budget[ctx.indentationLvl] = newLength; + // If any indentationLvl exceeds this limit, limit further inspecting to the + // minimum. Otherwise the recursive algorithm might continue inspecting the + // object even though the maximum string size (~2 ** 28 on 32 bit systems and + // ~2 ** 30 on 64 bit systems) exceeded. The actual output is not limited at + // exactly 2 ** 27 but a bit higher. This depends on the object shape. + // This limit also makes sure that huge objects don't block the event loop + // significantly. 
+ if (newLength > 2 ** 27) { + ctx.depth = -1; + } + return res; +} + +function getIteratorBraces(type, tag) { + if (tag !== `${type} Iterator`) { + if (tag !== '') + tag += '] ['; + tag += `${type} Iterator`; + } + return [`[${tag}] {`, '}']; +} + +function getBoxedBase(value, ctx, keys, constructor, tag) { + let fn; + let type; + if (isNumberObject(value)) { + fn = NumberPrototypeValueOf; + type = 'Number'; + } else if (isStringObject(value)) { + fn = StringPrototypeValueOf; + type = 'String'; + // For boxed Strings, we have to remove the 0-n indexed entries, + // since they just noisy up the output and are redundant + // Make boxed primitive Strings look like such + keys.splice(0, value.length); + } else if (isBooleanObject(value)) { + fn = BooleanPrototypeValueOf; + type = 'Boolean'; + } else if (isBigIntObject(value)) { + fn = BigIntPrototypeValueOf; + type = 'BigInt'; + } else { + fn = SymbolPrototypeValueOf; + type = 'Symbol'; + } + let base = `[${type}`; + if (type !== constructor) { + if (constructor === null) { + base += ' (null prototype)'; + } else { + base += ` (${constructor})`; + } + } + base += `: ${formatPrimitive(stylizeNoColor, fn(value), ctx)}]`; + if (tag !== '' && tag !== constructor) { + base += ` [${tag}]`; + } + if (keys.length !== 0 || ctx.stylize === stylizeNoColor) + return base; + return ctx.stylize(base, StringPrototypeToLowerCase(type)); +} + +function getClassBase(value, constructor, tag) { + const hasName = ObjectPrototypeHasOwnProperty(value, 'name'); + const name = (hasName && value.name) || '(anonymous)'; + let base = `class ${name}`; + if (constructor !== 'Function' && constructor !== null) { + base += ` [${constructor}]`; + } + if (tag !== '' && constructor !== tag) { + base += ` [${tag}]`; + } + if (constructor !== null) { + const superName = ObjectGetPrototypeOf(value).name; + if (superName) { + base += ` extends ${superName}`; + } + } else { + base += ' extends [null prototype]'; + } + return `[${base}]`; +} + +function getFunctionBase(value, constructor, tag) { + const stringified = FunctionPrototypeToString(value); + if (stringified.startsWith('class') && stringified.endsWith('}')) { + const slice = stringified.slice(5, -1); + const bracketIndex = slice.indexOf('{'); + if (bracketIndex !== -1 && + (!slice.slice(0, bracketIndex).includes('(') || + // Slow path to guarantee that it's indeed a class. + classRegExp.test(slice.replace(stripCommentsRegExp)))) { + return getClassBase(value, constructor, tag); + } + } + let type = 'Function'; + if (isGeneratorFunction(value)) { + type = `Generator${type}`; + } + if (isAsyncFunction(value)) { + type = `Async${type}`; + } + let base = `[${type}`; + if (constructor === null) { + base += ' (null prototype)'; + } + if (value.name === '') { + base += ' (anonymous)'; + } else { + base += `: ${value.name}`; + } + base += ']'; + if (constructor !== type && constructor !== null) { + base += ` ${constructor}`; + } + if (tag !== '' && constructor !== tag) { + base += ` [${tag}]`; + } + return base; +} + +function identicalSequenceRange(a, b) { + for (let i = 0; i < a.length - 3; i++) { + // Find the first entry of b that matches the current entry of a. + const pos = b.indexOf(a[i]); + if (pos !== -1) { + const rest = b.length - pos; + if (rest > 3) { + let len = 1; + const maxLen = MathMin(a.length - i, rest); + // Count the number of consecutive entries. 
+ while (maxLen > len && a[i + len] === b[pos + len]) { + len++; + } + if (len > 3) { + return { len, offset: i }; + } + } + } + } + + return { len: 0, offset: 0 }; +} + +function getStackString(error) { + return error.stack ? String(error.stack) : ErrorPrototypeToString(error); +} + +function getStackFrames(ctx, err, stack) { + const frames = stack.split('\n'); + + // Remove stack frames identical to frames in cause. + if (err.cause && isError(err.cause)) { + const causeStack = getStackString(err.cause); + const causeStackStart = causeStack.indexOf('\n at'); + if (causeStackStart !== -1) { + const causeFrames = causeStack.slice(causeStackStart + 1).split('\n'); + const { len, offset } = identicalSequenceRange(frames, causeFrames); + if (len > 0) { + const skipped = len - 2; + const msg = ` ... ${skipped} lines matching cause stack trace ...`; + frames.splice(offset + 1, skipped, ctx.stylize(msg, 'undefined')); + } + } + } + return frames; +} + +function improveStack(stack, constructor, name, tag) { + // A stack trace may contain arbitrary data. Only manipulate the output + // for "regular errors" (errors that "look normal") for now. + let len = name.length; + + if (constructor === null || + (name.endsWith('Error') && + stack.startsWith(name) && + (stack.length === len || stack[len] === ':' || stack[len] === '\n'))) { + let fallback = 'Error'; + if (constructor === null) { + const start = stack.match(/^([A-Z][a-z_ A-Z0-9[\]()-]+)(?::|\n {4}at)/) || + stack.match(/^([a-z_A-Z0-9-]*Error)$/); + fallback = (start && start[1]) || ''; + len = fallback.length; + fallback = fallback || 'Error'; + } + const prefix = getPrefix(constructor, tag, fallback).slice(0, -1); + if (name !== prefix) { + if (prefix.includes(name)) { + if (len === 0) { + stack = `${prefix}: ${stack}`; + } else { + stack = `${prefix}${stack.slice(len)}`; + } + } else { + stack = `${prefix} [${name}]${stack.slice(len)}`; + } + } + } + return stack; +} + +function removeDuplicateErrorKeys(ctx, keys, err, stack) { + if (!ctx.showHidden && keys.length !== 0) { + for (const name of ['name', 'message', 'stack']) { + const index = keys.indexOf(name); + // Only hide the property in case it's part of the original stack + if (index !== -1 && stack.includes(err[name])) { + keys.splice(index, 1); + } + } + } +} + +function formatError(err, constructor, tag, ctx, keys) { + const name = err.name != null ? String(err.name) : 'Error'; + let stack = getStackString(err); + + removeDuplicateErrorKeys(ctx, keys, err, stack); + + if ('cause' in err && + (keys.length === 0 || !keys.includes('cause'))) { + keys.push('cause'); + } + + stack = improveStack(stack, constructor, name, tag); + + // Ignore the error message if it's contained in the stack. + let pos = (err.message && stack.indexOf(err.message)) || -1; + if (pos !== -1) + pos += err.message.length; + // Wrap the error in brackets in case it has no stack trace. + const stackStart = stack.indexOf('\n at', pos); + if (stackStart === -1) { + stack = `[${stack}]`; + } else { + let newStack = stack.slice(0, stackStart); + const lines = getStackFrames(ctx, err, stack.slice(stackStart + 1)); + if (ctx.colors) { + // Highlight userland code and node modules. + for (const line of lines) { + const core = line.match(coreModuleRegExp); + if (core !== null && NativeModule.exists(core[1])) { + newStack += `\n${ctx.stylize(line, 'undefined')}`; + } else { + // This adds underscores to all node_modules to quickly identify them. 
+ let nodeModule; + newStack += '\n'; + let pos = 0; + while ((nodeModule = nodeModulesRegExp.exec(line)) !== null) { + // '/node_modules/'.length === 14 + newStack += line.slice(pos, nodeModule.index + 14); + newStack += ctx.stylize(nodeModule[1], 'module'); + pos = nodeModule.index + nodeModule[0].length; + } + newStack += pos === 0 ? line : line.slice(pos); + } + } + } else { + newStack += `\n${lines.join('\n')}`; + } + stack = newStack; + } + // The message and the stack have to be indented as well! + if (ctx.indentationLvl !== 0) { + const indentation = ' '.repeat(ctx.indentationLvl); + stack = stack.replace(/\n/g, `\n${indentation}`); + } + return stack; +} + +function groupArrayElements(ctx, output, value) { + let totalLength = 0; + let maxLength = 0; + let i = 0; + let outputLength = output.length; + if (ctx.maxArrayLength < output.length) { + // This makes sure the "... n more items" part is not taken into account. + outputLength--; + } + const separatorSpace = 2; // Add 1 for the space and 1 for the separator. + const dataLen = new Array(outputLength); + // Calculate the total length of all output entries and the individual max + // entries length of all output entries. We have to remove colors first, + // otherwise the length would not be calculated properly. + for (; i < outputLength; i++) { + const len = getStringWidth(output[i], ctx.colors); + dataLen[i] = len; + totalLength += len + separatorSpace; + if (maxLength < len) + maxLength = len; + } + // Add two to `maxLength` as we add a single whitespace character plus a comma + // in-between two entries. + const actualMax = maxLength + separatorSpace; + // Check if at least three entries fit next to each other and prevent grouping + // of arrays that contains entries of very different length (i.e., if a single + // entry is longer than 1/5 of all other entries combined). Otherwise the + // space in-between small entries would be enormous. + if (actualMax * 3 + ctx.indentationLvl < ctx.breakLength && + (totalLength / actualMax > 5 || maxLength <= 6)) { + + const approxCharHeights = 2.5; + const averageBias = MathSqrt(actualMax - totalLength / output.length); + const biasedMax = MathMax(actualMax - 3 - averageBias, 1); + // Dynamically check how many columns seem possible. + const columns = MathMin( + // Ideally a square should be drawn. We expect a character to be about 2.5 + // times as high as wide. This is the area formula to calculate a square + // which contains n rectangles of size `actualMax * approxCharHeights`. + // Divide that by `actualMax` to receive the correct number of columns. + // The added bias increases the columns for short entries. + MathRound( + MathSqrt( + approxCharHeights * biasedMax * outputLength + ) / biasedMax + ), + // Do not exceed the breakLength. + MathFloor((ctx.breakLength - ctx.indentationLvl) / actualMax), + // Limit array grouping for small `compact` modes as the user requested + // minimal grouping. + ctx.compact * 4, + // Limit the columns to a maximum of fifteen. + 15 + ); + // Return with the original output if no grouping should happen. 
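+    // For example, an array of one hundred short numbers is typically
+    // regrouped into rows of roughly a dozen right-aligned columns here
+    // instead of being printed one entry per line.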
+ if (columns <= 1) { + return output; + } + const tmp = []; + const maxLineLength = []; + for (let i = 0; i < columns; i++) { + let lineMaxLength = 0; + for (let j = i; j < output.length; j += columns) { + if (dataLen[j] > lineMaxLength) + lineMaxLength = dataLen[j]; + } + lineMaxLength += separatorSpace; + maxLineLength[i] = lineMaxLength; + } + let order = StringPrototypePadStart; + if (value !== undefined) { + for (let i = 0; i < output.length; i++) { + if (typeof value[i] !== 'number' && typeof value[i] !== 'bigint') { + order = StringPrototypePadEnd; + break; + } + } + } + // Each iteration creates a single line of grouped entries. + for (let i = 0; i < outputLength; i += columns) { + // The last lines may contain less entries than columns. + const max = MathMin(i + columns, outputLength); + let str = ''; + let j = i; + for (; j < max - 1; j++) { + // Calculate extra color padding in case it's active. This has to be + // done line by line as some lines might contain more colors than + // others. + const padding = maxLineLength[j - i] + output[j].length - dataLen[j]; + str += order(`${output[j]}, `, padding, ' '); + } + if (order === StringPrototypePadStart) { + const padding = maxLineLength[j - i] + + output[j].length - + dataLen[j] - + separatorSpace; + str += StringPrototypePadStart(output[j], padding, ' '); + } else { + str += output[j]; + } + ArrayPrototypePush(tmp, str); + } + if (ctx.maxArrayLength < output.length) { + ArrayPrototypePush(tmp, output[outputLength]); + } + output = tmp; + } + return output; +} + +function handleMaxCallStackSize(ctx, err, constructorName, indentationLvl) { + if (isStackOverflowError(err)) { + ctx.seen.pop(); + ctx.indentationLvl = indentationLvl; + return ctx.stylize( + `[${constructorName}: Inspection interrupted ` + + 'prematurely. Maximum call stack size exceeded.]', + 'special' + ); + } + /* c8 ignore next */ + assert.fail(err.stack); +} + +function addNumericSeparator(integerString) { + let result = ''; + let i = integerString.length; + const start = integerString.startsWith('-') ? 1 : 0; + for (; i >= start + 4; i -= 3) { + result = `_${integerString.slice(i - 3, i)}${result}`; + } + return i === integerString.length ? + integerString : + `${integerString.slice(0, i)}${result}`; +} + +function addNumericSeparatorEnd(integerString) { + let result = ''; + let i = 0; + for (; i < integerString.length - 3; i += 3) { + result += `${integerString.slice(i, i + 3)}_`; + } + return i === 0 ? + integerString : + `${result}${integerString.slice(i)}`; +} + +function formatNumber(fn, number, numericSeparator) { + if (!numericSeparator) { + // Format -0 as '-0'. Checking `number === -0` won't distinguish 0 from -0. 
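+    // ObjectIs is used because -0 === 0 is true, so a strict comparison cannot
+    // tell the two apart: e.g. inspect(-0) prints '-0' while inspect(0)
+    // prints '0'.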
+    if (ObjectIs(number, -0)) {
+      return fn('-0', 'number');
+    }
+    return fn(`${number}`, 'number');
+  }
+  const integer = MathTrunc(number);
+  const string = String(integer);
+  if (integer === number) {
+    if (!NumberIsFinite(number) || string.includes('e')) {
+      return fn(string, 'number');
+    }
+    return fn(`${addNumericSeparator(string)}`, 'number');
+  }
+  if (NumberIsNaN(number)) {
+    return fn(string, 'number');
+  }
+  return fn(`${
+    addNumericSeparator(string)
+  }.${
+    addNumericSeparatorEnd(String(number).slice(string.length + 1))
+  }`, 'number');
+}
+
+function formatBigInt(fn, bigint, numericSeparator) {
+  const string = String(bigint);
+  if (!numericSeparator) {
+    return fn(`${string}n`, 'bigint');
+  }
+  return fn(`${addNumericSeparator(string)}n`, 'bigint');
+}
+
+function formatPrimitive(fn, value, ctx) {
+  if (typeof value === 'string') {
+    let trailer = '';
+    if (value.length > ctx.maxStringLength) {
+      const remaining = value.length - ctx.maxStringLength;
+      value = value.slice(0, ctx.maxStringLength);
+      trailer = `... ${remaining} more character${remaining > 1 ? 's' : ''}`;
+    }
+    if (ctx.compact !== true &&
+      // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth
+      // function.
+      value.length > kMinLineLength &&
+      value.length > ctx.breakLength - ctx.indentationLvl - 4) {
+      return value
+        .split(/(?<=\n)/)
+        .map((line) => fn(strEscape(line), 'string'))
+        .join(` +\n${' '.repeat(ctx.indentationLvl + 2)}`) + trailer;
+    }
+    return fn(strEscape(value), 'string') + trailer;
+  }
+  if (typeof value === 'number')
+    return formatNumber(fn, value, ctx.numericSeparator);
+  if (typeof value === 'bigint')
+    return formatBigInt(fn, value, ctx.numericSeparator);
+  if (typeof value === 'boolean')
+    return fn(`${value}`, 'boolean');
+  if (typeof value === 'undefined')
+    return fn('undefined', 'undefined');
+  // es6 symbol primitive
+  return fn(SymbolPrototypeToString(value), 'symbol');
+}
+
+function formatNamespaceObject(keys, ctx, value, recurseTimes) {
+  const output = new Array(keys.length);
+  for (let i = 0; i < keys.length; i++) {
+    try {
+      output[i] = formatProperty(ctx, value, recurseTimes, keys[i],
+        kObjectType);
+    } catch (err) {
+      assert(isNativeError(err) && err.name === 'ReferenceError');
+      // Use the existing functionality. This makes sure the indentation and
+      // line breaks are always correct. Otherwise it is very difficult to keep
+      // this aligned, even though this is a hacky way of dealing with this.
+      const tmp = { [keys[i]]: '' };
+      output[i] = formatProperty(ctx, tmp, recurseTimes, keys[i], kObjectType);
+      const pos = output[i].lastIndexOf(' ');
+      // We have to find the last whitespace and have to replace that value as
+      // it will be visualized as a regular string.
+      output[i] = output[i].slice(0, pos + 1) +
+        ctx.stylize('<uninitialized>', 'special');
+    }
+  }
+  // Reset the keys to an empty array. This prevents duplicated inspection.
+  keys.length = 0;
+  return output;
+}
+
+// The array is sparse and/or has extra keys
+function formatSpecialArray(ctx, value, recurseTimes, maxLength, output, i) {
+  const keys = ObjectKeys(value);
+  let index = i;
+  for (; i < keys.length && output.length < maxLength; i++) {
+    const key = keys[i];
+    const tmp = +key;
+    // Arrays can only have up to 2^32 - 1 entries
+    if (tmp > 2 ** 32 - 2) {
+      break;
+    }
+    if (`${index}` !== key) {
+      if (!numberRegExp.test(key)) {
+        break;
+      }
+      const emptyItems = tmp - index;
+      const ending = emptyItems > 1 ?
's' : ''; + const message = `<${emptyItems} empty item${ending}>`; + output.push(ctx.stylize(message, 'undefined')); + index = tmp; + if (output.length === maxLength) { + break; + } + } + output.push(formatProperty(ctx, value, recurseTimes, key, kArrayType)); + index++; + } + const remaining = value.length - index; + if (output.length !== maxLength) { + if (remaining > 0) { + const ending = remaining > 1 ? 's' : ''; + const message = `<${remaining} empty item${ending}>`; + output.push(ctx.stylize(message, 'undefined')); + } + } else if (remaining > 0) { + output.push(`... ${remaining} more item${remaining > 1 ? 's' : ''}`); + } + return output; +} + +function formatArrayBuffer(ctx, value) { + let buffer; + try { + buffer = new Uint8Array(value); + } catch { + return [ctx.stylize('(detached)', 'special')]; + } + if (hexSlice === undefined) + hexSlice = uncurryThis(require('buffer').Buffer.prototype.hexSlice); + let str = StringPrototypeTrim(StringPrototypeReplace( + hexSlice(buffer, 0, MathMin(ctx.maxArrayLength, buffer.length)), + /(.{2})/g, '$1 ')); + const remaining = buffer.length - ctx.maxArrayLength; + if (remaining > 0) + str += ` ... ${remaining} more byte${remaining > 1 ? 's' : ''}`; + return [`${ctx.stylize('[Uint8Contents]', 'special')}: <${str}>`]; +} + +function formatArray(ctx, value, recurseTimes) { + const valLen = value.length; + const len = MathMin(MathMax(0, ctx.maxArrayLength), valLen); + + const remaining = valLen - len; + const output = []; + for (let i = 0; i < len; i++) { + // Special handle sparse arrays. + if (!ObjectPrototypeHasOwnProperty(value, i)) { + return formatSpecialArray(ctx, value, recurseTimes, len, output, i); + } + output.push(formatProperty(ctx, value, recurseTimes, i, kArrayType)); + } + if (remaining > 0) + output.push(`... ${remaining} more item${remaining > 1 ? 's' : ''}`); + return output; +} + +function formatTypedArray(value, length, ctx, ignored, recurseTimes) { + const maxLength = MathMin(MathMax(0, ctx.maxArrayLength), length); + const remaining = value.length - maxLength; + const output = new Array(maxLength); + const elementFormatter = value.length > 0 && typeof value[0] === 'number' ? + formatNumber : + formatBigInt; + for (let i = 0; i < maxLength; ++i) { + output[i] = elementFormatter(ctx.stylize, value[i], ctx.numericSeparator); + } + if (remaining > 0) { + output[maxLength] = `... ${remaining} more item${remaining > 1 ? 's' : ''}`; + } + if (ctx.showHidden) { + // .buffer goes last, it's not a primitive like the others. + // All besides `BYTES_PER_ELEMENT` are actually getters. 
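+    // For example, inspect(new Uint8Array(2), { showHidden: true }) appends
+    // entries such as [BYTES_PER_ELEMENT]: 1, [length]: 2, [byteLength]: 2,
+    // [byteOffset]: 0 and [buffer]: ArrayBuffer { ... } after the elements.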
+    ctx.indentationLvl += 2;
+    for (const key of [
+      'BYTES_PER_ELEMENT',
+      'length',
+      'byteLength',
+      'byteOffset',
+      'buffer',
+    ]) {
+      const str = formatValue(ctx, value[key], recurseTimes, true);
+      ArrayPrototypePush(output, `[${key}]: ${str}`);
+    }
+    ctx.indentationLvl -= 2;
+  }
+  return output;
+}
+
+function formatSet(value, ctx, ignored, recurseTimes) {
+  const output = [];
+  ctx.indentationLvl += 2;
+  for (const v of value) {
+    ArrayPrototypePush(output, formatValue(ctx, v, recurseTimes));
+  }
+  ctx.indentationLvl -= 2;
+  return output;
+}
+
+function formatMap(value, ctx, ignored, recurseTimes) {
+  const output = [];
+  ctx.indentationLvl += 2;
+  for (const { 0: k, 1: v } of value) {
+    output.push(
+      `${formatValue(ctx, k, recurseTimes)} => ${formatValue(ctx, v, recurseTimes)}`
+    );
+  }
+  ctx.indentationLvl -= 2;
+  return output;
+}
+
+function formatSetIterInner(ctx, recurseTimes, entries, state) {
+  const maxArrayLength = MathMax(ctx.maxArrayLength, 0);
+  const maxLength = MathMin(maxArrayLength, entries.length);
+  const output = new Array(maxLength);
+  ctx.indentationLvl += 2;
+  for (let i = 0; i < maxLength; i++) {
+    output[i] = formatValue(ctx, entries[i], recurseTimes);
+  }
+  ctx.indentationLvl -= 2;
+  if (state === kWeak && !ctx.sorted) {
+    // Sort all entries to have a halfway reliable output (if more entries than
+    // retrieved ones exist, we can not reliably return the same output) if the
+    // output is not sorted anyway.
+    ArrayPrototypeSort(output);
+  }
+  const remaining = entries.length - maxLength;
+  if (remaining > 0) {
+    ArrayPrototypePush(output,
+      `... ${remaining} more item${remaining > 1 ? 's' : ''}`);
+  }
+  return output;
+}
+
+function formatMapIterInner(ctx, recurseTimes, entries, state) {
+  const maxArrayLength = MathMax(ctx.maxArrayLength, 0);
+  // Entries exist as [key1, val1, key2, val2, ...]
+  const len = entries.length / 2;
+  const remaining = len - maxArrayLength;
+  const maxLength = MathMin(maxArrayLength, len);
+  let output = new Array(maxLength);
+  let i = 0;
+  ctx.indentationLvl += 2;
+  if (state === kWeak) {
+    for (; i < maxLength; i++) {
+      const pos = i * 2;
+      output[i] =
+        `${formatValue(ctx, entries[pos], recurseTimes)} => ${formatValue(ctx, entries[pos + 1], recurseTimes)}`;
+    }
+    // Sort all entries to have a halfway reliable output (if more entries than
+    // retrieved ones exist, we can not reliably return the same output) if the
+    // output is not sorted anyway.
+    if (!ctx.sorted)
+      output = output.sort();
+  } else {
+    for (; i < maxLength; i++) {
+      const pos = i * 2;
+      const res = [
+        formatValue(ctx, entries[pos], recurseTimes),
+        formatValue(ctx, entries[pos + 1], recurseTimes),
+      ];
+      output[i] = reduceToSingleString(
+        ctx, res, '', ['[', ']'], kArrayExtrasType, recurseTimes);
+    }
+  }
+  ctx.indentationLvl -= 2;
+  if (remaining > 0) {
+    output.push(`... ${remaining} more item${remaining > 1 ? 's' : ''}`);
+  }
+  return output;
+}
+
+function formatWeakCollection(ctx) {
+  return [ctx.stylize('<items unknown>', 'special')];
+}
+
+function formatWeakSet(ctx, value, recurseTimes) {
+  const entries = previewEntries(value);
+  return formatSetIterInner(ctx, recurseTimes, entries, kWeak);
+}
+
+function formatWeakMap(ctx, value, recurseTimes) {
+  const entries = previewEntries(value);
+  return formatMapIterInner(ctx, recurseTimes, entries, kWeak);
+}
+
+function formatIterator(braces, ctx, value, recurseTimes) {
+  const { 0: entries, 1: isKeyValue } = previewEntries(value, true);
+  if (isKeyValue) {
+    // Mark entry iterators as such.
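+    // For example, new Map([[1, 2]]).entries() is rendered roughly as
+    // [Map Entries] { [ 1, 2 ] } rather than as a plain [Map Iterator].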
+    braces[0] = braces[0].replace(/ Iterator] {$/, ' Entries] {');
+    return formatMapIterInner(ctx, recurseTimes, entries, kMapEntries);
+  }
+
+  return formatSetIterInner(ctx, recurseTimes, entries, kIterator);
+}
+
+function formatPromise(ctx, value, recurseTimes) {
+  let output;
+  const { 0: state, 1: result } = getPromiseDetails(value);
+  if (state === kPending) {
+    output = [ctx.stylize('<pending>', 'special')];
+  } else {
+    ctx.indentationLvl += 2;
+    const str = formatValue(ctx, result, recurseTimes);
+    ctx.indentationLvl -= 2;
+    output = [
+      state === kRejected ?
+        `${ctx.stylize('<rejected>', 'special')} ${str}` :
+        str,
+    ];
+  }
+  return output;
+}
+
+function formatProperty(ctx, value, recurseTimes, key, type, desc,
+                        original = value) {
+  let name, str;
+  let extra = ' ';
+  desc = desc || ObjectGetOwnPropertyDescriptor(value, key) ||
+    { value: value[key], enumerable: true };
+  if (desc.value !== undefined) {
+    const diff = (ctx.compact !== true || type !== kObjectType) ? 2 : 3;
+    ctx.indentationLvl += diff;
+    str = formatValue(ctx, desc.value, recurseTimes);
+    if (diff === 3 && ctx.breakLength < getStringWidth(str, ctx.colors)) {
+      extra = `\n${' '.repeat(ctx.indentationLvl)}`;
+    }
+    ctx.indentationLvl -= diff;
+  } else if (desc.get !== undefined) {
+    const label = desc.set !== undefined ? 'Getter/Setter' : 'Getter';
+    const s = ctx.stylize;
+    const sp = 'special';
+    if (ctx.getters && (ctx.getters === true ||
+      (ctx.getters === 'get' && desc.set === undefined) ||
+      (ctx.getters === 'set' && desc.set !== undefined))) {
+      try {
+        const tmp = FunctionPrototypeCall(desc.get, original);
+        ctx.indentationLvl += 2;
+        if (tmp === null) {
+          str = `${s(`[${label}:`, sp)} ${s('null', 'null')}${s(']', sp)}`;
+        } else if (typeof tmp === 'object') {
+          str = `${s(`[${label}]`, sp)} ${formatValue(ctx, tmp, recurseTimes)}`;
+        } else {
+          const primitive = formatPrimitive(s, tmp, ctx);
+          str = `${s(`[${label}:`, sp)} ${primitive}${s(']', sp)}`;
+        }
+        ctx.indentationLvl -= 2;
+      } catch (err) {
+        const message = `<Inspection threw (${err.message})>`;
+        str = `${s(`[${label}:`, sp)} ${message}${s(']', sp)}`;
+      }
+    } else {
+      str = ctx.stylize(`[${label}]`, sp);
+    }
+  } else if (desc.set !== undefined) {
+    str = ctx.stylize('[Setter]', 'special');
+  } else {
+    str = ctx.stylize('undefined', 'undefined');
+  }
+  if (type === kArrayType) {
+    return str;
+  }
+  if (typeof key === 'symbol') {
+    const tmp = StringPrototypeReplace(
+      SymbolPrototypeToString(key),
+      strEscapeSequencesReplacer, escapeFn
+    );
+    name = `[${ctx.stylize(tmp, 'symbol')}]`;
+  } else if (key === '__proto__') {
+    name = "['__proto__']";
+  } else if (desc.enumerable === false) {
+    const tmp = StringPrototypeReplace(key,
+      strEscapeSequencesReplacer, escapeFn);
+    name = `[${tmp}]`;
+  } else if (RegExpPrototypeTest(keyStrRegExp, key)) {
+    name = ctx.stylize(key, 'name');
+  } else {
+    name = ctx.stylize(strEscape(key), 'string');
+  }
+  return `${name}:${extra}${str}`;
+}
+
+function isBelowBreakLength(ctx, output, start, base) {
+  // Each entry is separated by at least a comma. Thus, we start with a total
+  // length of at least `output.length`. In addition, some cases have a
+  // whitespace in-between each other that is added to the total as well.
+  // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth
+  // function. Check the performance overhead and make it an opt-in in case it's
+  // significant.
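+  // Worked example: with output = ['a: 1', 'b: 2', 'c: 3'] and start = 10 the
+  // total starts at 3 + 10 = 13, the early check 13 + 3 <= 80 passes, and
+  // adding the three entry widths (4 each) gives 25, so the entries stay on a
+  // single line as long as `base` contains no newline.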
+ let totalLength = output.length + start; + if (totalLength + output.length > ctx.breakLength) + return false; + for (let i = 0; i < output.length; i++) { + if (ctx.colors) { + totalLength += removeColors(output[i]).length; + } else { + totalLength += output[i].length; + } + if (totalLength > ctx.breakLength) { + return false; + } + } + // Do not line up properties on the same line if `base` contains line breaks. + return base === '' || !StringPrototypeIncludes(base, '\n'); +} + +function reduceToSingleString( + ctx, output, base, braces, extrasType, recurseTimes, value) { + if (ctx.compact !== true) { + if (typeof ctx.compact === 'number' && ctx.compact >= 1) { + // Memorize the original output length. In case the output is grouped, + // prevent lining up the entries on a single line. + const entries = output.length; + // Group array elements together if the array contains at least six + // separate entries. + if (extrasType === kArrayExtrasType && entries > 6) { + output = groupArrayElements(ctx, output, value); + } + // `ctx.currentDepth` is set to the most inner depth of the currently + // inspected object part while `recurseTimes` is the actual current depth + // that is inspected. + // + // Example: + // + // const a = { first: [ 1, 2, 3 ], second: { inner: [ 1, 2, 3 ] } } + // + // The deepest depth of `a` is 2 (a.second.inner) and `a.first` has a max + // depth of 1. + // + // Consolidate all entries of the local most inner depth up to + // `ctx.compact`, as long as the properties are smaller than + // `ctx.breakLength`. + if (ctx.currentDepth - recurseTimes < ctx.compact && + entries === output.length) { + // Line up all entries on a single line in case the entries do not + // exceed `breakLength`. Add 10 as constant to start next to all other + // factors that may reduce `breakLength`. + const start = output.length + ctx.indentationLvl + + braces[0].length + base.length + 10; + if (isBelowBreakLength(ctx, output, start, base)) { + const joinedOutput = join(output, ', '); + if (!joinedOutput.includes('\n')) { + return `${base ? `${base} ` : ''}${braces[0]} ${joinedOutput}` + + ` ${braces[1]}`; + } + } + } + } + // Line up each entry on an individual line. + const indentation = `\n${StringPrototypeRepeat(' ', ctx.indentationLvl)}`; + return `${base ? `${base} ` : ''}${braces[0]}${indentation} ` + + `${join(output, `,${indentation} `)}${indentation}${braces[1]}`; + } + // Line up all entries on a single line in case the entries do not exceed + // `breakLength`. + if (isBelowBreakLength(ctx, output, 0, base)) { + return `${braces[0]}${base ? ` ${base}` : ''} ${join(output, ', ')} ` + + braces[1]; + } + const indentation = StringPrototypeRepeat(' ', ctx.indentationLvl); + // If the opening "brace" is too large, like in the case of "Set {", + // we need to force the first item to be on the next line or the + // items will not line up correctly. + const ln = base === '' && braces[0].length === 1 ? + ' ' : `${base ? ` ${base}` : ''}\n${indentation} `; + // Line up each entry on an individual line. + return `${braces[0]}${ln}${join(output, `,\n${indentation} `)} ${braces[1]}`; +} + +function hasBuiltInToString(value) { + // Prevent triggering proxy traps. + const getFullProxy = false; + const proxyTarget = getProxyDetails(value, getFullProxy); + if (proxyTarget !== undefined) { + value = proxyTarget; + } + + // Count objects that have no `toString` function as built-in. + if (typeof value.toString !== 'function') { + return true; + } + + // The object has a own `toString` property. 
Thus it's not not a built-in one. + if (ObjectPrototypeHasOwnProperty(value, 'toString')) { + return false; + } + + // Find the object that has the `toString` property as own property in the + // prototype chain. + let pointer = value; + do { + pointer = ObjectGetPrototypeOf(pointer); + } while (!ObjectPrototypeHasOwnProperty(pointer, 'toString')); + + // Check closer if the object is a built-in. + const descriptor = ObjectGetOwnPropertyDescriptor(pointer, 'constructor'); + return descriptor !== undefined && + typeof descriptor.value === 'function' && + builtInObjects.has(descriptor.value.name); +} + +const firstErrorLine = (error) => + StringPrototypeSplit(error.message, '\n', 1)[0]; +let CIRCULAR_ERROR_MESSAGE; +function tryStringify(arg) { + try { + return JSONStringify(arg); + } catch (err) { + // Populate the circular error message lazily + if (!CIRCULAR_ERROR_MESSAGE) { + try { + const a = {}; a.a = a; JSONStringify(a); + } catch (circularError) { + CIRCULAR_ERROR_MESSAGE = firstErrorLine(circularError); + } + } + if (err.name === 'TypeError' && + firstErrorLine(err) === CIRCULAR_ERROR_MESSAGE) { + return '[Circular]'; + } + throw err; + } +} + +function format(...args) { + return formatWithOptionsInternal(undefined, args); +} + +function formatWithOptions(inspectOptions, ...args) { + if (typeof inspectOptions !== 'object' || inspectOptions === null) { + throw new ERR_INVALID_ARG_TYPE( + 'inspectOptions', 'object', inspectOptions); + } + return formatWithOptionsInternal(inspectOptions, args); +} + +function formatNumberNoColor(number, options) { + return formatNumber( + stylizeNoColor, + number, + options?.numericSeparator ?? inspectDefaultOptions.numericSeparator + ); +} + +function formatBigIntNoColor(bigint, options) { + return formatBigInt( + stylizeNoColor, + bigint, + options?.numericSeparator ?? 
inspectDefaultOptions.numericSeparator + ); +} + +function formatWithOptionsInternal(inspectOptions, args) { + const first = args[0]; + let a = 0; + let str = ''; + let join = ''; + + if (typeof first === 'string') { + if (args.length === 1) { + return first; + } + let tempStr; + let lastPos = 0; + + for (let i = 0; i < first.length - 1; i++) { + if (StringPrototypeCharCodeAt(first, i) === 37) { // '%' + const nextChar = StringPrototypeCharCodeAt(first, ++i); + if (a + 1 !== args.length) { + switch (nextChar) { + case 115: { // 's' + const tempArg = args[++a]; + if (typeof tempArg === 'number') { + tempStr = formatNumberNoColor(tempArg, inspectOptions); + } else if (typeof tempArg === 'bigint') { + tempStr = formatBigIntNoColor(tempArg, inspectOptions); + } else if (typeof tempArg !== 'object' || + tempArg === null || + !hasBuiltInToString(tempArg)) { + tempStr = String(tempArg); + } else { + tempStr = inspect(tempArg, { + ...inspectOptions, + compact: 3, + colors: false, + depth: 0 + }); + } + break; + } + case 106: // 'j' + tempStr = tryStringify(args[++a]); + break; + case 100: { // 'd' + const tempNum = args[++a]; + if (typeof tempNum === 'bigint') { + tempStr = formatBigIntNoColor(tempNum, inspectOptions); + } else if (typeof tempNum === 'symbol') { + tempStr = 'NaN'; + } else { + tempStr = formatNumberNoColor(Number(tempNum), inspectOptions); + } + break; + } + case 79: // 'O' + tempStr = inspect(args[++a], inspectOptions); + break; + case 111: // 'o' + tempStr = inspect(args[++a], { + ...inspectOptions, + showHidden: true, + showProxy: true, + depth: 4 + }); + break; + case 105: { // 'i' + const tempInteger = args[++a]; + if (typeof tempInteger === 'bigint') { + tempStr = formatBigIntNoColor(tempInteger, inspectOptions); + } else if (typeof tempInteger === 'symbol') { + tempStr = 'NaN'; + } else { + tempStr = formatNumberNoColor( + NumberParseInt(tempInteger), inspectOptions); + } + break; + } + case 102: { // 'f' + const tempFloat = args[++a]; + if (typeof tempFloat === 'symbol') { + tempStr = 'NaN'; + } else { + tempStr = formatNumberNoColor( + NumberParseFloat(tempFloat), inspectOptions); + } + break; + } + case 99: // 'c' + a += 1; + tempStr = ''; + break; + case 37: // '%' + str += StringPrototypeSlice(first, lastPos, i); + lastPos = i + 1; + continue; + default: // Any other character is not a correct placeholder + continue; + } + if (lastPos !== i - 1) { + str += StringPrototypeSlice(first, lastPos, i - 1); + } + str += tempStr; + lastPos = i + 1; + } else if (nextChar === 37) { + str += StringPrototypeSlice(first, lastPos, i); + lastPos = i + 1; + } + } + } + if (lastPos !== 0) { + a++; + join = ' '; + if (lastPos < first.length) { + str += StringPrototypeSlice(first, lastPos); + } + } + } + + while (a < args.length) { + const value = args[a]; + str += join; + str += typeof value !== 'string' ? inspect(value, inspectOptions) : value; + join = ' '; + a++; + } + return str; +} + +if (false) { + const icu = {}; + // icu.getStringWidth(string, ambiguousAsFullWidth, expandEmojiSequence) + // Defaults: ambiguousAsFullWidth = false; expandEmojiSequence = true; + // TODO(BridgeAR): Expose the options to the user. That is probably the + // best thing possible at the moment, since it's difficult to know what + // the receiving end supports. 
+ getStringWidth = function getStringWidth(str, removeControlChars = true) { + let width = 0; + + if (removeControlChars) + str = stripVTControlCharacters(str); + for (let i = 0; i < str.length; i++) { + // Try to avoid calling into C++ by first handling the ASCII portion of + // the string. If it is fully ASCII, we skip the C++ part. + const code = str.charCodeAt(i); + if (code >= 127) { + width += icu.getStringWidth(str.slice(i).normalize('NFC')); + break; + } + width += code >= 32 ? 1 : 0; + } + return width; + }; +} else { + /** + * Returns the number of columns required to display the given string. + */ + getStringWidth = function getStringWidth(str, removeControlChars = true) { + let width = 0; + + if (removeControlChars) + str = stripVTControlCharacters(str); + str = StringPrototypeNormalize(str, 'NFC'); + for (const char of new SafeStringIterator(str)) { + const code = StringPrototypeCodePointAt(char, 0); + if (isFullWidthCodePoint(code)) { + width += 2; + } else if (!isZeroWidthCodePoint(code)) { + width++; + } + } + + return width; + }; + + /** + * Returns true if the character represented by a given + * Unicode code point is full-width. Otherwise returns false. + */ + const isFullWidthCodePoint = (code) => { + // Code points are partially derived from: + // https://www.unicode.org/Public/UNIDATA/EastAsianWidth.txt + return code >= 0x1100 && ( + code <= 0x115f || // Hangul Jamo + code === 0x2329 || // LEFT-POINTING ANGLE BRACKET + code === 0x232a || // RIGHT-POINTING ANGLE BRACKET + // CJK Radicals Supplement .. Enclosed CJK Letters and Months + (code >= 0x2e80 && code <= 0x3247 && code !== 0x303f) || + // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A + (code >= 0x3250 && code <= 0x4dbf) || + // CJK Unified Ideographs .. Yi Radicals + (code >= 0x4e00 && code <= 0xa4c6) || + // Hangul Jamo Extended-A + (code >= 0xa960 && code <= 0xa97c) || + // Hangul Syllables + (code >= 0xac00 && code <= 0xd7a3) || + // CJK Compatibility Ideographs + (code >= 0xf900 && code <= 0xfaff) || + // Vertical Forms + (code >= 0xfe10 && code <= 0xfe19) || + // CJK Compatibility Forms .. Small Form Variants + (code >= 0xfe30 && code <= 0xfe6b) || + // Halfwidth and Fullwidth Forms + (code >= 0xff01 && code <= 0xff60) || + (code >= 0xffe0 && code <= 0xffe6) || + // Kana Supplement + (code >= 0x1b000 && code <= 0x1b001) || + // Enclosed Ideographic Supplement + (code >= 0x1f200 && code <= 0x1f251) || + // Miscellaneous Symbols and Pictographs 0x1f300 - 0x1f5ff + // Emoticons 0x1f600 - 0x1f64f + (code >= 0x1f300 && code <= 0x1f64f) || + // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane + (code >= 0x20000 && code <= 0x3fffd) + ); + }; + + const isZeroWidthCodePoint = (code) => { + return code <= 0x1F || // C0 control codes + (code >= 0x7F && code <= 0x9F) || // C1 control codes + (code >= 0x300 && code <= 0x36F) || // Combining Diacritical Marks + (code >= 0x200B && code <= 0x200F) || // Modifying Invisible Characters + // Combining Diacritical Marks for Symbols + (code >= 0x20D0 && code <= 0x20FF) || + (code >= 0xFE00 && code <= 0xFE0F) || // Variation Selectors + (code >= 0xFE20 && code <= 0xFE2F) || // Combining Half Marks + (code >= 0xE0100 && code <= 0xE01EF); // Variation Selectors + }; +} + +/** + * Remove all VT control characters. Use to estimate displayed string width. 
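+ * For example, stripVTControlCharacters('\u001B[31mred\u001B[39m')
+ * returns 'red'.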
+ */ +function stripVTControlCharacters(str) { + validateString(str, 'str'); + + return str.replace(ansi, ''); +} + +module.exports = { + inspect, + format, + formatWithOptions, + getStringWidth, + inspectDefaultOptions, + stripVTControlCharacters +}; diff --git a/lib/internal/inspect.js b/lib/internal/inspect.js new file mode 100644 index 0000000000..bbf1224a0e --- /dev/null +++ b/lib/internal/inspect.js @@ -0,0 +1,2299 @@ +'use strict'; + +const { + Array, + ArrayIsArray, + ArrayPrototypeFilter, + ArrayPrototypeForEach, + ArrayPrototypePop, + ArrayPrototypePush, + ArrayPrototypePushApply, + ArrayPrototypeSort, + ArrayPrototypeUnshift, + BigIntPrototypeValueOf, + BooleanPrototypeValueOf, + DatePrototypeGetTime, + DatePrototypeToISOString, + DatePrototypeToString, + ErrorPrototypeToString, + FunctionPrototypeCall, + FunctionPrototypeToString, + JSONStringify, + MapPrototypeGetSize, + MapPrototypeEntries, + MathFloor, + MathMax, + MathMin, + MathRound, + MathSqrt, + MathTrunc, + Number, + NumberIsFinite, + NumberIsNaN, + NumberParseFloat, + NumberParseInt, + NumberPrototypeValueOf, + Object, + ObjectAssign, + ObjectCreate, + ObjectDefineProperty, + ObjectGetOwnPropertyDescriptor, + ObjectGetOwnPropertyNames, + ObjectGetOwnPropertySymbols, + ObjectGetPrototypeOf, + ObjectIs, + ObjectKeys, + ObjectPrototypeHasOwnProperty, + ObjectPrototypePropertyIsEnumerable, + ObjectSeal, + ObjectSetPrototypeOf, + ReflectOwnKeys, + RegExp, + RegExpPrototypeTest, + RegExpPrototypeToString, + SafeStringIterator, + SafeMap, + SafeSet, + SetPrototypeGetSize, + SetPrototypeValues, + String, + StringPrototypeCharCodeAt, + StringPrototypeCodePointAt, + StringPrototypeIncludes, + StringPrototypeNormalize, + StringPrototypePadEnd, + StringPrototypePadStart, + StringPrototypeRepeat, + StringPrototypeReplace, + StringPrototypeSlice, + StringPrototypeSplit, + StringPrototypeToLowerCase, + StringPrototypeTrim, + StringPrototypeValueOf, + SymbolPrototypeToString, + SymbolPrototypeValueOf, + SymbolIterator, + SymbolToStringTag, + TypedArrayPrototypeGetLength, + TypedArrayPrototypeGetSymbolToStringTag, + Uint8Array, + globalThis, + uncurryThis, +} = require('./primordials'); + +const { + getOwnNonIndexProperties, + getPromiseDetails, + getProxyDetails, + kPending, + kRejected, + previewEntries, + getConstructorName: internalGetConstructorName, + getExternalValue, + propertyFilter: { + ALL_PROPERTIES, + ONLY_ENUMERABLE + } +} = require('../util'); + +const { + customInspectSymbol, + isError, + join, + removeColors +} = require('../util'); + +const { + codes: { + ERR_INVALID_ARG_TYPE + }, + isStackOverflowError +} = require('./errors'); + +const { + isAsyncFunction, + isGeneratorFunction, + isAnyArrayBuffer, + isArrayBuffer, + isArgumentsObject, + isBoxedPrimitive, + isDataView, + isExternal, + isMap, + isMapIterator, + isModuleNamespaceObject, + isNativeError, + isPromise, + isSet, + isSetIterator, + isWeakMap, + isWeakSet, + isRegExp, + isDate, + isTypedArray, + isStringObject, + isNumberObject, + isBooleanObject, + isBigIntObject, +} = require('../util'); + +const assert = require('assert'); + +const { NativeModule } = + { + NativeModule: { + exists() { + return false; + } + } + } + +const { + validateObject, + validateString, +} = require('./validators'); + +let hexSlice; + +const builtInObjects = new SafeSet( + ArrayPrototypeFilter( + ObjectGetOwnPropertyNames(globalThis), + (e) => RegExpPrototypeTest(/^[A-Z][a-zA-Z0-9]+$/, e) + ) +); + +// https://tc39.es/ecma262/#sec-IsHTMLDDA-internal-slot +const isUndetectableObject 
= (v) => typeof v === 'undefined' && v !== undefined;
+
+// These options must stay in sync with `getUserOptions`. So if any option will
+// be added or removed, `getUserOptions` must also be updated accordingly.
+const inspectDefaultOptions = ObjectSeal({
+  showHidden: false,
+  depth: 2,
+  colors: false,
+  customInspect: true,
+  showProxy: false,
+  maxArrayLength: 100,
+  maxStringLength: 10000,
+  breakLength: 80,
+  compact: 3,
+  sorted: false,
+  getters: false,
+  numericSeparator: false,
+});
+
+const kObjectType = 0;
+const kArrayType = 1;
+const kArrayExtrasType = 2;
+
+/* eslint-disable no-control-regex */
+const strEscapeSequencesRegExp = /[\x00-\x1f\x27\x5c\x7f-\x9f]|[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]/;
+const strEscapeSequencesReplacer = /[\x00-\x1f\x27\x5c\x7f-\x9f]|[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]/g;
+const strEscapeSequencesRegExpSingle = /[\x00-\x1f\x5c\x7f-\x9f]|[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]/;
+const strEscapeSequencesReplacerSingle = /[\x00-\x1f\x5c\x7f-\x9f]|[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]/g;
+/* eslint-enable no-control-regex */
+
+const keyStrRegExp = /^[a-zA-Z_][a-zA-Z_0-9]*$/;
+const numberRegExp = /^(0|[1-9][0-9]*)$/;
+
+const coreModuleRegExp = /^ {4}at (?:[^/\\(]+ \()(?:node:(.+):\d+:\d+)\)$/;
+const nodeModulesRegExp = /[/\\]node_modules[/\\](.+?)(?=[/\\])/g;
+
+const classRegExp = /^(\s+[^(]*?)\s*{/;
+const stripCommentsRegExp = /(\/\/.*?\n)|(\/\*(.|\n)*?\*\/)/g;
+
+const kMinLineLength = 16;
+
+// Constants to map the iterator state.
+const kWeak = 0;
+const kIterator = 1;
+const kMapEntries = 2;
+
+// Escaped control characters (plus the single quote and the backslash). Use
+// empty strings to fill up unused entries.
+const meta = [
+  '\\x00', '\\x01', '\\x02', '\\x03', '\\x04', '\\x05', '\\x06', '\\x07', // x07
+  '\\b', '\\t', '\\n', '\\x0B', '\\f', '\\r', '\\x0E', '\\x0F',           // x0F
+  '\\x10', '\\x11', '\\x12', '\\x13', '\\x14', '\\x15', '\\x16', '\\x17', // x17
+  '\\x18', '\\x19', '\\x1A', '\\x1B', '\\x1C', '\\x1D', '\\x1E', '\\x1F', // x1F
+  '', '', '', '', '', '', '', "\\'", '', '', '', '', '', '', '', '',      // x2F
+  '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '',         // x3F
+  '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '',         // x4F
+  '', '', '', '', '', '', '', '', '', '', '', '', '\\\\', '', '', '',     // x5F
+  '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '',         // x6F
+  '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '\\x7F',    // x7F
+  '\\x80', '\\x81', '\\x82', '\\x83', '\\x84', '\\x85', '\\x86', '\\x87', // x87
+  '\\x88', '\\x89', '\\x8A', '\\x8B', '\\x8C', '\\x8D', '\\x8E', '\\x8F', // x8F
+  '\\x90', '\\x91', '\\x92', '\\x93', '\\x94', '\\x95', '\\x96', '\\x97', // x97
+  '\\x98', '\\x99', '\\x9A', '\\x9B', '\\x9C', '\\x9D', '\\x9E', '\\x9F', // x9F
+];
+
+// Regex used for ansi escape code splitting.
+// Adopted from https://github.com/chalk/ansi-regex/blob/HEAD/index.js
+// Matches all ansi escape code sequences in a string.
+const ansiPattern = '[\\u001B\\u009B][[\\]()#;?]*' +
+  '(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?' +
+  '(?:\\u0007|\\u001B\\u005C|\\u009C))|' +
+  '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))';
+const ansi = new RegExp(ansiPattern, 'g');
+
+let getStringWidth;
+
+function getUserOptions(ctx, isCrossContext) {
+  const ret = {
+    stylize: ctx.stylize,
+    showHidden: ctx.showHidden,
+    depth: ctx.depth,
+    colors: ctx.colors,
+    customInspect: ctx.customInspect,
+    showProxy: ctx.showProxy,
+    maxArrayLength: ctx.maxArrayLength,
+    maxStringLength: ctx.maxStringLength,
+    breakLength: ctx.breakLength,
+    compact: ctx.compact,
+    sorted: ctx.sorted,
+    getters: ctx.getters,
+    numericSeparator: ctx.numericSeparator,
+    ...ctx.userOptions
+  };
+
+  // Typically, the target value will be an instance of `Object`. If that is
+  // *not* the case, the object may come from another vm.Context, and we want
+  // to avoid passing it objects from this Context in that case, so we remove
+  // the prototype from the returned object itself + the `stylize()` function,
+  // and remove all other non-primitives, including non-primitive user options.
+  if (isCrossContext) {
+    ObjectSetPrototypeOf(ret, null);
+    for (const key of ObjectKeys(ret)) {
+      if ((typeof ret[key] === 'object' || typeof ret[key] === 'function') &&
+        ret[key] !== null) {
+        delete ret[key];
+      }
+    }
+    ret.stylize = ObjectSetPrototypeOf((value, flavour) => {
+      let stylized;
+      try {
+        stylized = `${ctx.stylize(value, flavour)}`;
+      } catch {
+        // Continue regardless of error.
+      }
+
+      if (typeof stylized !== 'string') return value;
+      // `stylized` is a string as it should be, which is safe to pass along.
+      return stylized;
+    }, null);
+  }
+
+  return ret;
+}
+
+/**
+ * Echos the value of any input. Tries to print the value out
+ * in the best way possible given the different types.
+ *
+ * @param {any} value The value to print out.
+ * @param {object} opts Optional options object that alters the output.
+ */
+/* Legacy: value, showHidden, depth, colors */
+function inspect(value, opts) {
+  // Default options
+  const ctx = {
+    budget: {},
+    indentationLvl: 0,
+    seen: [],
+    currentDepth: 0,
+    stylize: stylizeNoColor,
+    showHidden: inspectDefaultOptions.showHidden,
+    depth: inspectDefaultOptions.depth,
+    colors: inspectDefaultOptions.colors,
+    customInspect: inspectDefaultOptions.customInspect,
+    showProxy: inspectDefaultOptions.showProxy,
+    maxArrayLength: inspectDefaultOptions.maxArrayLength,
+    maxStringLength: inspectDefaultOptions.maxStringLength,
+    breakLength: inspectDefaultOptions.breakLength,
+    compact: inspectDefaultOptions.compact,
+    sorted: inspectDefaultOptions.sorted,
+    getters: inspectDefaultOptions.getters,
+    numericSeparator: inspectDefaultOptions.numericSeparator,
+  };
+  if (arguments.length > 1) {
+    // Legacy...
+ if (arguments.length > 2) { + if (arguments[2] !== undefined) { + ctx.depth = arguments[2]; + } + if (arguments.length > 3 && arguments[3] !== undefined) { + ctx.colors = arguments[3]; + } + } + // Set user-specified options + if (typeof opts === 'boolean') { + ctx.showHidden = opts; + } else if (opts) { + const optKeys = ObjectKeys(opts); + for (let i = 0; i < optKeys.length; ++i) { + const key = optKeys[i]; + // TODO(BridgeAR): Find a solution what to do about stylize. Either make + // this function public or add a new API with a similar or better + // functionality. + if ( + ObjectPrototypeHasOwnProperty(inspectDefaultOptions, key) || + key === 'stylize') { + ctx[key] = opts[key]; + } else if (ctx.userOptions === undefined) { + // This is required to pass through the actual user input. + ctx.userOptions = opts; + } + } + } + } + if (ctx.colors) ctx.stylize = stylizeWithColor; + if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; + if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; + return formatValue(ctx, value, 0); +} +inspect.custom = customInspectSymbol; + +ObjectDefineProperty(inspect, 'defaultOptions', { + get() { + return inspectDefaultOptions; + }, + set(options) { + validateObject(options, 'options'); + return ObjectAssign(inspectDefaultOptions, options); + } +}); + +// Set Graphics Rendition https://en.wikipedia.org/wiki/ANSI_escape_code#graphics +// Each color consists of an array with the color code as first entry and the +// reset code as second entry. +const defaultFG = 39; +const defaultBG = 49; +inspect.colors = ObjectAssign(ObjectCreate(null), { + reset: [0, 0], + bold: [1, 22], + dim: [2, 22], // Alias: faint + italic: [3, 23], + underline: [4, 24], + blink: [5, 25], + // Swap foreground and background colors + inverse: [7, 27], // Alias: swapcolors, swapColors + hidden: [8, 28], // Alias: conceal + strikethrough: [9, 29], // Alias: strikeThrough, crossedout, crossedOut + doubleunderline: [21, 24], // Alias: doubleUnderline + black: [30, defaultFG], + red: [31, defaultFG], + green: [32, defaultFG], + yellow: [33, defaultFG], + blue: [34, defaultFG], + magenta: [35, defaultFG], + cyan: [36, defaultFG], + white: [37, defaultFG], + bgBlack: [40, defaultBG], + bgRed: [41, defaultBG], + bgGreen: [42, defaultBG], + bgYellow: [43, defaultBG], + bgBlue: [44, defaultBG], + bgMagenta: [45, defaultBG], + bgCyan: [46, defaultBG], + bgWhite: [47, defaultBG], + framed: [51, 54], + overlined: [53, 55], + gray: [90, defaultFG], // Alias: grey, blackBright + redBright: [91, defaultFG], + greenBright: [92, defaultFG], + yellowBright: [93, defaultFG], + blueBright: [94, defaultFG], + magentaBright: [95, defaultFG], + cyanBright: [96, defaultFG], + whiteBright: [97, defaultFG], + bgGray: [100, defaultBG], // Alias: bgGrey, bgBlackBright + bgRedBright: [101, defaultBG], + bgGreenBright: [102, defaultBG], + bgYellowBright: [103, defaultBG], + bgBlueBright: [104, defaultBG], + bgMagentaBright: [105, defaultBG], + bgCyanBright: [106, defaultBG], + bgWhiteBright: [107, defaultBG], +}); + +function defineColorAlias(target, alias) { + ObjectDefineProperty(inspect.colors, alias, { + get() { + return this[target]; + }, + set(value) { + this[target] = value; + }, + configurable: true, + enumerable: false + }); +} + +defineColorAlias('gray', 'grey'); +defineColorAlias('gray', 'blackBright'); +defineColorAlias('bgGray', 'bgGrey'); +defineColorAlias('bgGray', 'bgBlackBright'); +defineColorAlias('dim', 'faint'); +defineColorAlias('strikethrough', 'crossedout'); 
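+// Aliases share storage with their target: e.g. inspect.colors.grey reads and
+// writes the same [90, 39] pair as inspect.colors.gray.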
+defineColorAlias('strikethrough', 'strikeThrough'); +defineColorAlias('strikethrough', 'crossedOut'); +defineColorAlias('hidden', 'conceal'); +defineColorAlias('inverse', 'swapColors'); +defineColorAlias('inverse', 'swapcolors'); +defineColorAlias('doubleunderline', 'doubleUnderline'); + +// TODO(BridgeAR): Add function style support for more complex styles. +// Don't use 'blue' not visible on cmd.exe +inspect.styles = ObjectAssign(ObjectCreate(null), { + special: 'cyan', + number: 'yellow', + bigint: 'yellow', + boolean: 'yellow', + undefined: 'grey', + null: 'bold', + string: 'green', + symbol: 'green', + date: 'magenta', + // "name": intentionally not styling + // TODO(BridgeAR): Highlight regular expressions properly. + regexp: 'red', + module: 'underline' +}); + +function addQuotes(str, quotes) { + if (quotes === -1) { + return `"${str}"`; + } + if (quotes === -2) { + return `\`${str}\``; + } + return `'${str}'`; +} + +function escapeFn(str) { + const charCode = StringPrototypeCharCodeAt(str); + return meta.length > charCode ? meta[charCode] : `\\u${charCode.toString(16)}`; +} + +// Escape control characters, single quotes and the backslash. +// This is similar to JSON stringify escaping. +function strEscape(str) { + let escapeTest = strEscapeSequencesRegExp; + let escapeReplace = strEscapeSequencesReplacer; + let singleQuote = 39; + + // Check for double quotes. If not present, do not escape single quotes and + // instead wrap the text in double quotes. If double quotes exist, check for + // backticks. If they do not exist, use those as fallback instead of the + // double quotes. + if (StringPrototypeIncludes(str, "'")) { + // This invalidates the charCode and therefore can not be matched for + // anymore. + if (!StringPrototypeIncludes(str, '"')) { + singleQuote = -1; + } else if (!StringPrototypeIncludes(str, '`') && + !StringPrototypeIncludes(str, '${')) { + singleQuote = -2; + } + if (singleQuote !== 39) { + escapeTest = strEscapeSequencesRegExpSingle; + escapeReplace = strEscapeSequencesReplacerSingle; + } + } + + // Some magic numbers that worked out fine while benchmarking with v8 6.0 + if (str.length < 5000 && !RegExpPrototypeTest(escapeTest, str)) + return addQuotes(str, singleQuote); + if (str.length > 100) { + str = StringPrototypeReplace(str, escapeReplace, escapeFn); + return addQuotes(str, singleQuote); + } + + let result = ''; + let last = 0; + for (let i = 0; i < str.length; i++) { + const point = StringPrototypeCharCodeAt(str, i); + if (point === singleQuote || + point === 92 || + point < 32 || + (point > 126 && point < 160)) { + if (last === i) { + result += meta[point]; + } else { + result += `${StringPrototypeSlice(str, last, i)}${meta[point]}`; + } + last = i + 1; + } else if (point >= 0xd800 && point <= 0xdfff) { + if (point <= 0xdbff && i + 1 < str.length) { + const point = StringPrototypeCharCodeAt(str, i + 1); + if (point >= 0xdc00 && point <= 0xdfff) { + i++; + continue; + } + } + result += `${StringPrototypeSlice(str, last, i)}${`\\u${point.toString(16)}`}`; + last = i + 1; + } + } + + if (last !== str.length) { + result += StringPrototypeSlice(str, last); + } + return addQuotes(result, singleQuote); +} + +function stylizeWithColor(str, styleType) { + const style = inspect.styles[styleType]; + if (style !== undefined) { + const color = inspect.colors[style]; + if (color !== undefined) + return `\u001b[${color[0]}m${str}\u001b[${color[1]}m`; + } + return str; +} + +function stylizeNoColor(str) { + return str; +} + +// Return a new empty array to push in the 
results of the default formatter.
+function getEmptyFormatArray() {
+  return [];
+}
+
+function isInstanceof(object, proto) {
+  try {
+    return object instanceof proto;
+  } catch {
+    return false;
+  }
+}
+
+function getConstructorName(obj, ctx, recurseTimes, protoProps) {
+  let firstProto;
+  const tmp = obj;
+  while (obj || isUndetectableObject(obj)) {
+    const descriptor = ObjectGetOwnPropertyDescriptor(obj, 'constructor');
+    if (descriptor !== undefined &&
+      typeof descriptor.value === 'function' &&
+      descriptor.value.name !== '' &&
+      isInstanceof(tmp, descriptor.value)) {
+      if (protoProps !== undefined &&
+        (firstProto !== obj ||
+          !builtInObjects.has(descriptor.value.name))) {
+        addPrototypeProperties(
+          ctx, tmp, firstProto || tmp, recurseTimes, protoProps);
+      }
+      return descriptor.value.name;
+    }
+
+    obj = ObjectGetPrototypeOf(obj);
+    if (firstProto === undefined) {
+      firstProto = obj;
+    }
+  }
+
+  if (firstProto === null) {
+    return null;
+  }
+
+  const res = internalGetConstructorName(tmp);
+
+  if (recurseTimes > ctx.depth && ctx.depth !== null) {
+    return `${res} <Complex prototype>`;
+  }
+
+  const protoConstr = getConstructorName(
+    firstProto, ctx, recurseTimes + 1, protoProps);
+
+  if (protoConstr === null) {
+    return `${res} <${inspect(firstProto, {
+      ...ctx,
+      customInspect: false,
+      depth: -1
+    })}>`;
+  }
+
+  return `${res} <${protoConstr}>`;
+}
+
+// This function has the side effect of adding prototype properties to the
+// `output` argument (which is an array). This is intended to highlight user
+// defined prototype properties.
+function addPrototypeProperties(ctx, main, obj, recurseTimes, output) {
+  let depth = 0;
+  let keys;
+  let keySet;
+  do {
+    if (depth !== 0 || main === obj) {
+      obj = ObjectGetPrototypeOf(obj);
+      // Stop as soon as a null prototype is encountered.
+      if (obj === null) {
+        return;
+      }
+      // Stop as soon as a built-in object type is detected.
+      const descriptor = ObjectGetOwnPropertyDescriptor(obj, 'constructor');
+      if (descriptor !== undefined &&
+        typeof descriptor.value === 'function' &&
+        builtInObjects.has(descriptor.value.name)) {
+        return;
+      }
+    }
+
+    if (depth === 0) {
+      keySet = new SafeSet();
+    } else {
+      ArrayPrototypeForEach(keys, (key) => keySet.add(key));
+    }
+    // Get all own property names and symbols.
+    keys = ReflectOwnKeys(obj);
+    ArrayPrototypePush(ctx.seen, main);
+    for (const key of keys) {
+      // Ignore the `constructor` property and keys that exist on layers above.
+      if (key === 'constructor' ||
+        ObjectPrototypeHasOwnProperty(main, key) ||
+        (depth !== 0 && keySet.has(key))) {
+        continue;
+      }
+      const desc = ObjectGetOwnPropertyDescriptor(obj, key);
+      if (typeof desc.value === 'function') {
+        continue;
+      }
+      const value = formatProperty(
+        ctx, obj, recurseTimes, key, kObjectType, desc, main);
+      if (ctx.colors) {
+        // Faint!
+        ArrayPrototypePush(output, `\u001b[2m${value}\u001b[22m`);
+      } else {
+        ArrayPrototypePush(output, value);
+      }
+    }
+    ArrayPrototypePop(ctx.seen);
+    // Limit the inspection to up to three prototype layers. Using `recurseTimes`
+    // is not a good choice here, because it's as if the properties are declared
+    // on the current object from the user's perspective.
+ } while (++depth !== 3); +} + +function getPrefix(constructor, tag, fallback, size = '') { + if (constructor === null) { + if (tag !== '' && fallback !== tag) { + return `[${fallback}${size}: null prototype] [${tag}] `; + } + return `[${fallback}${size}: null prototype] `; + } + + if (tag !== '' && constructor !== tag) { + return `${constructor}${size} [${tag}] `; + } + return `${constructor}${size} `; +} + +// Look up the keys of the object. +function getKeys(value, showHidden) { + let keys; + const symbols = ObjectGetOwnPropertySymbols(value); + if (showHidden) { + keys = ObjectGetOwnPropertyNames(value); + if (symbols.length !== 0) + ArrayPrototypePushApply(keys, symbols); + } else { + // This might throw if `value` is a Module Namespace Object from an + // unevaluated module, but we don't want to perform the actual type + // check because it's expensive. + // TODO(devsnek): track https://github.com/tc39/ecma262/issues/1209 + // and modify this logic as needed. + try { + keys = ObjectKeys(value); + } catch (err) { + assert(isNativeError(err) && err.name === 'ReferenceError' && + isModuleNamespaceObject(value)); + keys = ObjectGetOwnPropertyNames(value); + } + if (symbols.length !== 0) { + const filter = (key) => ObjectPrototypePropertyIsEnumerable(value, key); + ArrayPrototypePushApply(keys, ArrayPrototypeFilter(symbols, filter)); + } + } + return keys; +} + +function getCtxStyle(value, constructor, tag) { + let fallback = ''; + if (constructor === null) { + fallback = internalGetConstructorName(value); + if (fallback === tag) { + fallback = 'Object'; + } + } + return getPrefix(constructor, tag, fallback); +} + +function formatProxy(ctx, proxy, recurseTimes) { + if (recurseTimes > ctx.depth && ctx.depth !== null) { + return ctx.stylize('Proxy [Array]', 'special'); + } + recurseTimes += 1; + ctx.indentationLvl += 2; + const res = [ + formatValue(ctx, proxy[0], recurseTimes), + formatValue(ctx, proxy[1], recurseTimes), + ]; + ctx.indentationLvl -= 2; + return reduceToSingleString( + ctx, res, '', ['Proxy [', ']'], kArrayExtrasType, recurseTimes); +} + +// Note: using `formatValue` directly requires the indentation level to be +// corrected by setting `ctx.indentationLvL += diff` and then to decrease the +// value afterwards again. +function formatValue(ctx, value, recurseTimes, typedArray) { + // Primitive types cannot have properties. + if (typeof value !== 'object' && + typeof value !== 'function' && + !isUndetectableObject(value)) { + return formatPrimitive(ctx.stylize, value, ctx); + } + if (value === null) { + return ctx.stylize('null', 'null'); + } + + // Memorize the context for custom inspection on proxies. + const context = value; + // Always check for proxies to prevent side effects and to prevent triggering + // any proxy handlers. + const proxy = getProxyDetails(value, !!ctx.showProxy); + if (proxy !== undefined) { + if (ctx.showProxy) { + return formatProxy(ctx, proxy, recurseTimes); + } + value = proxy; + } + + // Provide a hook for user-specified inspect functions. + // Check that value is an object with an inspect function on it. + if (ctx.customInspect) { + const maybeCustom = value[customInspectSymbol]; + if (typeof maybeCustom === 'function' && + // Filter out the util module, its inspect function is special. + maybeCustom !== inspect && + // Also filter out any prototype objects using the circular check. 
+ !(value.constructor && value.constructor.prototype === value)) { + // This makes sure the recurseTimes are reported as before while using + // a counter internally. + const depth = ctx.depth === null ? null : ctx.depth - recurseTimes; + const isCrossContext = + proxy !== undefined || !(context instanceof Object); + const ret = FunctionPrototypeCall( + maybeCustom, + context, + depth, + getUserOptions(ctx, isCrossContext), + inspect + ); + // If the custom inspection method returned `this`, don't go into + // infinite recursion. + if (ret !== context) { + if (typeof ret !== 'string') { + return formatValue(ctx, ret, recurseTimes); + } + return ret.replace(/\n/g, `\n${' '.repeat(ctx.indentationLvl)}`); + } + } + } + + // Using an array here is actually better for the average case than using + // a Set. `seen` will only check for the depth and will never grow too large. + if (ctx.seen.includes(value)) { + let index = 1; + if (ctx.circular === undefined) { + ctx.circular = new SafeMap(); + ctx.circular.set(value, index); + } else { + index = ctx.circular.get(value); + if (index === undefined) { + index = ctx.circular.size + 1; + ctx.circular.set(value, index); + } + } + return ctx.stylize(`[Circular *${index}]`, 'special'); + } + + return formatRaw(ctx, value, recurseTimes, typedArray); +} + +function formatRaw(ctx, value, recurseTimes, typedArray) { + let keys; + let protoProps; + if (ctx.showHidden && (recurseTimes <= ctx.depth || ctx.depth === null)) { + protoProps = []; + } + + const constructor = getConstructorName(value, ctx, recurseTimes, protoProps); + // Reset the variable to check for this later on. + if (protoProps !== undefined && protoProps.length === 0) { + protoProps = undefined; + } + + let tag = value[SymbolToStringTag]; + // Only list the tag in case it's non-enumerable / not an own property. + // Otherwise we'd print this twice. + if (typeof tag !== 'string' || + (tag !== '' && + (ctx.showHidden ? + ObjectPrototypeHasOwnProperty : + ObjectPrototypePropertyIsEnumerable)( + value, SymbolToStringTag + ))) { + tag = ''; + } + let base = ''; + let formatter = getEmptyFormatArray; + let braces; + let noIterator = true; + let i = 0; + const filter = ctx.showHidden ? ALL_PROPERTIES : ONLY_ENUMERABLE; + + let extrasType = kObjectType; + + // Iterators and the rest are split to reduce checks. + // We have to check all values in case the constructor is set to null. + // Otherwise it would not possible to identify all types properly. + if (value[SymbolIterator] || constructor === null) { + noIterator = false; + if (ArrayIsArray(value)) { + // Only set the constructor for non ordinary ("Array [...]") arrays. + const prefix = (constructor !== 'Array' || tag !== '') ? + getPrefix(constructor, tag, 'Array', `(${value.length})`) : + ''; + keys = getOwnNonIndexProperties(value, filter); + braces = [`${prefix}[`, ']']; + if (value.length === 0 && keys.length === 0 && protoProps === undefined) + return `${braces[0]}]`; + extrasType = kArrayExtrasType; + formatter = formatArray; + } else if (isSet(value)) { + const size = SetPrototypeGetSize(value); + const prefix = getPrefix(constructor, tag, 'Set', `(${size})`); + keys = getKeys(value, ctx.showHidden); + formatter = constructor !== null ? 
+ formatSet.bind(null, value) : + formatSet.bind(null, SetPrototypeValues(value)); + if (size === 0 && keys.length === 0 && protoProps === undefined) + return `${prefix}{}`; + braces = [`${prefix}{`, '}']; + } else if (isMap(value)) { + const size = MapPrototypeGetSize(value); + const prefix = getPrefix(constructor, tag, 'Map', `(${size})`); + keys = getKeys(value, ctx.showHidden); + formatter = constructor !== null ? + formatMap.bind(null, value) : + formatMap.bind(null, MapPrototypeEntries(value)); + if (size === 0 && keys.length === 0 && protoProps === undefined) + return `${prefix}{}`; + braces = [`${prefix}{`, '}']; + } else if (isTypedArray(value)) { + keys = getOwnNonIndexProperties(value, filter); + let bound = value; + let fallback = ''; + if (constructor === null) { + fallback = TypedArrayPrototypeGetSymbolToStringTag(value); + // Reconstruct the array information. + bound = new primordials[fallback](value); + } + const size = TypedArrayPrototypeGetLength(value); + const prefix = getPrefix(constructor, tag, fallback, `(${size})`); + braces = [`${prefix}[`, ']']; + if (value.length === 0 && keys.length === 0 && !ctx.showHidden) + return `${braces[0]}]`; + // Special handle the value. The original value is required below. The + // bound function is required to reconstruct missing information. + formatter = formatTypedArray.bind(null, bound, size); + extrasType = kArrayExtrasType; + } else if (isMapIterator(value)) { + keys = getKeys(value, ctx.showHidden); + braces = getIteratorBraces('Map', tag); + // Add braces to the formatter parameters. + formatter = formatIterator.bind(null, braces); + } else if (isSetIterator(value)) { + keys = getKeys(value, ctx.showHidden); + braces = getIteratorBraces('Set', tag); + // Add braces to the formatter parameters. + formatter = formatIterator.bind(null, braces); + } else { + noIterator = true; + } + } + if (noIterator) { + keys = getKeys(value, ctx.showHidden); + braces = ['{', '}']; + if (constructor === 'Object') { + if (isArgumentsObject(value)) { + braces[0] = '[Arguments] {'; + } else if (tag !== '') { + braces[0] = `${getPrefix(constructor, tag, 'Object')}{`; + } + if (keys.length === 0 && protoProps === undefined) { + return `${braces[0]}}`; + } + } else if (typeof value === 'function') { + base = getFunctionBase(value, constructor, tag); + if (keys.length === 0 && protoProps === undefined) + return ctx.stylize(base, 'special'); + } else if (isRegExp(value)) { + // Make RegExps say that they are RegExps + base = RegExpPrototypeToString( + constructor !== null ? value : new RegExp(value) + ); + const prefix = getPrefix(constructor, tag, 'RegExp'); + if (prefix !== 'RegExp ') + base = `${prefix}${base}`; + if ((keys.length === 0 && protoProps === undefined) || + (recurseTimes > ctx.depth && ctx.depth !== null)) { + return ctx.stylize(base, 'regexp'); + } + } else if (isDate(value)) { + // Make dates with properties first say the date + base = NumberIsNaN(DatePrototypeGetTime(value)) ? + DatePrototypeToString(value) : + DatePrototypeToISOString(value); + const prefix = getPrefix(constructor, tag, 'Date'); + if (prefix !== 'Date ') + base = `${prefix}${base}`; + if (keys.length === 0 && protoProps === undefined) { + return ctx.stylize(base, 'date'); + } + } else if (isError(value)) { + base = formatError(value, constructor, tag, ctx, keys); + if (keys.length === 0 && protoProps === undefined) + return base; + } else if (isAnyArrayBuffer(value)) { + // Fast path for ArrayBuffer and SharedArrayBuffer. 
+ // Can't do the same for DataView because it has a non-primitive
+ // .buffer property that we need to recurse for.
+ const arrayType = isArrayBuffer(value) ? 'ArrayBuffer' :
+ 'SharedArrayBuffer';
+ const prefix = getPrefix(constructor, tag, arrayType);
+ if (typedArray === undefined) {
+ formatter = formatArrayBuffer;
+ } else if (keys.length === 0 && protoProps === undefined) {
+ return prefix +
+ `{ byteLength: ${formatNumber(ctx.stylize, value.byteLength, false)} }`;
+ }
+ braces[0] = `${prefix}{`;
+ ArrayPrototypeUnshift(keys, 'byteLength');
+ } else if (isDataView(value)) {
+ braces[0] = `${getPrefix(constructor, tag, 'DataView')}{`;
+ // .buffer goes last, it's not a primitive like the others.
+ ArrayPrototypeUnshift(keys, 'byteLength', 'byteOffset', 'buffer');
+ } else if (isPromise(value)) {
+ braces[0] = `${getPrefix(constructor, tag, 'Promise')}{`;
+ formatter = formatPromise;
+ } else if (isWeakSet(value)) {
+ braces[0] = `${getPrefix(constructor, tag, 'WeakSet')}{`;
+ formatter = ctx.showHidden ? formatWeakSet : formatWeakCollection;
+ } else if (isWeakMap(value)) {
+ braces[0] = `${getPrefix(constructor, tag, 'WeakMap')}{`;
+ formatter = ctx.showHidden ? formatWeakMap : formatWeakCollection;
+ } else if (isModuleNamespaceObject(value)) {
+ braces[0] = `${getPrefix(constructor, tag, 'Module')}{`;
+ // Special handle keys for namespace objects.
+ formatter = formatNamespaceObject.bind(null, keys);
+ } else if (isBoxedPrimitive(value)) {
+ base = getBoxedBase(value, ctx, keys, constructor, tag);
+ if (keys.length === 0 && protoProps === undefined) {
+ return base;
+ }
+ } else {
+ if (keys.length === 0 && protoProps === undefined) {
+ if (isExternal(value)) {
+ const address = getExternalValue(value).toString(16);
+ return ctx.stylize(`[External: ${address}]`, 'special');
+ }
+ return `${getCtxStyle(value, constructor, tag)}{}`;
+ }
+ braces[0] = `${getCtxStyle(value, constructor, tag)}{`;
+ }
+ }
+
+ if (recurseTimes > ctx.depth && ctx.depth !== null) {
+ let constructorName = getCtxStyle(value, constructor, tag).slice(0, -1);
+ if (constructor !== null)
+ constructorName = `[${constructorName}]`;
+ return ctx.stylize(constructorName, 'special');
+ }
+ recurseTimes += 1;
+
+ ctx.seen.push(value);
+ ctx.currentDepth = recurseTimes;
+ let output;
+ const indentationLvl = ctx.indentationLvl;
+ try {
+ output = formatter(ctx, value, recurseTimes);
+ for (i = 0; i < keys.length; i++) {
+ output.push(
+ formatProperty(ctx, value, recurseTimes, keys[i], extrasType));
+ }
+ if (protoProps !== undefined) {
+ output.push(...protoProps);
+ }
+ } catch (err) {
+ const constructorName = getCtxStyle(value, constructor, tag).slice(0, -1);
+ return handleMaxCallStackSize(ctx, err, constructorName, indentationLvl);
+ }
+ if (ctx.circular !== undefined) {
+ const index = ctx.circular.get(value);
+ if (index !== undefined) {
+ const reference = ctx.stylize(`<ref *${index}>`, 'special');
+ // Add reference always to the very beginning of the output.
+ if (ctx.compact !== true) {
+ base = base === '' ? reference : `${reference} ${base}`;
+ } else {
+ braces[0] = `${reference} ${braces[0]}`;
+ }
+ }
+ }
+ ctx.seen.pop();
+
+ if (ctx.sorted) {
+ const comparator = ctx.sorted === true ?
undefined : ctx.sorted; + if (extrasType === kObjectType) { + output = output.sort(comparator); + } else if (keys.length > 1) { + const sorted = output.slice(output.length - keys.length).sort(comparator); + output.splice(output.length - keys.length, keys.length, ...sorted); + } + } + + const res = reduceToSingleString( + ctx, output, base, braces, extrasType, recurseTimes, value); + const budget = ctx.budget[ctx.indentationLvl] || 0; + const newLength = budget + res.length; + ctx.budget[ctx.indentationLvl] = newLength; + // If any indentationLvl exceeds this limit, limit further inspecting to the + // minimum. Otherwise the recursive algorithm might continue inspecting the + // object even though the maximum string size (~2 ** 28 on 32 bit systems and + // ~2 ** 30 on 64 bit systems) exceeded. The actual output is not limited at + // exactly 2 ** 27 but a bit higher. This depends on the object shape. + // This limit also makes sure that huge objects don't block the event loop + // significantly. + if (newLength > 2 ** 27) { + ctx.depth = -1; + } + return res; +} + +function getIteratorBraces(type, tag) { + if (tag !== `${type} Iterator`) { + if (tag !== '') + tag += '] ['; + tag += `${type} Iterator`; + } + return [`[${tag}] {`, '}']; +} + +function getBoxedBase(value, ctx, keys, constructor, tag) { + let fn; + let type; + if (isNumberObject(value)) { + fn = NumberPrototypeValueOf; + type = 'Number'; + } else if (isStringObject(value)) { + fn = StringPrototypeValueOf; + type = 'String'; + // For boxed Strings, we have to remove the 0-n indexed entries, + // since they just noisy up the output and are redundant + // Make boxed primitive Strings look like such + keys.splice(0, value.length); + } else if (isBooleanObject(value)) { + fn = BooleanPrototypeValueOf; + type = 'Boolean'; + } else if (isBigIntObject(value)) { + fn = BigIntPrototypeValueOf; + type = 'BigInt'; + } else { + fn = SymbolPrototypeValueOf; + type = 'Symbol'; + } + let base = `[${type}`; + if (type !== constructor) { + if (constructor === null) { + base += ' (null prototype)'; + } else { + base += ` (${constructor})`; + } + } + base += `: ${formatPrimitive(stylizeNoColor, fn(value), ctx)}]`; + if (tag !== '' && tag !== constructor) { + base += ` [${tag}]`; + } + if (keys.length !== 0 || ctx.stylize === stylizeNoColor) + return base; + return ctx.stylize(base, StringPrototypeToLowerCase(type)); +} + +function getClassBase(value, constructor, tag) { + const hasName = ObjectPrototypeHasOwnProperty(value, 'name'); + const name = (hasName && value.name) || '(anonymous)'; + let base = `class ${name}`; + if (constructor !== 'Function' && constructor !== null) { + base += ` [${constructor}]`; + } + if (tag !== '' && constructor !== tag) { + base += ` [${tag}]`; + } + if (constructor !== null) { + const superName = ObjectGetPrototypeOf(value).name; + if (superName) { + base += ` extends ${superName}`; + } + } else { + base += ' extends [null prototype]'; + } + return `[${base}]`; +} + +function getFunctionBase(value, constructor, tag) { + const stringified = FunctionPrototypeToString(value); + if (stringified.startsWith('class') && stringified.endsWith('}')) { + const slice = stringified.slice(5, -1); + const bracketIndex = slice.indexOf('{'); + if (bracketIndex !== -1 && + (!slice.slice(0, bracketIndex).includes('(') || + // Slow path to guarantee that it's indeed a class. 
+ classRegExp.test(slice.replace(stripCommentsRegExp)))) { + return getClassBase(value, constructor, tag); + } + } + let type = 'Function'; + if (isGeneratorFunction(value)) { + type = `Generator${type}`; + } + if (isAsyncFunction(value)) { + type = `Async${type}`; + } + let base = `[${type}`; + if (constructor === null) { + base += ' (null prototype)'; + } + if (value.name === '') { + base += ' (anonymous)'; + } else { + base += `: ${value.name}`; + } + base += ']'; + if (constructor !== type && constructor !== null) { + base += ` ${constructor}`; + } + if (tag !== '' && constructor !== tag) { + base += ` [${tag}]`; + } + return base; +} + +function identicalSequenceRange(a, b) { + for (let i = 0; i < a.length - 3; i++) { + // Find the first entry of b that matches the current entry of a. + const pos = b.indexOf(a[i]); + if (pos !== -1) { + const rest = b.length - pos; + if (rest > 3) { + let len = 1; + const maxLen = MathMin(a.length - i, rest); + // Count the number of consecutive entries. + while (maxLen > len && a[i + len] === b[pos + len]) { + len++; + } + if (len > 3) { + return { len, offset: i }; + } + } + } + } + + return { len: 0, offset: 0 }; +} + +function getStackString(error) { + return error.stack ? String(error.stack) : ErrorPrototypeToString(error); +} + +function getStackFrames(ctx, err, stack) { + const frames = stack.split('\n'); + + // Remove stack frames identical to frames in cause. + if (err.cause && isError(err.cause)) { + const causeStack = getStackString(err.cause); + const causeStackStart = causeStack.indexOf('\n at'); + if (causeStackStart !== -1) { + const causeFrames = causeStack.slice(causeStackStart + 1).split('\n'); + const { len, offset } = identicalSequenceRange(frames, causeFrames); + if (len > 0) { + const skipped = len - 2; + const msg = ` ... ${skipped} lines matching cause stack trace ...`; + frames.splice(offset + 1, skipped, ctx.stylize(msg, 'undefined')); + } + } + } + return frames; +} + +function improveStack(stack, constructor, name, tag) { + // A stack trace may contain arbitrary data. Only manipulate the output + // for "regular errors" (errors that "look normal") for now. + let len = name.length; + + if (constructor === null || + (name.endsWith('Error') && + stack.startsWith(name) && + (stack.length === len || stack[len] === ':' || stack[len] === '\n'))) { + let fallback = 'Error'; + if (constructor === null) { + const start = stack.match(/^([A-Z][a-z_ A-Z0-9[\]()-]+)(?::|\n {4}at)/) || + stack.match(/^([a-z_A-Z0-9-]*Error)$/); + fallback = (start && start[1]) || ''; + len = fallback.length; + fallback = fallback || 'Error'; + } + const prefix = getPrefix(constructor, tag, fallback).slice(0, -1); + if (name !== prefix) { + if (prefix.includes(name)) { + if (len === 0) { + stack = `${prefix}: ${stack}`; + } else { + stack = `${prefix}${stack.slice(len)}`; + } + } else { + stack = `${prefix} [${name}]${stack.slice(len)}`; + } + } + } + return stack; +} + +function removeDuplicateErrorKeys(ctx, keys, err, stack) { + if (!ctx.showHidden && keys.length !== 0) { + for (const name of ['name', 'message', 'stack']) { + const index = keys.indexOf(name); + // Only hide the property in case it's part of the original stack + if (index !== -1 && stack.includes(err[name])) { + keys.splice(index, 1); + } + } + } +} + +function formatError(err, constructor, tag, ctx, keys) { + const name = err.name != null ? 
String(err.name) : 'Error'; + let stack = getStackString(err); + + removeDuplicateErrorKeys(ctx, keys, err, stack); + + if ('cause' in err && + (keys.length === 0 || !keys.includes('cause'))) { + keys.push('cause'); + } + + stack = improveStack(stack, constructor, name, tag); + + // Ignore the error message if it's contained in the stack. + let pos = (err.message && stack.indexOf(err.message)) || -1; + if (pos !== -1) + pos += err.message.length; + // Wrap the error in brackets in case it has no stack trace. + const stackStart = stack.indexOf('\n at', pos); + if (stackStart === -1) { + stack = `[${stack}]`; + } else { + let newStack = stack.slice(0, stackStart); + const lines = getStackFrames(ctx, err, stack.slice(stackStart + 1)); + if (ctx.colors) { + // Highlight userland code and node modules. + for (const line of lines) { + const core = line.match(coreModuleRegExp); + if (core !== null && NativeModule.exists(core[1])) { + newStack += `\n${ctx.stylize(line, 'undefined')}`; + } else { + // This adds underscores to all node_modules to quickly identify them. + let nodeModule; + newStack += '\n'; + let pos = 0; + while ((nodeModule = nodeModulesRegExp.exec(line)) !== null) { + // '/node_modules/'.length === 14 + newStack += line.slice(pos, nodeModule.index + 14); + newStack += ctx.stylize(nodeModule[1], 'module'); + pos = nodeModule.index + nodeModule[0].length; + } + newStack += pos === 0 ? line : line.slice(pos); + } + } + } else { + newStack += `\n${lines.join('\n')}`; + } + stack = newStack; + } + // The message and the stack have to be indented as well! + if (ctx.indentationLvl !== 0) { + const indentation = ' '.repeat(ctx.indentationLvl); + stack = stack.replace(/\n/g, `\n${indentation}`); + } + return stack; +} + +function groupArrayElements(ctx, output, value) { + let totalLength = 0; + let maxLength = 0; + let i = 0; + let outputLength = output.length; + if (ctx.maxArrayLength < output.length) { + // This makes sure the "... n more items" part is not taken into account. + outputLength--; + } + const separatorSpace = 2; // Add 1 for the space and 1 for the separator. + const dataLen = new Array(outputLength); + // Calculate the total length of all output entries and the individual max + // entries length of all output entries. We have to remove colors first, + // otherwise the length would not be calculated properly. + for (; i < outputLength; i++) { + const len = getStringWidth(output[i], ctx.colors); + dataLen[i] = len; + totalLength += len + separatorSpace; + if (maxLength < len) + maxLength = len; + } + // Add two to `maxLength` as we add a single whitespace character plus a comma + // in-between two entries. + const actualMax = maxLength + separatorSpace; + // Check if at least three entries fit next to each other and prevent grouping + // of arrays that contains entries of very different length (i.e., if a single + // entry is longer than 1/5 of all other entries combined). Otherwise the + // space in-between small entries would be enormous. + if (actualMax * 3 + ctx.indentationLvl < ctx.breakLength && + (totalLength / actualMax > 5 || maxLength <= 6)) { + + const approxCharHeights = 2.5; + const averageBias = MathSqrt(actualMax - totalLength / output.length); + const biasedMax = MathMax(actualMax - 3 - averageBias, 1); + // Dynamically check how many columns seem possible. + const columns = MathMin( + // Ideally a square should be drawn. We expect a character to be about 2.5 + // times as high as wide. 
This is the area formula to calculate a square + // which contains n rectangles of size `actualMax * approxCharHeights`. + // Divide that by `actualMax` to receive the correct number of columns. + // The added bias increases the columns for short entries. + MathRound( + MathSqrt( + approxCharHeights * biasedMax * outputLength + ) / biasedMax + ), + // Do not exceed the breakLength. + MathFloor((ctx.breakLength - ctx.indentationLvl) / actualMax), + // Limit array grouping for small `compact` modes as the user requested + // minimal grouping. + ctx.compact * 4, + // Limit the columns to a maximum of fifteen. + 15 + ); + // Return with the original output if no grouping should happen. + if (columns <= 1) { + return output; + } + const tmp = []; + const maxLineLength = []; + for (let i = 0; i < columns; i++) { + let lineMaxLength = 0; + for (let j = i; j < output.length; j += columns) { + if (dataLen[j] > lineMaxLength) + lineMaxLength = dataLen[j]; + } + lineMaxLength += separatorSpace; + maxLineLength[i] = lineMaxLength; + } + let order = StringPrototypePadStart; + if (value !== undefined) { + for (let i = 0; i < output.length; i++) { + if (typeof value[i] !== 'number' && typeof value[i] !== 'bigint') { + order = StringPrototypePadEnd; + break; + } + } + } + // Each iteration creates a single line of grouped entries. + for (let i = 0; i < outputLength; i += columns) { + // The last lines may contain less entries than columns. + const max = MathMin(i + columns, outputLength); + let str = ''; + let j = i; + for (; j < max - 1; j++) { + // Calculate extra color padding in case it's active. This has to be + // done line by line as some lines might contain more colors than + // others. + const padding = maxLineLength[j - i] + output[j].length - dataLen[j]; + str += order(`${output[j]}, `, padding, ' '); + } + if (order === StringPrototypePadStart) { + const padding = maxLineLength[j - i] + + output[j].length - + dataLen[j] - + separatorSpace; + str += StringPrototypePadStart(output[j], padding, ' '); + } else { + str += output[j]; + } + ArrayPrototypePush(tmp, str); + } + if (ctx.maxArrayLength < output.length) { + ArrayPrototypePush(tmp, output[outputLength]); + } + output = tmp; + } + return output; +} + +function handleMaxCallStackSize(ctx, err, constructorName, indentationLvl) { + if (isStackOverflowError(err)) { + ctx.seen.pop(); + ctx.indentationLvl = indentationLvl; + return ctx.stylize( + `[${constructorName}: Inspection interrupted ` + + 'prematurely. Maximum call stack size exceeded.]', + 'special' + ); + } + /* c8 ignore next */ + assert.fail(err.stack); +} + +function addNumericSeparator(integerString) { + let result = ''; + let i = integerString.length; + const start = integerString.startsWith('-') ? 1 : 0; + for (; i >= start + 4; i -= 3) { + result = `_${integerString.slice(i - 3, i)}${result}`; + } + return i === integerString.length ? + integerString : + `${integerString.slice(0, i)}${result}`; +} + +function addNumericSeparatorEnd(integerString) { + let result = ''; + let i = 0; + for (; i < integerString.length - 3; i += 3) { + result += `${integerString.slice(i, i + 3)}_`; + } + return i === 0 ? + integerString : + `${result}${integerString.slice(i)}`; +} + +function formatNumber(fn, number, numericSeparator) { + if (!numericSeparator) { + // Format -0 as '-0'. Checking `number === -0` won't distinguish 0 from -0. 
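+ // Illustrative expectations for this helper, assuming the public
+ // `inspect()` entry point built on top of it (values are examples only):
+ //   inspect(-0);                                     // => '-0'
+ //   inspect(1234567, { numericSeparator: true });    // => '1_234_567'
+ //   inspect(1234.5678, { numericSeparator: true });  // => '1_234.567_8'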
+ if (ObjectIs(number, -0)) { + return fn('-0', 'number'); + } + return fn(`${number}`, 'number'); + } + const integer = MathTrunc(number); + const string = String(integer); + if (integer === number) { + if (!NumberIsFinite(number) || string.includes('e')) { + return fn(string, 'number'); + } + return fn(`${addNumericSeparator(string)}`, 'number'); + } + if (NumberIsNaN(number)) { + return fn(string, 'number'); + } + return fn(`${ + addNumericSeparator(string) + }.${ + addNumericSeparatorEnd(String(number).slice(string.length + 1)) + }`, 'number'); +} + +function formatBigInt(fn, bigint, numericSeparator) { + const string = String(bigint); + if (!numericSeparator) { + return fn(`${string}n`, 'bigint'); + } + return fn(`${addNumericSeparator(string)}n`, 'bigint'); +} + +function formatPrimitive(fn, value, ctx) { + if (typeof value === 'string') { + let trailer = ''; + if (value.length > ctx.maxStringLength) { + const remaining = value.length - ctx.maxStringLength; + value = value.slice(0, ctx.maxStringLength); + trailer = `... ${remaining} more character${remaining > 1 ? 's' : ''}`; + } + if (ctx.compact !== true && + // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth + // function. + value.length > kMinLineLength && + value.length > ctx.breakLength - ctx.indentationLvl - 4) { + return value + .split(/(?<=\n)/) + .map((line) => fn(strEscape(line), 'string')) + .join(` +\n${' '.repeat(ctx.indentationLvl + 2)}`) + trailer; + } + return fn(strEscape(value), 'string') + trailer; + } + if (typeof value === 'number') + return formatNumber(fn, value, ctx.numericSeparator); + if (typeof value === 'bigint') + return formatBigInt(fn, value, ctx.numericSeparator); + if (typeof value === 'boolean') + return fn(`${value}`, 'boolean'); + if (typeof value === 'undefined') + return fn('undefined', 'undefined'); + // es6 symbol primitive + return fn(SymbolPrototypeToString(value), 'symbol'); +} + +function formatNamespaceObject(keys, ctx, value, recurseTimes) { + const output = new Array(keys.length); + for (let i = 0; i < keys.length; i++) { + try { + output[i] = formatProperty(ctx, value, recurseTimes, keys[i], + kObjectType); + } catch (err) { + assert(isNativeError(err) && err.name === 'ReferenceError'); + // Use the existing functionality. This makes sure the indentation and + // line breaks are always correct. Otherwise it is very difficult to keep + // this aligned, even though this is a hacky way of dealing with this. + const tmp = { [keys[i]]: '' }; + output[i] = formatProperty(ctx, tmp, recurseTimes, keys[i], kObjectType); + const pos = output[i].lastIndexOf(' '); + // We have to find the last whitespace and have to replace that value as + // it will be visualized as a regular string. + output[i] = output[i].slice(0, pos + 1) + + ctx.stylize('', 'special'); + } + } + // Reset the keys to an empty array. This prevents duplicated inspection. + keys.length = 0; + return output; +} + +// The array is sparse and/or has extra keys +function formatSpecialArray(ctx, value, recurseTimes, maxLength, output, i) { + const keys = ObjectKeys(value); + let index = i; + for (; i < keys.length && output.length < maxLength; i++) { + const key = keys[i]; + const tmp = +key; + // Arrays can only have up to 2^32 - 1 entries + if (tmp > 2 ** 32 - 2) { + break; + } + if (`${index}` !== key) { + if (!numberRegExp.test(key)) { + break; + } + const emptyItems = tmp - index; + const ending = emptyItems > 1 ? 
's' : ''; + const message = `<${emptyItems} empty item${ending}>`; + output.push(ctx.stylize(message, 'undefined')); + index = tmp; + if (output.length === maxLength) { + break; + } + } + output.push(formatProperty(ctx, value, recurseTimes, key, kArrayType)); + index++; + } + const remaining = value.length - index; + if (output.length !== maxLength) { + if (remaining > 0) { + const ending = remaining > 1 ? 's' : ''; + const message = `<${remaining} empty item${ending}>`; + output.push(ctx.stylize(message, 'undefined')); + } + } else if (remaining > 0) { + output.push(`... ${remaining} more item${remaining > 1 ? 's' : ''}`); + } + return output; +} + +function formatArrayBuffer(ctx, value) { + let buffer; + try { + buffer = new Uint8Array(value); + } catch { + return [ctx.stylize('(detached)', 'special')]; + } + if (hexSlice === undefined) + hexSlice = uncurryThis(require('buffer').Buffer.prototype.hexSlice); + let str = StringPrototypeTrim(StringPrototypeReplace( + hexSlice(buffer, 0, MathMin(ctx.maxArrayLength, buffer.length)), + /(.{2})/g, '$1 ')); + const remaining = buffer.length - ctx.maxArrayLength; + if (remaining > 0) + str += ` ... ${remaining} more byte${remaining > 1 ? 's' : ''}`; + return [`${ctx.stylize('[Uint8Contents]', 'special')}: <${str}>`]; +} + +function formatArray(ctx, value, recurseTimes) { + const valLen = value.length; + const len = MathMin(MathMax(0, ctx.maxArrayLength), valLen); + + const remaining = valLen - len; + const output = []; + for (let i = 0; i < len; i++) { + // Special handle sparse arrays. + if (!ObjectPrototypeHasOwnProperty(value, i)) { + return formatSpecialArray(ctx, value, recurseTimes, len, output, i); + } + output.push(formatProperty(ctx, value, recurseTimes, i, kArrayType)); + } + if (remaining > 0) + output.push(`... ${remaining} more item${remaining > 1 ? 's' : ''}`); + return output; +} + +function formatTypedArray(value, length, ctx, ignored, recurseTimes) { + const maxLength = MathMin(MathMax(0, ctx.maxArrayLength), length); + const remaining = value.length - maxLength; + const output = new Array(maxLength); + const elementFormatter = value.length > 0 && typeof value[0] === 'number' ? + formatNumber : + formatBigInt; + for (let i = 0; i < maxLength; ++i) { + output[i] = elementFormatter(ctx.stylize, value[i], ctx.numericSeparator); + } + if (remaining > 0) { + output[maxLength] = `... ${remaining} more item${remaining > 1 ? 's' : ''}`; + } + if (ctx.showHidden) { + // .buffer goes last, it's not a primitive like the others. + // All besides `BYTES_PER_ELEMENT` are actually getters. 
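+ // Rough illustration (exact layout depends on breakLength):
+ //   inspect(new Uint8Array([1, 2]), { showHidden: true })
+ //   // => 'Uint8Array(2) [ 1, 2, [BYTES_PER_ELEMENT]: 1, [length]: 2,
+ //   //      [byteLength]: 2, [byteOffset]: 0,
+ //   //      [buffer]: ArrayBuffer { byteLength: 2 } ]'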
+ ctx.indentationLvl += 2; + for (const key of [ + 'BYTES_PER_ELEMENT', + 'length', + 'byteLength', + 'byteOffset', + 'buffer', + ]) { + const str = formatValue(ctx, value[key], recurseTimes, true); + ArrayPrototypePush(output, `[${key}]: ${str}`); + } + ctx.indentationLvl -= 2; + } + return output; +} + +function formatSet(value, ctx, ignored, recurseTimes) { + const output = []; + ctx.indentationLvl += 2; + for (const v of value) { + ArrayPrototypePush(output, formatValue(ctx, v, recurseTimes)); + } + ctx.indentationLvl -= 2; + return output; +} + +function formatMap(value, ctx, ignored, recurseTimes) { + const output = []; + ctx.indentationLvl += 2; + for (const { 0: k, 1: v } of value) { + output.push( + `${formatValue(ctx, k, recurseTimes)} => ${formatValue(ctx, v, recurseTimes)}` + ); + } + ctx.indentationLvl -= 2; + return output; +} + +function formatSetIterInner(ctx, recurseTimes, entries, state) { + const maxArrayLength = MathMax(ctx.maxArrayLength, 0); + const maxLength = MathMin(maxArrayLength, entries.length); + const output = new Array(maxLength); + ctx.indentationLvl += 2; + for (let i = 0; i < maxLength; i++) { + output[i] = formatValue(ctx, entries[i], recurseTimes); + } + ctx.indentationLvl -= 2; + if (state === kWeak && !ctx.sorted) { + // Sort all entries to have a halfway reliable output (if more entries than + // retrieved ones exist, we can not reliably return the same output) if the + // output is not sorted anyway. + ArrayPrototypeSort(output); + } + const remaining = entries.length - maxLength; + if (remaining > 0) { + ArrayPrototypePush(output, + `... ${remaining} more item${remaining > 1 ? 's' : ''}`); + } + return output; +} + +function formatMapIterInner(ctx, recurseTimes, entries, state) { + const maxArrayLength = MathMax(ctx.maxArrayLength, 0); + // Entries exist as [key1, val1, key2, val2, ...] + const len = entries.length / 2; + const remaining = len - maxArrayLength; + const maxLength = MathMin(maxArrayLength, len); + let output = new Array(maxLength); + let i = 0; + ctx.indentationLvl += 2; + if (state === kWeak) { + for (; i < maxLength; i++) { + const pos = i * 2; + output[i] = + `${formatValue(ctx, entries[pos], recurseTimes)} => ${formatValue(ctx, entries[pos + 1], recurseTimes)}`; + } + // Sort all entries to have a halfway reliable output (if more entries than + // retrieved ones exist, we can not reliably return the same output) if the + // output is not sorted anyway. + if (!ctx.sorted) + output = output.sort(); + } else { + for (; i < maxLength; i++) { + const pos = i * 2; + const res = [ + formatValue(ctx, entries[pos], recurseTimes), + formatValue(ctx, entries[pos + 1], recurseTimes), + ]; + output[i] = reduceToSingleString( + ctx, res, '', ['[', ']'], kArrayExtrasType, recurseTimes); + } + } + ctx.indentationLvl -= 2; + if (remaining > 0) { + output.push(`... ${remaining} more item${remaining > 1 ? 's' : ''}`); + } + return output; +} + +function formatWeakCollection(ctx) { + return [ctx.stylize('', 'special')]; +} + +function formatWeakSet(ctx, value, recurseTimes) { + const entries = previewEntries(value); + return formatSetIterInner(ctx, recurseTimes, entries, kWeak); +} + +function formatWeakMap(ctx, value, recurseTimes) { + const entries = previewEntries(value); + return formatMapIterInner(ctx, recurseTimes, entries, kWeak); +} + +function formatIterator(braces, ctx, value, recurseTimes) { + const { 0: entries, 1: isKeyValue } = previewEntries(value, true); + if (isKeyValue) { + // Mark entry iterators as such. 
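+ // Rough illustration: inspect(new Map([['a', 1]]).entries()) yields
+ // "[Map Entries] { [ 'a', 1 ] }", while inspect(new Map([['a', 1]]).keys())
+ // yields "[Map Iterator] { 'a' }".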
+ braces[0] = braces[0].replace(/ Iterator] {$/, ' Entries] {');
+ return formatMapIterInner(ctx, recurseTimes, entries, kMapEntries);
+ }
+
+ return formatSetIterInner(ctx, recurseTimes, entries, kIterator);
+}
+
+function formatPromise(ctx, value, recurseTimes) {
+ let output;
+ const { 0: state, 1: result } = getPromiseDetails(value);
+ if (state === kPending) {
+ output = [ctx.stylize('<pending>', 'special')];
+ } else {
+ ctx.indentationLvl += 2;
+ const str = formatValue(ctx, result, recurseTimes);
+ ctx.indentationLvl -= 2;
+ output = [
+ state === kRejected ?
+ `${ctx.stylize('<rejected>', 'special')} ${str}` :
+ str,
+ ];
+ }
+ return output;
+}
+
+function formatProperty(ctx, value, recurseTimes, key, type, desc,
+ original = value) {
+ let name, str;
+ let extra = ' ';
+ desc = desc || ObjectGetOwnPropertyDescriptor(value, key) ||
+ { value: value[key], enumerable: true };
+ if (desc.value !== undefined) {
+ const diff = (ctx.compact !== true || type !== kObjectType) ? 2 : 3;
+ ctx.indentationLvl += diff;
+ str = formatValue(ctx, desc.value, recurseTimes);
+ if (diff === 3 && ctx.breakLength < getStringWidth(str, ctx.colors)) {
+ extra = `\n${' '.repeat(ctx.indentationLvl)}`;
+ }
+ ctx.indentationLvl -= diff;
+ } else if (desc.get !== undefined) {
+ const label = desc.set !== undefined ? 'Getter/Setter' : 'Getter';
+ const s = ctx.stylize;
+ const sp = 'special';
+ if (ctx.getters && (ctx.getters === true ||
+ (ctx.getters === 'get' && desc.set === undefined) ||
+ (ctx.getters === 'set' && desc.set !== undefined))) {
+ try {
+ const tmp = FunctionPrototypeCall(desc.get, original);
+ ctx.indentationLvl += 2;
+ if (tmp === null) {
+ str = `${s(`[${label}:`, sp)} ${s('null', 'null')}${s(']', sp)}`;
+ } else if (typeof tmp === 'object') {
+ str = `${s(`[${label}]`, sp)} ${formatValue(ctx, tmp, recurseTimes)}`;
+ } else {
+ const primitive = formatPrimitive(s, tmp, ctx);
+ str = `${s(`[${label}:`, sp)} ${primitive}${s(']', sp)}`;
+ }
+ ctx.indentationLvl -= 2;
+ } catch (err) {
+ const message = `<Inspection threw (${err.message})>`;
+ str = `${s(`[${label}:`, sp)} ${message}${s(']', sp)}`;
+ }
+ } else {
+ str = ctx.stylize(`[${label}]`, sp);
+ }
+ } else if (desc.set !== undefined) {
+ str = ctx.stylize('[Setter]', 'special');
+ } else {
+ str = ctx.stylize('undefined', 'undefined');
+ }
+ if (type === kArrayType) {
+ return str;
+ }
+ if (typeof key === 'symbol') {
+ const tmp = StringPrototypeReplace(
+ SymbolPrototypeToString(key),
+ strEscapeSequencesReplacer, escapeFn
+ );
+ name = `[${ctx.stylize(tmp, 'symbol')}]`;
+ } else if (key === '__proto__') {
+ name = "['__proto__']";
+ } else if (desc.enumerable === false) {
+ const tmp = StringPrototypeReplace(key,
+ strEscapeSequencesReplacer, escapeFn);
+ name = `[${tmp}]`;
+ } else if (RegExpPrototypeTest(keyStrRegExp, key)) {
+ name = ctx.stylize(key, 'name');
+ } else {
+ name = ctx.stylize(strEscape(key), 'string');
+ }
+ return `${name}:${extra}${str}`;
+}
+
+function isBelowBreakLength(ctx, output, start, base) {
+ // Each entry is separated by at least a comma. Thus, we start with a total
+ // length of at least `output.length`. In addition, some cases have a
+ // whitespace in-between each other that is added to the total as well.
+ // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth
+ // function. Check the performance overhead and make it an opt-in in case it's
+ // significant.
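+ // Rough illustration: with the default breakLength, inspect({ a: 1, b: 2 })
+ // stays on one line as '{ a: 1, b: 2 }', whereas passing { breakLength: 1 }
+ // makes this check fail and every entry is printed on its own line.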
+ let totalLength = output.length + start; + if (totalLength + output.length > ctx.breakLength) + return false; + for (let i = 0; i < output.length; i++) { + if (ctx.colors) { + totalLength += removeColors(output[i]).length; + } else { + totalLength += output[i].length; + } + if (totalLength > ctx.breakLength) { + return false; + } + } + // Do not line up properties on the same line if `base` contains line breaks. + return base === '' || !StringPrototypeIncludes(base, '\n'); +} + +function reduceToSingleString( + ctx, output, base, braces, extrasType, recurseTimes, value) { + if (ctx.compact !== true) { + if (typeof ctx.compact === 'number' && ctx.compact >= 1) { + // Memorize the original output length. In case the output is grouped, + // prevent lining up the entries on a single line. + const entries = output.length; + // Group array elements together if the array contains at least six + // separate entries. + if (extrasType === kArrayExtrasType && entries > 6) { + output = groupArrayElements(ctx, output, value); + } + // `ctx.currentDepth` is set to the most inner depth of the currently + // inspected object part while `recurseTimes` is the actual current depth + // that is inspected. + // + // Example: + // + // const a = { first: [ 1, 2, 3 ], second: { inner: [ 1, 2, 3 ] } } + // + // The deepest depth of `a` is 2 (a.second.inner) and `a.first` has a max + // depth of 1. + // + // Consolidate all entries of the local most inner depth up to + // `ctx.compact`, as long as the properties are smaller than + // `ctx.breakLength`. + if (ctx.currentDepth - recurseTimes < ctx.compact && + entries === output.length) { + // Line up all entries on a single line in case the entries do not + // exceed `breakLength`. Add 10 as constant to start next to all other + // factors that may reduce `breakLength`. + const start = output.length + ctx.indentationLvl + + braces[0].length + base.length + 10; + if (isBelowBreakLength(ctx, output, start, base)) { + const joinedOutput = join(output, ', '); + if (!joinedOutput.includes('\n')) { + return `${base ? `${base} ` : ''}${braces[0]} ${joinedOutput}` + + ` ${braces[1]}`; + } + } + } + } + // Line up each entry on an individual line. + const indentation = `\n${StringPrototypeRepeat(' ', ctx.indentationLvl)}`; + return `${base ? `${base} ` : ''}${braces[0]}${indentation} ` + + `${join(output, `,${indentation} `)}${indentation}${braces[1]}`; + } + // Line up all entries on a single line in case the entries do not exceed + // `breakLength`. + if (isBelowBreakLength(ctx, output, 0, base)) { + return `${braces[0]}${base ? ` ${base}` : ''} ${join(output, ', ')} ` + + braces[1]; + } + const indentation = StringPrototypeRepeat(' ', ctx.indentationLvl); + // If the opening "brace" is too large, like in the case of "Set {", + // we need to force the first item to be on the next line or the + // items will not line up correctly. + const ln = base === '' && braces[0].length === 1 ? + ' ' : `${base ? ` ${base}` : ''}\n${indentation} `; + // Line up each entry on an individual line. + return `${braces[0]}${ln}${join(output, `,\n${indentation} `)} ${braces[1]}`; +} + +function hasBuiltInToString(value) { + // Prevent triggering proxy traps. + const getFullProxy = false; + const proxyTarget = getProxyDetails(value, getFullProxy); + if (proxyTarget !== undefined) { + value = proxyTarget; + } + + // Count objects that have no `toString` function as built-in. + if (typeof value.toString !== 'function') { + return true; + } + + // The object has a own `toString` property. 
Thus it's not not a built-in one. + if (ObjectPrototypeHasOwnProperty(value, 'toString')) { + return false; + } + + // Find the object that has the `toString` property as own property in the + // prototype chain. + let pointer = value; + do { + pointer = ObjectGetPrototypeOf(pointer); + } while (!ObjectPrototypeHasOwnProperty(pointer, 'toString')); + + // Check closer if the object is a built-in. + const descriptor = ObjectGetOwnPropertyDescriptor(pointer, 'constructor'); + return descriptor !== undefined && + typeof descriptor.value === 'function' && + builtInObjects.has(descriptor.value.name); +} + +const firstErrorLine = (error) => + StringPrototypeSplit(error.message, '\n', 1)[0]; +let CIRCULAR_ERROR_MESSAGE; +function tryStringify(arg) { + try { + return JSONStringify(arg); + } catch (err) { + // Populate the circular error message lazily + if (!CIRCULAR_ERROR_MESSAGE) { + try { + const a = {}; a.a = a; JSONStringify(a); + } catch (circularError) { + CIRCULAR_ERROR_MESSAGE = firstErrorLine(circularError); + } + } + if (err.name === 'TypeError' && + firstErrorLine(err) === CIRCULAR_ERROR_MESSAGE) { + return '[Circular]'; + } + throw err; + } +} + +function format(...args) { + return formatWithOptionsInternal(undefined, args); +} + +function formatWithOptions(inspectOptions, ...args) { + if (typeof inspectOptions !== 'object' || inspectOptions === null) { + throw new ERR_INVALID_ARG_TYPE( + 'inspectOptions', 'object', inspectOptions); + } + return formatWithOptionsInternal(inspectOptions, args); +} + +function formatNumberNoColor(number, options) { + return formatNumber( + stylizeNoColor, + number, + options?.numericSeparator ?? inspectDefaultOptions.numericSeparator + ); +} + +function formatBigIntNoColor(bigint, options) { + return formatBigInt( + stylizeNoColor, + bigint, + options?.numericSeparator ?? 
inspectDefaultOptions.numericSeparator + ); +} + +function formatWithOptionsInternal(inspectOptions, args) { + const first = args[0]; + let a = 0; + let str = ''; + let join = ''; + + if (typeof first === 'string') { + if (args.length === 1) { + return first; + } + let tempStr; + let lastPos = 0; + + for (let i = 0; i < first.length - 1; i++) { + if (StringPrototypeCharCodeAt(first, i) === 37) { // '%' + const nextChar = StringPrototypeCharCodeAt(first, ++i); + if (a + 1 !== args.length) { + switch (nextChar) { + case 115: { // 's' + const tempArg = args[++a]; + if (typeof tempArg === 'number') { + tempStr = formatNumberNoColor(tempArg, inspectOptions); + } else if (typeof tempArg === 'bigint') { + tempStr = formatBigIntNoColor(tempArg, inspectOptions); + } else if (typeof tempArg !== 'object' || + tempArg === null || + !hasBuiltInToString(tempArg)) { + tempStr = String(tempArg); + } else { + tempStr = inspect(tempArg, { + ...inspectOptions, + compact: 3, + colors: false, + depth: 0 + }); + } + break; + } + case 106: // 'j' + tempStr = tryStringify(args[++a]); + break; + case 100: { // 'd' + const tempNum = args[++a]; + if (typeof tempNum === 'bigint') { + tempStr = formatBigIntNoColor(tempNum, inspectOptions); + } else if (typeof tempNum === 'symbol') { + tempStr = 'NaN'; + } else { + tempStr = formatNumberNoColor(Number(tempNum), inspectOptions); + } + break; + } + case 79: // 'O' + tempStr = inspect(args[++a], inspectOptions); + break; + case 111: // 'o' + tempStr = inspect(args[++a], { + ...inspectOptions, + showHidden: true, + showProxy: true, + depth: 4 + }); + break; + case 105: { // 'i' + const tempInteger = args[++a]; + if (typeof tempInteger === 'bigint') { + tempStr = formatBigIntNoColor(tempInteger, inspectOptions); + } else if (typeof tempInteger === 'symbol') { + tempStr = 'NaN'; + } else { + tempStr = formatNumberNoColor( + NumberParseInt(tempInteger), inspectOptions); + } + break; + } + case 102: { // 'f' + const tempFloat = args[++a]; + if (typeof tempFloat === 'symbol') { + tempStr = 'NaN'; + } else { + tempStr = formatNumberNoColor( + NumberParseFloat(tempFloat), inspectOptions); + } + break; + } + case 99: // 'c' + a += 1; + tempStr = ''; + break; + case 37: // '%' + str += StringPrototypeSlice(first, lastPos, i); + lastPos = i + 1; + continue; + default: // Any other character is not a correct placeholder + continue; + } + if (lastPos !== i - 1) { + str += StringPrototypeSlice(first, lastPos, i - 1); + } + str += tempStr; + lastPos = i + 1; + } else if (nextChar === 37) { + str += StringPrototypeSlice(first, lastPos, i); + lastPos = i + 1; + } + } + } + if (lastPos !== 0) { + a++; + join = ' '; + if (lastPos < first.length) { + str += StringPrototypeSlice(first, lastPos); + } + } + } + + while (a < args.length) { + const value = args[a]; + str += join; + str += typeof value !== 'string' ? inspect(value, inspectOptions) : value; + join = ' '; + a++; + } + return str; +} + +if (false) { + const icu = {}; + // icu.getStringWidth(string, ambiguousAsFullWidth, expandEmojiSequence) + // Defaults: ambiguousAsFullWidth = false; expandEmojiSequence = true; + // TODO(BridgeAR): Expose the options to the user. That is probably the + // best thing possible at the moment, since it's difficult to know what + // the receiving end supports. 
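+ // Rough illustration (both implementations are expected to agree):
+ //   getStringWidth('abc');          // => 3
+ //   getStringWidth('コード');        // => 6 (full-width code points)
+ //   getStringWidth('a\u001b[31m');  // => 1 (escapes stripped by default)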
+ getStringWidth = function getStringWidth(str, removeControlChars = true) { + let width = 0; + + if (removeControlChars) + str = stripVTControlCharacters(str); + for (let i = 0; i < str.length; i++) { + // Try to avoid calling into C++ by first handling the ASCII portion of + // the string. If it is fully ASCII, we skip the C++ part. + const code = str.charCodeAt(i); + if (code >= 127) { + width += icu.getStringWidth(str.slice(i).normalize('NFC')); + break; + } + width += code >= 32 ? 1 : 0; + } + return width; + }; +} else { + /** + * Returns the number of columns required to display the given string. + */ + getStringWidth = function getStringWidth(str, removeControlChars = true) { + let width = 0; + + if (removeControlChars) + str = stripVTControlCharacters(str); + str = StringPrototypeNormalize(str, 'NFC'); + for (const char of new SafeStringIterator(str)) { + const code = StringPrototypeCodePointAt(char, 0); + if (isFullWidthCodePoint(code)) { + width += 2; + } else if (!isZeroWidthCodePoint(code)) { + width++; + } + } + + return width; + }; + + /** + * Returns true if the character represented by a given + * Unicode code point is full-width. Otherwise returns false. + */ + const isFullWidthCodePoint = (code) => { + // Code points are partially derived from: + // https://www.unicode.org/Public/UNIDATA/EastAsianWidth.txt + return code >= 0x1100 && ( + code <= 0x115f || // Hangul Jamo + code === 0x2329 || // LEFT-POINTING ANGLE BRACKET + code === 0x232a || // RIGHT-POINTING ANGLE BRACKET + // CJK Radicals Supplement .. Enclosed CJK Letters and Months + (code >= 0x2e80 && code <= 0x3247 && code !== 0x303f) || + // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A + (code >= 0x3250 && code <= 0x4dbf) || + // CJK Unified Ideographs .. Yi Radicals + (code >= 0x4e00 && code <= 0xa4c6) || + // Hangul Jamo Extended-A + (code >= 0xa960 && code <= 0xa97c) || + // Hangul Syllables + (code >= 0xac00 && code <= 0xd7a3) || + // CJK Compatibility Ideographs + (code >= 0xf900 && code <= 0xfaff) || + // Vertical Forms + (code >= 0xfe10 && code <= 0xfe19) || + // CJK Compatibility Forms .. Small Form Variants + (code >= 0xfe30 && code <= 0xfe6b) || + // Halfwidth and Fullwidth Forms + (code >= 0xff01 && code <= 0xff60) || + (code >= 0xffe0 && code <= 0xffe6) || + // Kana Supplement + (code >= 0x1b000 && code <= 0x1b001) || + // Enclosed Ideographic Supplement + (code >= 0x1f200 && code <= 0x1f251) || + // Miscellaneous Symbols and Pictographs 0x1f300 - 0x1f5ff + // Emoticons 0x1f600 - 0x1f64f + (code >= 0x1f300 && code <= 0x1f64f) || + // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane + (code >= 0x20000 && code <= 0x3fffd) + ); + }; + + const isZeroWidthCodePoint = (code) => { + return code <= 0x1F || // C0 control codes + (code >= 0x7F && code <= 0x9F) || // C1 control codes + (code >= 0x300 && code <= 0x36F) || // Combining Diacritical Marks + (code >= 0x200B && code <= 0x200F) || // Modifying Invisible Characters + // Combining Diacritical Marks for Symbols + (code >= 0x20D0 && code <= 0x20FF) || + (code >= 0xFE00 && code <= 0xFE0F) || // Variation Selectors + (code >= 0xFE20 && code <= 0xFE2F) || // Combining Half Marks + (code >= 0xE0100 && code <= 0xE01EF); // Variation Selectors + }; +} + +/** + * Remove all VT control characters. Use to estimate displayed string width. 
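+ * Rough illustration (assuming `ansi` matches standard escape sequences):
+ * stripVTControlCharacters('\u001b[32mok\u001b[39m') returns 'ok'.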
+ */ +function stripVTControlCharacters(str) { + validateString(str, 'str'); + + return str.replace(ansi, ''); +} + +module.exports = { + inspect, + format, + formatWithOptions, + getStringWidth, + inspectDefaultOptions, + stripVTControlCharacters +}; diff --git a/lib/internal/js_stream_socket.js b/lib/internal/js_stream_socket.js new file mode 100644 index 0000000000..7f27e6d516 --- /dev/null +++ b/lib/internal/js_stream_socket.js @@ -0,0 +1,242 @@ +'use strict'; + +const { + Symbol, +} = require('./primordials'); + +const { setImmediate } = require('timers'); +const assert = require('assert'); +const { Socket } = require('net'); +const { JSStream } = process.binding('js_stream'); +const uv = process.binding('uv'); +let debug = require('../util').debuglog( + 'stream_socket', + (fn) => { + debug = fn; + } +); +const { owner_symbol } = require('internal/async_hooks').symbols; +const { ERR_STREAM_WRAP } = require('./errors').codes; + +const kCurrentWriteRequest = Symbol('kCurrentWriteRequest'); +const kCurrentShutdownRequest = Symbol('kCurrentShutdownRequest'); +const kPendingShutdownRequest = Symbol('kPendingShutdownRequest'); + +function isClosing() { return this[owner_symbol].isClosing(); } + +function onreadstart() { return this[owner_symbol].readStart(); } + +function onreadstop() { return this[owner_symbol].readStop(); } + +function onshutdown(req) { return this[owner_symbol].doShutdown(req); } + +function onwrite(req, bufs) { return this[owner_symbol].doWrite(req, bufs); } + +/* This class serves as a wrapper for when the C++ side of Node wants access + * to a standard JS stream. For example, TLS or HTTP do not operate on network + * resources conceptually, although that is the common case and what we are + * optimizing for; in theory, they are completely composable and can work with + * any stream resource they see. + * + * For the common case, i.e. a TLS socket wrapping around a net.Socket, we + * can skip going through the JS layer and let TLS access the raw C++ handle + * of a net.Socket. The flipside of this is that, to maintain composability, + * we need a way to create "fake" net.Socket instances that call back into a + * "real" JavaScript stream. JSStreamSocket is exactly this. + */ +class JSStreamSocket extends Socket { + constructor(stream) { + const handle = new JSStream(); + handle.close = (cb) => { + debug('close'); + this.doClose(cb); + }; + // Inside of the following functions, `this` refers to the handle + // and `this[owner_symbol]` refers to this JSStreamSocket instance. + handle.isClosing = isClosing; + handle.onreadstart = onreadstart; + handle.onreadstop = onreadstop; + handle.onshutdown = onshutdown; + handle.onwrite = onwrite; + + stream.pause(); + stream.on('error', (err) => this.emit('error', err)); + const ondata = (chunk) => { + if (typeof chunk === 'string' || + stream.readableObjectMode === true) { + // Make sure that no further `data` events will happen. + stream.pause(); + stream.removeListener('data', ondata); + + this.emit('error', new ERR_STREAM_WRAP()); + return; + } + + debug('data', chunk.length); + if (this._handle) + this._handle.readBuffer(chunk); + }; + stream.on('data', ondata); + stream.once('end', () => { + debug('end'); + if (this._handle) + this._handle.emitEOF(); + }); + // Some `Stream` don't pass `hasError` parameters when closed. + stream.once('close', () => { + // Errors emitted from `stream` have also been emitted to this instance + // so that we don't pass errors to `destroy()` again. 
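+ // Illustration of the intended flow: if the wrapped stream emits 'error'
+ // followed by 'close', this socket re-emits the error once and is then
+ // torn down here without passing the error to destroy() a second time.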
+ this.destroy(); + }); + + super({ handle, manualStart: true }); + this.stream = stream; + this[kCurrentWriteRequest] = null; + this[kCurrentShutdownRequest] = null; + this[kPendingShutdownRequest] = null; + this.readable = stream.readable; + this.writable = stream.writable; + + // Start reading. + this.read(0); + } + + // Allow legacy requires in the test suite to keep working: + // const { StreamWrap } = require('./js_stream_socket') + static get StreamWrap() { + return JSStreamSocket; + } + + isClosing() { + return !this.readable || !this.writable; + } + + readStart() { + this.stream.resume(); + return 0; + } + + readStop() { + this.stream.pause(); + return 0; + } + + doShutdown(req) { + // TODO(addaleax): It might be nice if we could get into a state where + // DoShutdown() is not called on streams while a write is still pending. + // + // Currently, the only part of the code base where that happens is the + // TLS implementation, which calls both DoWrite() and DoShutdown() on the + // underlying network stream inside of its own DoShutdown() method. + // Working around that on the native side is not quite trivial (yet?), + // so for now that is supported here. + + if (this[kCurrentWriteRequest] !== null) { + this[kPendingShutdownRequest] = req; + return 0; + } + assert(this[kCurrentWriteRequest] === null); + assert(this[kCurrentShutdownRequest] === null); + this[kCurrentShutdownRequest] = req; + + const handle = this._handle; + + setImmediate(() => { + // Ensure that write is dispatched asynchronously. + this.stream.end(() => { + this.finishShutdown(handle, 0); + }); + }); + return 0; + } + + // handle === this._handle except when called from doClose(). + finishShutdown(handle, errCode) { + // The shutdown request might already have been cancelled. + if (this[kCurrentShutdownRequest] === null) + return; + const req = this[kCurrentShutdownRequest]; + this[kCurrentShutdownRequest] = null; + handle.finishShutdown(req, errCode); + } + + doWrite(req, bufs) { + assert(this[kCurrentWriteRequest] === null); + assert(this[kCurrentShutdownRequest] === null); + + const handle = this._handle; + const self = this; + + let pending = bufs.length; + + this.stream.cork(); + // Use `var` over `let` for performance optimization. + // eslint-disable-next-line no-var + for (var i = 0; i < bufs.length; ++i) + this.stream.write(bufs[i], done); + this.stream.uncork(); + + // Only set the request here, because the `write()` calls could throw. + this[kCurrentWriteRequest] = req; + + function done(err) { + if (!err && --pending !== 0) + return; + + // Ensure that this is called once in case of error + pending = 0; + + let errCode = 0; + if (err) { + errCode = uv[`UV_${err.code}`] || uv.UV_EPIPE; + } + + // Ensure that write was dispatched + setImmediate(() => { + self.finishWrite(handle, errCode); + }); + } + + return 0; + } + + // handle === this._handle except when called from doClose(). + finishWrite(handle, errCode) { + // The write request might already have been cancelled. + if (this[kCurrentWriteRequest] === null) + return; + const req = this[kCurrentWriteRequest]; + this[kCurrentWriteRequest] = null; + + handle.finishWrite(req, errCode); + if (this[kPendingShutdownRequest]) { + const req = this[kPendingShutdownRequest]; + this[kPendingShutdownRequest] = null; + this.doShutdown(req); + } + } + + doClose(cb) { + const handle = this._handle; + + // When sockets of the "net" module destroyed, they will call + // `this._handle.close()` which will also emit EOF if not emitted before. 
+ // This feature makes sockets on the other side emit "end" and "close" + // even though we haven't called `end()`. As `stream` are likely to be + // instances of `net.Socket`, calling `stream.destroy()` manually will + // avoid issues that don't properly close wrapped connections. + this.stream.destroy(); + + setImmediate(() => { + // Should be already set by net.js + assert(this._handle === null); + + this.finishWrite(handle, uv.UV_ECANCELED); + this.finishShutdown(handle, uv.UV_ECANCELED); + + cb(); + }); + } +} + +module.exports = JSStreamSocket; diff --git a/lib/internal/primordials.js b/lib/internal/primordials.js new file mode 100644 index 0000000000..eded9f2f64 --- /dev/null +++ b/lib/internal/primordials.js @@ -0,0 +1,446 @@ + + 'use strict'; + + const primordials = module.exports = {} + + +/* eslint-disable node-core/prefer-primordials */ + +// This file subclasses and stores the JS builtins that come from the VM +// so that Node.js's builtin modules do not need to later look these up from +// the global proxy, which can be mutated by users. + +// Use of primordials have sometimes a dramatic impact on performance, please +// benchmark all changes made in performance-sensitive areas of the codebase. +// See: https://github.com/nodejs/node/pull/38248 + +const { + defineProperty: ReflectDefineProperty, + getOwnPropertyDescriptor: ReflectGetOwnPropertyDescriptor, + ownKeys: ReflectOwnKeys, +} + = Reflect; + + if (typeof AggregateError === 'undefined') { + globalThis.AggregateError = require('aggregate-error'); + } + + +// `uncurryThis` is equivalent to `func => Function.prototype.call.bind(func)`. +// It is using `bind.bind(call)` to avoid using `Function.prototype.bind` +// and `Function.prototype.call` after it may have been mutated by users. +const { apply, bind, call } = Function.prototype; +const uncurryThis = bind.bind(call); +primordials.uncurryThis = uncurryThis; + +// `applyBind` is equivalent to `func => Function.prototype.apply.bind(func)`. +// It is using `bind.bind(apply)` to avoid using `Function.prototype.bind` +// and `Function.prototype.apply` after it may have been mutated by users. +const applyBind = bind.bind(apply); +primordials.applyBind = applyBind; + +// Methods that accept a variable number of arguments, and thus it's useful to +// also create `${prefix}${key}Apply`, which uses `Function.prototype.apply`, +// instead of `Function.prototype.call`, and thus doesn't require iterator +// destructuring. +const varargsMethods = [ + // 'ArrayPrototypeConcat' is omitted, because it performs the spread + // on its own for arrays and array-likes with a truthy + // @@isConcatSpreadable symbol property. + 'ArrayOf', + 'ArrayPrototypePush', + 'ArrayPrototypeUnshift', + // 'FunctionPrototypeCall' is omitted, since there's 'ReflectApply' + // and 'FunctionPrototypeApply'. + 'MathHypot', + 'MathMax', + 'MathMin', + 'StringPrototypeConcat', + 'TypedArrayOf', +]; + +function getNewKey(key) { + return typeof key === 'symbol' ? 
+ `Symbol${key.description[7].toUpperCase()}${key.description.slice(8)}` : + `${key[0].toUpperCase()}${key.slice(1)}`; +} + +function copyAccessor(dest, prefix, key, { enumerable, get, set }) { + ReflectDefineProperty(dest, `${prefix}Get${key}`, { + value: uncurryThis(get), + enumerable + }); + if (set !== undefined) { + ReflectDefineProperty(dest, `${prefix}Set${key}`, { + value: uncurryThis(set), + enumerable + }); + } +} + +function copyPropsRenamed(src, dest, prefix) { + for (const key of ReflectOwnKeys(src)) { + const newKey = getNewKey(key); + const desc = ReflectGetOwnPropertyDescriptor(src, key); + if ('get' in desc) { + copyAccessor(dest, prefix, newKey, desc); + } else { + const name = `${prefix}${newKey}`; + ReflectDefineProperty(dest, name, desc); + if (varargsMethods.includes(name)) { + ReflectDefineProperty(dest, `${name}Apply`, { + // `src` is bound as the `this` so that the static `this` points + // to the object it was defined on, + // e.g.: `ArrayOfApply` gets a `this` of `Array`: + value: applyBind(desc.value, src), + }); + } + } + } +} + +function copyPropsRenamedBound(src, dest, prefix) { + for (const key of ReflectOwnKeys(src)) { + const newKey = getNewKey(key); + const desc = ReflectGetOwnPropertyDescriptor(src, key); + if ('get' in desc) { + copyAccessor(dest, prefix, newKey, desc); + } else { + const { value } = desc; + if (typeof value === 'function') { + desc.value = value.bind(src); + } + + const name = `${prefix}${newKey}`; + ReflectDefineProperty(dest, name, desc); + if (varargsMethods.includes(name)) { + ReflectDefineProperty(dest, `${name}Apply`, { + value: applyBind(value, src), + }); + } + } + } +} + +function copyPrototype(src, dest, prefix) { + for (const key of ReflectOwnKeys(src)) { + const newKey = getNewKey(key); + const desc = ReflectGetOwnPropertyDescriptor(src, key); + if ('get' in desc) { + copyAccessor(dest, prefix, newKey, desc); + } else { + const { value } = desc; + if (typeof value === 'function') { + desc.value = uncurryThis(value); + } + + const name = `${prefix}${newKey}`; + ReflectDefineProperty(dest, name, desc); + if (varargsMethods.includes(name)) { + ReflectDefineProperty(dest, `${name}Apply`, { + value: applyBind(value), + }); + } + } + } +} + +// Create copies of configurable value properties of the global object +[ + 'Proxy', + 'globalThis', +].forEach((name) => { + // eslint-disable-next-line no-restricted-globals + primordials[name] = globalThis[name]; +}); + +// Create copies of URI handling functions +[ + decodeURI, + decodeURIComponent, + encodeURI, + encodeURIComponent, +].forEach((fn) => { + primordials[fn.name] = fn; +}); + +// Create copies of legacy functions +[ + escape, + eval, + unescape, +].forEach((fn) => { + primordials[fn.name] = fn; +}); + +// Create copies of the namespace objects +[ + 'JSON', + 'Math', + 'Proxy', + 'Reflect', +].forEach((name) => { + // eslint-disable-next-line no-restricted-globals + copyPropsRenamed(globalThis[name], primordials, name); +}); + +// Create copies of intrinsic objects +[ + 'AggregateError', + 'Array', + 'ArrayBuffer', + 'BigInt', + 'BigInt64Array', + 'BigUint64Array', + 'Boolean', + 'DataView', + 'Date', + 'Error', + 'EvalError', + 'FinalizationRegistry', + 'Float32Array', + 'Float64Array', + 'Function', + 'Int16Array', + 'Int32Array', + 'Int8Array', + 'Map', + 'Number', + 'Object', + 'RangeError', + 'ReferenceError', + 'RegExp', + 'Set', + 'String', + 'Symbol', + 'SyntaxError', + 'TypeError', + 'URIError', + 'Uint16Array', + 'Uint32Array', + 'Uint8Array', + 'Uint8ClampedArray', 
+ 'WeakMap', + 'WeakRef', + 'WeakSet', +].forEach((name) => { + // eslint-disable-next-line no-restricted-globals + const original = globalThis[name]; + primordials[name] = original; + copyPropsRenamed(original, primordials, name); + copyPrototype(original.prototype, primordials, `${name}Prototype`); +}); + +// Create copies of intrinsic objects that require a valid `this` to call +// static methods. +// Refs: https://www.ecma-international.org/ecma-262/#sec-promise.all +[ + 'Promise', +].forEach((name) => { + // eslint-disable-next-line no-restricted-globals + const original = globalThis[name]; + primordials[name] = original; + copyPropsRenamedBound(original, primordials, name); + copyPrototype(original.prototype, primordials, `${name}Prototype`); +}); + +// Create copies of abstract intrinsic objects that are not directly exposed +// on the global object. +// Refs: https://tc39.es/ecma262/#sec-%typedarray%-intrinsic-object +[ + { name: 'TypedArray', original: Reflect.getPrototypeOf(Uint8Array) }, + { name: 'ArrayIterator', original: { + prototype: Reflect.getPrototypeOf(Array.prototype[Symbol.iterator]()), + } }, + { name: 'StringIterator', original: { + prototype: Reflect.getPrototypeOf(String.prototype[Symbol.iterator]()), + } }, +].forEach(({ name, original }) => { + primordials[name] = original; + // The static %TypedArray% methods require a valid `this`, but can't be bound, + // as they need a subclass constructor as the receiver: + copyPrototype(original, primordials, name); + copyPrototype(original.prototype, primordials, `${name}Prototype`); +}); + +/* eslint-enable node-core/prefer-primordials */ + +const { + ArrayPrototypeForEach, + FinalizationRegistry, + FunctionPrototypeCall, + Map, + ObjectFreeze, + ObjectSetPrototypeOf, + Promise, + PromisePrototypeThen, + Set, + SymbolIterator, + WeakMap, + WeakRef, + WeakSet, +} = require('./primordials'); + +// Because these functions are used by `makeSafe`, which is exposed +// on the `primordials` object, it's important to use const references +// to the primordials that they use: +const createSafeIterator = (factory, next) => { + class SafeIterator { + constructor(iterable) { + this._iterator = factory(iterable); + } + next() { + return next(this._iterator); + } + [SymbolIterator]() { + return this; + } + } + ObjectSetPrototypeOf(SafeIterator.prototype, null); + ObjectFreeze(SafeIterator.prototype); + ObjectFreeze(SafeIterator); + return SafeIterator; +}; + +primordials.SafeArrayIterator = createSafeIterator( + primordials.ArrayPrototypeSymbolIterator, + primordials.ArrayIteratorPrototypeNext +); +primordials.SafeStringIterator = createSafeIterator( + primordials.StringPrototypeSymbolIterator, + primordials.StringIteratorPrototypeNext +); + +const copyProps = (src, dest) => { + ArrayPrototypeForEach(ReflectOwnKeys(src), (key) => { + if (!ReflectGetOwnPropertyDescriptor(dest, key)) { + ReflectDefineProperty( + dest, + key, + ReflectGetOwnPropertyDescriptor(src, key)); + } + }); +}; + +/** + * @type {typeof primordials.makeSafe} + */ +const makeSafe = (unsafe, safe) => { + if (SymbolIterator in unsafe.prototype) { + const dummy = new unsafe(); + let next; // We can reuse the same `next` method. + + ArrayPrototypeForEach(ReflectOwnKeys(unsafe.prototype), (key) => { + if (!ReflectGetOwnPropertyDescriptor(safe.prototype, key)) { + const desc = ReflectGetOwnPropertyDescriptor(unsafe.prototype, key); + if ( + typeof desc.value === 'function' && + desc.value.length === 0 && + SymbolIterator in (FunctionPrototypeCall(desc.value, dummy) ?? 
{}) + ) { + const createIterator = uncurryThis(desc.value); + if (typeof next === 'undefined') { + next = uncurryThis(createIterator(dummy).next); + } + + const SafeIterator = createSafeIterator(createIterator, next); + desc.value = function() { + return new SafeIterator(this); + }; + } + ReflectDefineProperty(safe.prototype, key, desc); + } + }); + } else { + copyProps(unsafe.prototype, safe.prototype); + } + copyProps(unsafe, safe); + + ObjectSetPrototypeOf(safe.prototype, null); + ObjectFreeze(safe.prototype); + ObjectFreeze(safe); + return safe; +}; +primordials.makeSafe = makeSafe; + +// Subclass the constructors because we need to use their prototype +// methods later. +// Defining the `constructor` is necessary here to avoid the default +// constructor which uses the user-mutable `%ArrayIteratorPrototype%.next`. +primordials.SafeMap = makeSafe( + Map, + class SafeMap extends Map { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); +primordials.SafeWeakMap = makeSafe( + WeakMap, + class SafeWeakMap extends WeakMap { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); + +primordials.SafeSet = makeSafe( + Set, + class SafeSet extends Set { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); +primordials.SafeWeakSet = makeSafe( + WeakSet, + class SafeWeakSet extends WeakSet { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); + +primordials.SafeFinalizationRegistry = makeSafe( + FinalizationRegistry, + class SafeFinalizationRegistry extends FinalizationRegistry { + // eslint-disable-next-line no-useless-constructor + constructor(cleanupCallback) { super(cleanupCallback); } + } +); +primordials.SafeWeakRef = makeSafe( + WeakRef, + class SafeWeakRef extends WeakRef { + // eslint-disable-next-line no-useless-constructor + constructor(target) { super(target); } + } +); + +const SafePromise = makeSafe( + Promise, + class SafePromise extends Promise { + // eslint-disable-next-line no-useless-constructor + constructor(executor) { super(executor); } + } +); + +primordials.PromisePrototypeCatch = (thisPromise, onRejected) => + PromisePrototypeThen(thisPromise, undefined, onRejected); + +/** + * Attaches a callback that is invoked when the Promise is settled (fulfilled or + * rejected). The resolved value cannot be modified from the callback. + * Prefer using async functions when possible. + * @param {Promise} thisPromise + * @param {() => void) | undefined | null} onFinally The callback to execute + * when the Promise is settled (fulfilled or rejected). + * @returns {Promise} A Promise for the completion of the callback. + */ +primordials.SafePromisePrototypeFinally = (thisPromise, onFinally) => + // Wrapping on a new Promise is necessary to not expose the SafePromise + // prototype to user-land. 
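+  // Illustrative sketch: even if user code tampers with the global
+  // `Promise.prototype.finally`, e.g.
+  //
+  //   Promise.prototype.finally = () => { throw new Error('tampered'); };
+  //
+  // the callback still runs, because the SafePromise created below carries
+  // its own copies of `then`/`finally`, and only a plain Promise is returned.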
+ new Promise((a, b) => + new SafePromise((a, b) => PromisePrototypeThen(thisPromise, a, b)) + .finally(onFinally) + .then(a, b) + ); + +primordials.AsyncIteratorPrototype = + primordials.ReflectGetPrototypeOf( + primordials.ReflectGetPrototypeOf( + async function* () {}).prototype); + +ObjectSetPrototypeOf(primordials, null); +ObjectFreeze(primordials); diff --git a/lib/internal/streams/add-abort-signal.js b/lib/internal/streams/add-abort-signal.js new file mode 100644 index 0000000000..12124490b7 --- /dev/null +++ b/lib/internal/streams/add-abort-signal.js @@ -0,0 +1,46 @@ +'use strict'; + +const { + AbortError, + codes, +} = require('../errors'); + +const eos = require('./end-of-stream'); +const { ERR_INVALID_ARG_TYPE } = codes; + +// This method is inlined here for readable-stream +// It also does not allow for signal to not exist on the stream +// https://github.com/nodejs/node/pull/36061#discussion_r533718029 +const validateAbortSignal = (signal, name) => { + if (typeof signal !== 'object' || + !('aborted' in signal)) { + throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal); + } +}; + +function isNodeStream(obj) { + return !!(obj && typeof obj.pipe === 'function'); +} + +module.exports.addAbortSignal = function addAbortSignal(signal, stream) { + validateAbortSignal(signal, 'signal'); + if (!isNodeStream(stream)) { + throw new ERR_INVALID_ARG_TYPE('stream', 'stream.Stream', stream); + } + return module.exports.addAbortSignalNoValidate(signal, stream); +}; +module.exports.addAbortSignalNoValidate = function(signal, stream) { + if (typeof signal !== 'object' || !('aborted' in signal)) { + return stream; + } + const onAbort = () => { + stream.destroy(new AbortError(undefined, { cause: signal.reason })); + }; + if (signal.aborted) { + onAbort(); + } else { + signal.addEventListener('abort', onAbort); + eos(stream, () => signal.removeEventListener('abort', onAbort)); + } + return stream; +}; diff --git a/lib/internal/streams/buffer_list.js b/lib/internal/streams/buffer_list.js new file mode 100644 index 0000000000..e3e6bd923b --- /dev/null +++ b/lib/internal/streams/buffer_list.js @@ -0,0 +1,181 @@ +'use strict'; + +const { + StringPrototypeSlice, + SymbolIterator, + TypedArrayPrototypeSet, + Uint8Array, +} = require('../primordials'); + +const { Buffer } = require('buffer'); +const inspect = { custom: Symbol('nodejs.util.inspect.custom') }; + +module.exports = class BufferList { + constructor() { + this.head = null; + this.tail = null; + this.length = 0; + } + + push(v) { + const entry = { data: v, next: null }; + if (this.length > 0) + this.tail.next = entry; + else + this.head = entry; + this.tail = entry; + ++this.length; + } + + unshift(v) { + const entry = { data: v, next: this.head }; + if (this.length === 0) + this.tail = entry; + this.head = entry; + ++this.length; + } + + shift() { + if (this.length === 0) + return; + const ret = this.head.data; + if (this.length === 1) + this.head = this.tail = null; + else + this.head = this.head.next; + --this.length; + return ret; + } + + clear() { + this.head = this.tail = null; + this.length = 0; + } + + join(s) { + if (this.length === 0) + return ''; + let p = this.head; + let ret = '' + p.data; + while ((p = p.next) !== null) + ret += s + p.data; + return ret; + } + + concat(n) { + if (this.length === 0) + return Buffer.alloc(0); + const ret = Buffer.allocUnsafe(n >>> 0); + let p = this.head; + let i = 0; + while (p) { + TypedArrayPrototypeSet(ret, p.data, i); + i += p.data.length; + p = p.next; + } + return ret; + } + + // 
Consumes a specified amount of bytes or characters from the buffered data. + consume(n, hasStrings) { + const data = this.head.data; + if (n < data.length) { + // `slice` is the same for buffers and strings. + const slice = data.slice(0, n); + this.head.data = data.slice(n); + return slice; + } + if (n === data.length) { + // First chunk is a perfect match. + return this.shift(); + } + // Result spans more than one buffer. + return hasStrings ? this._getString(n) : this._getBuffer(n); + } + + first() { + return this.head.data; + } + + *[SymbolIterator]() { + for (let p = this.head; p; p = p.next) { + yield p.data; + } + } + + // Consumes a specified amount of characters from the buffered data. + _getString(n) { + let ret = ''; + let p = this.head; + let c = 0; + do { + const str = p.data; + if (n > str.length) { + ret += str; + n -= str.length; + } else { + if (n === str.length) { + ret += str; + ++c; + if (p.next) + this.head = p.next; + else + this.head = this.tail = null; + } else { + ret += StringPrototypeSlice(str, 0, n); + this.head = p; + p.data = StringPrototypeSlice(str, n); + } + break; + } + ++c; + } while ((p = p.next) !== null); + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + _getBuffer(n) { + const ret = Buffer.allocUnsafe(n); + const retLen = n; + let p = this.head; + let c = 0; + do { + const buf = p.data; + if (n > buf.length) { + TypedArrayPrototypeSet(ret, buf, retLen - n); + n -= buf.length; + } else { + if (n === buf.length) { + TypedArrayPrototypeSet(ret, buf, retLen - n); + ++c; + if (p.next) + this.head = p.next; + else + this.head = this.tail = null; + } else { + TypedArrayPrototypeSet(ret, + new Uint8Array(buf.buffer, buf.byteOffset, n), + retLen - n); + this.head = p; + p.data = buf.slice(n); + } + break; + } + ++c; + } while ((p = p.next) !== null); + this.length -= c; + return ret; + } + + // Make sure the linked list only shows the minimal necessary information. + [inspect.custom](_, options) { + return inspect(this, { + ...options, + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + }); + } +}; diff --git a/lib/internal/streams/compose.js b/lib/internal/streams/compose.js new file mode 100644 index 0000000000..065325a2eb --- /dev/null +++ b/lib/internal/streams/compose.js @@ -0,0 +1,174 @@ +'use strict'; + +const { pipeline } = require('./pipeline'); +const Duplex = require('./duplex'); +const { destroyer } = require('./destroy'); +const { + isNodeStream, + isReadable, + isWritable, +} = require('./utils'); +const { + AbortError, + codes: { + ERR_INVALID_ARG_VALUE, + ERR_MISSING_ARGS, + }, +} = require('../errors'); + +module.exports = function compose(...streams) { + if (streams.length === 0) { + throw new ERR_MISSING_ARGS('streams'); + } + + if (streams.length === 1) { + return Duplex.from(streams[0]); + } + + const orgStreams = [...streams]; + + if (typeof streams[0] === 'function') { + streams[0] = Duplex.from(streams[0]); + } + + if (typeof streams[streams.length - 1] === 'function') { + const idx = streams.length - 1; + streams[idx] = Duplex.from(streams[idx]); + } + + for (let n = 0; n < streams.length; ++n) { + if (!isNodeStream(streams[n])) { + // TODO(ronag): Add checks for non streams. 
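+      // Entries that are not Node streams here (e.g. transform functions in
+      // the middle of the chain) are handled and validated later by
+      // pipeline(), so skip the readable/writable checks for them.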
+ continue; + } + if (n < streams.length - 1 && !isReadable(streams[n])) { + throw new ERR_INVALID_ARG_VALUE( + `streams[${n}]`, + orgStreams[n], + 'must be readable' + ); + } + if (n > 0 && !isWritable(streams[n])) { + throw new ERR_INVALID_ARG_VALUE( + `streams[${n}]`, + orgStreams[n], + 'must be writable' + ); + } + } + + let ondrain; + let onfinish; + let onreadable; + let onclose; + let d; + + function onfinished(err) { + const cb = onclose; + onclose = null; + + if (cb) { + cb(err); + } else if (err) { + d.destroy(err); + } else if (!readable && !writable) { + d.destroy(); + } + } + + const head = streams[0]; + const tail = pipeline(streams, onfinished); + + const writable = !!isWritable(head); + const readable = !!isReadable(tail); + + // TODO(ronag): Avoid double buffering. + // Implement Writable/Readable/Duplex traits. + // See, https://github.com/nodejs/node/pull/33515. + d = new Duplex({ + // TODO (ronag): highWaterMark? + writableObjectMode: !!head?.writableObjectMode, + readableObjectMode: !!tail?.writableObjectMode, + writable, + readable, + }); + + if (writable) { + d._write = function(chunk, encoding, callback) { + if (head.write(chunk, encoding)) { + callback(); + } else { + ondrain = callback; + } + }; + + d._final = function(callback) { + head.end(); + onfinish = callback; + }; + + head.on('drain', function() { + if (ondrain) { + const cb = ondrain; + ondrain = null; + cb(); + } + }); + + tail.on('finish', function() { + if (onfinish) { + const cb = onfinish; + onfinish = null; + cb(); + } + }); + } + + if (readable) { + tail.on('readable', function() { + if (onreadable) { + const cb = onreadable; + onreadable = null; + cb(); + } + }); + + tail.on('end', function() { + d.push(null); + }); + + d._read = function() { + while (true) { + const buf = tail.read(); + + if (buf === null) { + onreadable = d._read; + return; + } + + if (!d.push(buf)) { + return; + } + } + }; + } + + d._destroy = function(err, callback) { + if (!err && onclose !== null) { + err = new AbortError(); + } + + onreadable = null; + ondrain = null; + onfinish = null; + + if (onclose === null) { + callback(err); + } else { + onclose = callback; + destroyer(tail, err); + } + }; + + return d; +}; diff --git a/lib/internal/streams/destroy.js b/lib/internal/streams/destroy.js new file mode 100644 index 0000000000..f719ea88cc --- /dev/null +++ b/lib/internal/streams/destroy.js @@ -0,0 +1,368 @@ +'use strict'; + +const { + aggregateTwoErrors, + codes: { + ERR_MULTIPLE_CALLBACK, + }, + AbortError, +} = require('../errors'); +const { + Symbol, +} = require('../primordials'); +const { + kDestroyed, + isDestroyed, + isFinished, + isServerRequest +} = require('./utils'); + +const kDestroy = Symbol('kDestroy'); +const kConstruct = Symbol('kConstruct'); + +function checkError(err, w, r) { + if (err) { + // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 + err.stack; // eslint-disable-line no-unused-expressions + + if (w && !w.errored) { + w.errored = err; + } + if (r && !r.errored) { + r.errored = err; + } + } +} + +// Backwards compat. cb() is undocumented and unused in core but +// unfortunately might be used by modules. +function destroy(err, cb) { + const r = this._readableState; + const w = this._writableState; + // With duplex streams we use the writable side for state. 
+ const s = w || r; + + if ((w && w.destroyed) || (r && r.destroyed)) { + if (typeof cb === 'function') { + cb(); + } + + return this; + } + + + // We set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + checkError(err, w, r); + + if (w) { + w.destroyed = true; + } + if (r) { + r.destroyed = true; + } + + // If still constructing then defer calling _destroy. + if (!s.constructed) { + this.once(kDestroy, function(er) { + _destroy(this, aggregateTwoErrors(er, err), cb); + }); + } else { + _destroy(this, err, cb); + } + + return this; +} + +function _destroy(self, err, cb) { + let called = false; + + function onDestroy(err) { + if (called) { + return; + } + called = true; + + const r = self._readableState; + const w = self._writableState; + + checkError(err, w, r); + + if (w) { + w.closed = true; + } + if (r) { + r.closed = true; + } + + if (typeof cb === 'function') { + cb(err); + } + + if (err) { + process.nextTick(emitErrorCloseNT, self, err); + } else { + process.nextTick(emitCloseNT, self); + } + } + try { + const result = self._destroy(err || null, onDestroy); + if (result != null) { + const then = result.then; + if (typeof then === 'function') { + then.call( + result, + function() { + process.nextTick(onDestroy, null); + }, + function(err) { + process.nextTick(onDestroy, err); + }); + } + } + } catch (err) { + onDestroy(err); + } +} + +function emitErrorCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} + +function emitCloseNT(self) { + const r = self._readableState; + const w = self._writableState; + + if (w) { + w.closeEmitted = true; + } + if (r) { + r.closeEmitted = true; + } + + if ((w && w.emitClose) || (r && r.emitClose)) { + self.emit('close'); + } +} + +function emitErrorNT(self, err) { + const r = self._readableState; + const w = self._writableState; + + if ((w && w.errorEmitted) || (r && r.errorEmitted)) { + return; + } + + if (w) { + w.errorEmitted = true; + } + if (r) { + r.errorEmitted = true; + } + + self.emit('error', err); +} + +function undestroy() { + const r = this._readableState; + const w = this._writableState; + + if (r) { + r.constructed = true; + r.closed = false; + r.closeEmitted = false; + r.destroyed = false; + r.errored = null; + r.errorEmitted = false; + r.reading = false; + r.ended = r.readable === false; + r.endEmitted = r.readable === false; + } + + if (w) { + w.constructed = true; + w.destroyed = false; + w.closed = false; + w.closeEmitted = false; + w.errored = null; + w.errorEmitted = false; + w.finalCalled = false; + w.prefinished = false; + w.ended = w.writable === false; + w.ending = w.writable === false; + w.finished = w.writable === false; + } +} + +function errorOrDestroy(stream, err, sync) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. 
+ + const r = stream._readableState; + const w = stream._writableState; + + if ((w && w.destroyed) || (r && r.destroyed)) { + return this; + } + + if ((r && r.autoDestroy) || (w && w.autoDestroy)) + stream.destroy(err); + else if (err) { + // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 + err.stack; // eslint-disable-line no-unused-expressions + + if (w && !w.errored) { + w.errored = err; + } + if (r && !r.errored) { + r.errored = err; + } + if (sync) { + process.nextTick(emitErrorNT, stream, err); + } else { + emitErrorNT(stream, err); + } + } +} + +function construct(stream, cb) { + if (typeof stream._construct !== 'function') { + return; + } + + const r = stream._readableState; + const w = stream._writableState; + + if (r) { + r.constructed = false; + } + if (w) { + w.constructed = false; + } + + stream.once(kConstruct, cb); + + if (stream.listenerCount(kConstruct) > 1) { + // Duplex + return; + } + + process.nextTick(constructNT, stream); +} + +function constructNT(stream) { + let called = false; + + function onConstruct(err) { + if (called) { + errorOrDestroy(stream, err ?? new ERR_MULTIPLE_CALLBACK()); + return; + } + called = true; + + const r = stream._readableState; + const w = stream._writableState; + const s = w || r; + + if (r) { + r.constructed = true; + } + if (w) { + w.constructed = true; + } + + if (s.destroyed) { + stream.emit(kDestroy, err); + } else if (err) { + errorOrDestroy(stream, err, true); + } else { + process.nextTick(emitConstructNT, stream); + } + } + + try { + const result = stream._construct(onConstruct); + if (result != null) { + const then = result.then; + if (typeof then === 'function') { + then.call( + result, + function() { + if (!called) { + process.nextTick(onConstruct, null); + } + }, + function(err) { + if (!called) { + process.nextTick(onConstruct, err); + } + }); + } + } + } catch (err) { + onConstruct(err); + } +} + +function emitConstructNT(stream) { + stream.emit(kConstruct); +} + +function isRequest(stream) { + return stream && stream.setHeader && typeof stream.abort === 'function'; +} + +function emitCloseLegacy(stream) { + stream.emit('close'); +} + +function emitErrorCloseLegacy(stream, err) { + stream.emit('error', err); + process.nextTick(emitCloseLegacy, stream); +} + +// Normalize destroy for legacy. +function destroyer(stream, err) { + if (!stream || isDestroyed(stream)) { + return; + } + + if (!err && !isFinished(stream)) { + err = new AbortError(); + } + + // TODO: Remove isRequest branches. + if (isServerRequest(stream)) { + stream.socket = null; + stream.destroy(err); + } else if (isRequest(stream)) { + stream.abort(); + } else if (isRequest(stream.req)) { + stream.req.abort(); + } else if (typeof stream.destroy === 'function') { + stream.destroy(err); + } else if (typeof stream.close === 'function') { + // TODO: Don't lose err? + stream.close(); + } else if (err) { + process.nextTick(emitErrorCloseLegacy, stream); + } else { + process.nextTick(emitCloseLegacy, stream); + } + + if (!stream.destroyed) { + stream[kDestroyed] = true; + } +} + +module.exports = { + construct, + destroyer, + destroy, + undestroy, + errorOrDestroy +}; diff --git a/lib/internal/streams/duplex.js b/lib/internal/streams/duplex.js new file mode 100644 index 0000000000..66668e20db --- /dev/null +++ b/lib/internal/streams/duplex.js @@ -0,0 +1,146 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototype inheritance, this class +// prototypically inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +const { + ObjectDefineProperties, + ObjectGetOwnPropertyDescriptor, + ObjectKeys, + ObjectSetPrototypeOf, +} = require('../primordials'); + +module.exports = Duplex; + +const Readable = require('./readable'); +const Writable = require('./writable'); + +ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype); +ObjectSetPrototypeOf(Duplex, Readable); + +{ + const keys = ObjectKeys(Writable.prototype); + // Allow the keys array to be GC'ed. 
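+  // Copy every Writable prototype method that Readable did not already
+  // provide (write(), end(), cork(), uncork(), ...) onto Duplex.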
+ for (let i = 0; i < keys.length; i++) { + const method = keys[i]; + if (!Duplex.prototype[method]) + Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) + return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options) { + this.allowHalfOpen = options.allowHalfOpen !== false; + + if (options.readable === false) { + this._readableState.readable = false; + this._readableState.ended = true; + this._readableState.endEmitted = true; + } + + if (options.writable === false) { + this._writableState.writable = false; + this._writableState.ending = true; + this._writableState.ended = true; + this._writableState.finished = true; + } + } else { + this.allowHalfOpen = true; + } +} + +ObjectDefineProperties(Duplex.prototype, { + writable: + ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writable'), + writableHighWaterMark: + ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableHighWaterMark'), + writableObjectMode: + ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableObjectMode'), + writableBuffer: + ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableBuffer'), + writableLength: + ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableLength'), + writableFinished: + ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableFinished'), + writableCorked: + ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableCorked'), + writableEnded: + ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableEnded'), + writableNeedDrain: + ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableNeedDrain'), + + destroyed: { + get() { + if (this._readableState === undefined || + this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set(value) { + // Backward compatibility, the user is explicitly + // managing destroyed. 
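+      // Note that assigning `destroyed` only mirrors the flag onto both
+      // sides; it does not run the destroy/cleanup machinery itself.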
+ if (this._readableState && this._writableState) { + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } + } + } +}); + +let webStreamsAdapters; + +// Lazy to avoid circular references +function lazyWebStreams() { + if (webStreamsAdapters === undefined) + webStreamsAdapters = {}; + return webStreamsAdapters; +} + +Duplex.fromWeb = function(pair, options) { + return lazyWebStreams().newStreamDuplexFromReadableWritablePair( + pair, + options); +}; + +Duplex.toWeb = function(duplex) { + return lazyWebStreams().newReadableWritablePairFromDuplex(duplex); +}; + +let duplexify; + +Duplex.from = function(body) { + if (!duplexify) { + duplexify = require('./duplexify'); + } + return duplexify(body, 'body'); +}; diff --git a/lib/internal/streams/duplexify.js b/lib/internal/streams/duplexify.js new file mode 100644 index 0000000000..4bfaed7a1a --- /dev/null +++ b/lib/internal/streams/duplexify.js @@ -0,0 +1,388 @@ +'use strict'; + +const { + isReadable, + isWritable, + isIterable, + isNodeStream, + isReadableNodeStream, + isWritableNodeStream, + isDuplexNodeStream, +} = require('./utils'); +const eos = require('./end-of-stream'); +const { + AbortError, + codes: { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_RETURN_VALUE, + }, +} = require('../errors'); +const { destroyer } = require('./destroy'); +const Duplex = require('./duplex'); +const Readable = require('./readable'); +const { createDeferredPromise } = require('../../util'); +const from = require('./from'); + +const { + isBlob, +} = + { + isBlob(b) { + return b instanceof Blob + } + } + + const { Blob } = require('buffer'); + + + if (typeof AbortController === 'undefined') { + globalThis.AbortController = require('abort-controller').AbortController; + } + + +const { + FunctionPrototypeCall +} = require('../primordials'); + +// This is needed for pre node 17. +class Duplexify extends Duplex { + constructor(options) { + super(options); + + // https://github.com/nodejs/node/pull/34385 + + if (options?.readable === false) { + this._readableState.readable = false; + this._readableState.ended = true; + this._readableState.endEmitted = true; + } + + if (options?.writable === false) { + this._writableState.writable = false; + this._writableState.ending = true; + this._writableState.ended = true; + this._writableState.finished = true; + } + } +} + +module.exports = function duplexify(body, name) { + if (isDuplexNodeStream(body)) { + return body; + } + + if (isReadableNodeStream(body)) { + return _duplexify({ readable: body }); + } + + if (isWritableNodeStream(body)) { + return _duplexify({ writable: body }); + } + + if (isNodeStream(body)) { + return _duplexify({ writable: false, readable: false }); + } + + // TODO: Webstreams + // if (isReadableStream(body)) { + // return _duplexify({ readable: Readable.fromWeb(body) }); + // } + + // TODO: Webstreams + // if (isWritableStream(body)) { + // return _duplexify({ writable: Writable.fromWeb(body) }); + // } + + if (typeof body === 'function') { + const { value, write, final, destroy } = fromAsyncGen(body); + + if (isIterable(value)) { + return from(Duplexify, value, { + // TODO (ronag): highWaterMark? 
+ objectMode: true, + write, + final, + destroy + }); + } + + const then = value?.then; + if (typeof then === 'function') { + let d; + + const promise = FunctionPrototypeCall( + then, + value, + (val) => { + if (val != null) { + throw new ERR_INVALID_RETURN_VALUE('nully', 'body', val); + } + }, + (err) => { + destroyer(d, err); + } + ); + + return d = new Duplexify({ + // TODO (ronag): highWaterMark? + objectMode: true, + readable: false, + write, + final(cb) { + final(async () => { + try { + await promise; + process.nextTick(cb, null); + } catch (err) { + process.nextTick(cb, err); + } + }); + }, + destroy + }); + } + + throw new ERR_INVALID_RETURN_VALUE( + 'Iterable, AsyncIterable or AsyncFunction', name, value); + } + + if (isBlob(body)) { + return duplexify(body.arrayBuffer()); + } + + if (isIterable(body)) { + return from(Duplexify, body, { + // TODO (ronag): highWaterMark? + objectMode: true, + writable: false + }); + } + + // TODO: Webstreams. + // if ( + // isReadableStream(body?.readable) && + // isWritableStream(body?.writable) + // ) { + // return Duplexify.fromWeb(body); + // } + + if ( + typeof body?.writable === 'object' || + typeof body?.readable === 'object' + ) { + const readable = body?.readable ? + isReadableNodeStream(body?.readable) ? body?.readable : + duplexify(body.readable) : + undefined; + + const writable = body?.writable ? + isWritableNodeStream(body?.writable) ? body?.writable : + duplexify(body.writable) : + undefined; + + return _duplexify({ readable, writable }); + } + + const then = body?.then; + if (typeof then === 'function') { + let d; + + FunctionPrototypeCall( + then, + body, + (val) => { + if (val != null) { + d.push(val); + } + d.push(null); + }, + (err) => { + destroyer(d, err); + } + ); + + return d = new Duplexify({ + objectMode: true, + writable: false, + read() {} + }); + } + + throw new ERR_INVALID_ARG_TYPE( + name, + ['Blob', 'ReadableStream', 'WritableStream', 'Stream', 'Iterable', + 'AsyncIterable', 'Function', '{ readable, writable } pair', 'Promise'], + body); +}; + +function fromAsyncGen(fn) { + let { promise, resolve } = createDeferredPromise(); + const ac = new AbortController(); + const signal = ac.signal; + const value = fn(async function*() { + while (true) { + const _promise = promise; + promise = null; + const { chunk, done, cb } = await _promise; + process.nextTick(cb); + if (done) return; + if (signal.aborted) + throw new AbortError(undefined, { cause: signal.reason }); + ({ promise, resolve } = createDeferredPromise()); + yield chunk; + } + }(), { signal }); + + return { + value, + write(chunk, encoding, cb) { + const _resolve = resolve; + resolve = null; + _resolve({ chunk, done: false, cb }); + }, + final(cb) { + const _resolve = resolve; + resolve = null; + _resolve({ done: true, cb }); + }, + destroy(err, cb) { + ac.abort(); + cb(err); + } + }; +} + +function _duplexify(pair) { + const r = pair.readable && typeof pair.readable.read !== 'function' ? + Readable.wrap(pair.readable) : pair.readable; + const w = pair.writable; + + let readable = !!isReadable(r); + let writable = !!isWritable(w); + + let ondrain; + let onfinish; + let onreadable; + let onclose; + let d; + + function onfinished(err) { + const cb = onclose; + onclose = null; + + if (cb) { + cb(err); + } else if (err) { + d.destroy(err); + } else if (!readable && !writable) { + d.destroy(); + } + } + + // TODO(ronag): Avoid double buffering. + // Implement Writable/Readable/Duplex traits. + // See, https://github.com/nodejs/node/pull/33515. 
+ d = new Duplexify({ + // TODO (ronag): highWaterMark? + readableObjectMode: !!r?.readableObjectMode, + writableObjectMode: !!w?.writableObjectMode, + readable, + writable, + }); + + if (writable) { + eos(w, (err) => { + writable = false; + if (err) { + destroyer(r, err); + } + onfinished(err); + }); + + d._write = function(chunk, encoding, callback) { + if (w.write(chunk, encoding)) { + callback(); + } else { + ondrain = callback; + } + }; + + d._final = function(callback) { + w.end(); + onfinish = callback; + }; + + w.on('drain', function() { + if (ondrain) { + const cb = ondrain; + ondrain = null; + cb(); + } + }); + + w.on('finish', function() { + if (onfinish) { + const cb = onfinish; + onfinish = null; + cb(); + } + }); + } + + if (readable) { + eos(r, (err) => { + readable = false; + if (err) { + destroyer(r, err); + } + onfinished(err); + }); + + r.on('readable', function() { + if (onreadable) { + const cb = onreadable; + onreadable = null; + cb(); + } + }); + + r.on('end', function() { + d.push(null); + }); + + d._read = function() { + while (true) { + const buf = r.read(); + + if (buf === null) { + onreadable = d._read; + return; + } + + if (!d.push(buf)) { + return; + } + } + }; + } + + d._destroy = function(err, callback) { + if (!err && onclose !== null) { + err = new AbortError(); + } + + onreadable = null; + ondrain = null; + onfinish = null; + + if (onclose === null) { + callback(err); + } else { + onclose = callback; + destroyer(w, err); + destroyer(r, err); + } + }; + + return d; +} diff --git a/lib/internal/streams/end-of-stream.js b/lib/internal/streams/end-of-stream.js new file mode 100644 index 0000000000..d4e8aa6ad1 --- /dev/null +++ b/lib/internal/streams/end-of-stream.js @@ -0,0 +1,252 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). + +'use strict'; + +const { + AbortError, + codes, +} = require('../errors'); +const { + ERR_STREAM_PREMATURE_CLOSE +} = codes; +const { once } = require('../../util'); +const { + validateAbortSignal, + validateFunction, + validateObject, +} = require('../validators'); + +const { Promise } = require('../primordials'); + +const { + isClosed, + isReadable, + isReadableNodeStream, + isReadableFinished, + isWritable, + isWritableNodeStream, + isWritableFinished, + isNodeStream, + willEmitClose: _willEmitClose, +} = require('./utils'); + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + +const nop = () => {}; + +function eos(stream, options, callback) { + if (arguments.length === 2) { + callback = options; + options = {}; + } else if (options == null) { + options = {}; + } else { + validateObject(options, 'options'); + } + validateFunction(callback, 'callback'); + validateAbortSignal(options.signal, 'options.signal'); + + callback = once(callback); + + const readable = options.readable ?? isReadableNodeStream(stream); + const writable = options.writable ?? isWritableNodeStream(stream); + + if (!isNodeStream(stream)) { + // TODO: Webstreams. + // TODO: Throw INVALID_ARG_TYPE. + } + + const wState = stream._writableState; + const rState = stream._readableState; + + const onlegacyfinish = () => { + if (!stream.writable) { + onfinish(); + } + }; + + // TODO (ronag): Improve soft detection to include core modules and + // common ecosystem modules that do properly emit 'close' but fail + // this generic check. 
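+  // Only trust a future 'close' event when the stream itself reports that
+  // it will emit one and the caller's readable/writable expectations match
+  // what the stream actually exposes.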
+ let willEmitClose = ( + _willEmitClose(stream) && + isReadableNodeStream(stream) === readable && + isWritableNodeStream(stream) === writable + ); + + let writableFinished = isWritableFinished(stream, false); + const onfinish = () => { + writableFinished = true; + // Stream should not be destroyed here. If it is that + // means that user space is doing something differently and + // we cannot trust willEmitClose. + if (stream.destroyed) { + willEmitClose = false; + } + + if (willEmitClose && (!stream.readable || readable)) { + return; + } + + if (!readable || readableFinished) { + callback.call(stream); + } + }; + + let readableFinished = isReadableFinished(stream, false); + const onend = () => { + readableFinished = true; + // Stream should not be destroyed here. If it is that + // means that user space is doing something differently and + // we cannot trust willEmitClose. + if (stream.destroyed) { + willEmitClose = false; + } + + if (willEmitClose && (!stream.writable || writable)) { + return; + } + + if (!writable || writableFinished) { + callback.call(stream); + } + }; + + const onerror = (err) => { + callback.call(stream, err); + }; + + let closed = isClosed(stream); + + const onclose = () => { + closed = true; + + const errored = wState?.errored || rState?.errored; + + if (errored && typeof errored !== 'boolean') { + return callback.call(stream, errored); + } + + if (readable && !readableFinished && isReadableNodeStream(stream, true)) { + if (!isReadableFinished(stream, false)) + return callback.call(stream, + new ERR_STREAM_PREMATURE_CLOSE()); + } + if (writable && !writableFinished) { + if (!isWritableFinished(stream, false)) + return callback.call(stream, + new ERR_STREAM_PREMATURE_CLOSE()); + } + + callback.call(stream); + }; + + const onrequest = () => { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + if (!willEmitClose) { + stream.on('abort', onclose); + } + if (stream.req) { + onrequest(); + } else { + stream.on('request', onrequest); + } + } else if (writable && !wState) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + // Not all streams will emit 'close' after 'aborted'. 
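+  // e.g. http.IncomingMessage exposes a boolean `aborted` flag and can
+  // emit 'aborted' without a following 'close'.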
+ if (!willEmitClose && typeof stream.aborted === 'boolean') { + stream.on('aborted', onclose); + } + + stream.on('end', onend); + stream.on('finish', onfinish); + if (options.error !== false) { + stream.on('error', onerror); + } + stream.on('close', onclose); + + if (closed) { + process.nextTick(onclose); + } else if (wState?.errorEmitted || rState?.errorEmitted) { + if (!willEmitClose) { + process.nextTick(onclose); + } + } else if ( + !readable && + (!willEmitClose || isReadable(stream)) && + (writableFinished || isWritable(stream) === false) + ) { + process.nextTick(onclose); + } else if ( + !writable && + (!willEmitClose || isWritable(stream)) && + (readableFinished || isReadable(stream) === false) + ) { + process.nextTick(onclose); + } else if ((rState && stream.req && stream.aborted)) { + process.nextTick(onclose); + } + + const cleanup = () => { + callback = nop; + stream.removeListener('aborted', onclose); + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; + + if (options.signal && !closed) { + const abort = () => { + // Keep it because cleanup removes it. + const endCallback = callback; + cleanup(); + endCallback.call( + stream, + new AbortError(undefined, { cause: options.signal.reason })); + }; + if (options.signal.aborted) { + process.nextTick(abort); + } else { + const originalCallback = callback; + callback = once((...args) => { + options.signal.removeEventListener('abort', abort); + originalCallback.apply(stream, args); + }); + options.signal.addEventListener('abort', abort); + } + } + + return cleanup; +} + +function finished(stream, opts) { + return new Promise((resolve, reject) => { + eos(stream, opts, (err) => { + if (err) { + reject(err); + } else { + resolve(); + } + }); + }); +} + +module.exports = eos; +module.exports.finished = finished; diff --git a/lib/internal/streams/from.js b/lib/internal/streams/from.js new file mode 100644 index 0000000000..d00a9e8a88 --- /dev/null +++ b/lib/internal/streams/from.js @@ -0,0 +1,113 @@ +'use strict'; + +const { + PromisePrototypeThen, + SymbolAsyncIterator, + SymbolIterator, +} = require('../primordials'); +const { Buffer } = require('buffer'); + +const { + ERR_INVALID_ARG_TYPE, + ERR_STREAM_NULL_VALUES +} = require('../errors').codes; + +function from(Readable, iterable, opts) { + let iterator; + if (typeof iterable === 'string' || iterable instanceof Buffer) { + return new Readable({ + objectMode: true, + ...opts, + read() { + this.push(iterable); + this.push(null); + } + }); + } + + let isAsync; + if (iterable && iterable[SymbolAsyncIterator]) { + isAsync = true; + iterator = iterable[SymbolAsyncIterator](); + } else if (iterable && iterable[SymbolIterator]) { + isAsync = false; + iterator = iterable[SymbolIterator](); + } else { + throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + } + + const readable = new Readable({ + objectMode: true, + highWaterMark: 1, + // TODO(ronag): What options should be allowed? + ...opts, + }); + + // Flag to protect against _read + // being called before last iteration completion. 
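+  // (readable.push() inside next() may synchronously trigger another
+  // _read() call, so only one pump loop is allowed to run at a time.)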
+ let reading = false; + + readable._read = function() { + if (!reading) { + reading = true; + next(); + } + }; + + readable._destroy = function(error, cb) { + PromisePrototypeThen( + close(error), + () => process.nextTick(cb, error), // nextTick is here in case cb throws + (e) => process.nextTick(cb, e || error), + ); + }; + + async function close(error) { + const hadError = (error !== undefined) && (error !== null); + const hasThrow = typeof iterator.throw === 'function'; + if (hadError && hasThrow) { + const { value, done } = await iterator.throw(error); + await value; + if (done) { + return; + } + } + if (typeof iterator.return === 'function') { + const { value } = await iterator.return(); + await value; + } + } + + async function next() { + for (;;) { + try { + const { value, done } = isAsync ? + await iterator.next() : + iterator.next(); + + if (done) { + readable.push(null); + } else { + const res = (value && + typeof value.then === 'function') ? + await value : + value; + if (res === null) { + reading = false; + throw new ERR_STREAM_NULL_VALUES(); + } else if (readable.push(res)) { + continue; + } else { + reading = false; + } + } + } catch (err) { + readable.destroy(err); + } + break; + } + } + return readable; +} + +module.exports = from; diff --git a/lib/internal/streams/lazy_transform.js b/lib/internal/streams/lazy_transform.js new file mode 100644 index 0000000000..45949f0706 --- /dev/null +++ b/lib/internal/streams/lazy_transform.js @@ -0,0 +1,63 @@ +// LazyTransform is a special type of Transform stream that is lazily loaded. +// This is used for performance with bi-API-ship: when two APIs are available +// for the stream, one conventional and one non-conventional. +'use strict'; + +const { + ObjectDefineProperties, + ObjectDefineProperty, + ObjectSetPrototypeOf, +} = require('../primordials'); + +const stream = require('stream'); + +const { + getDefaultEncoding +} = require('../crypto/util'); + +module.exports = LazyTransform; + +function LazyTransform(options) { + this._options = options; +} +ObjectSetPrototypeOf(LazyTransform.prototype, stream.Transform.prototype); +ObjectSetPrototypeOf(LazyTransform, stream.Transform); + +function makeGetter(name) { + return function() { + stream.Transform.call(this, this._options); + this._writableState.decodeStrings = false; + + if (!this._options || !this._options.defaultEncoding) { + this._writableState.defaultEncoding = getDefaultEncoding(); + } + + return this[name]; + }; +} + +function makeSetter(name) { + return function(val) { + ObjectDefineProperty(this, name, { + value: val, + enumerable: true, + configurable: true, + writable: true + }); + }; +} + +ObjectDefineProperties(LazyTransform.prototype, { + _readableState: { + get: makeGetter('_readableState'), + set: makeSetter('_readableState'), + configurable: true, + enumerable: true + }, + _writableState: { + get: makeGetter('_writableState'), + set: makeSetter('_writableState'), + configurable: true, + enumerable: true + } +}); diff --git a/lib/internal/streams/legacy.js b/lib/internal/streams/legacy.js new file mode 100644 index 0000000000..0015a134e9 --- /dev/null +++ b/lib/internal/streams/legacy.js @@ -0,0 +1,114 @@ +'use strict'; + +const { + ArrayIsArray, + ObjectSetPrototypeOf, +} = require('../primordials'); + +const EE = require('events'); + +function Stream(opts) { + EE.call(this, opts); +} +ObjectSetPrototypeOf(Stream.prototype, EE.prototype); +ObjectSetPrototypeOf(Stream, EE); + +Stream.prototype.pipe = function(dest, options) { + const source = this; + + function 
ondata(chunk) { + if (dest.writable && dest.write(chunk) === false && source.pause) { + source.pause(); + } + } + + source.on('data', ondata); + + function ondrain() { + if (source.readable && source.resume) { + source.resume(); + } + } + + dest.on('drain', ondrain); + + // If the 'end' option is not supplied, dest.end() will be called when + // source gets the 'end' or 'close' events. Only dest.end() once. + if (!dest._isStdio && (!options || options.end !== false)) { + source.on('end', onend); + source.on('close', onclose); + } + + let didOnEnd = false; + function onend() { + if (didOnEnd) return; + didOnEnd = true; + + dest.end(); + } + + + function onclose() { + if (didOnEnd) return; + didOnEnd = true; + + if (typeof dest.destroy === 'function') dest.destroy(); + } + + // Don't leave dangling pipes when there are errors. + function onerror(er) { + cleanup(); + if (EE.listenerCount(this, 'error') === 0) { + this.emit('error', er); + } + } + + prependListener(source, 'error', onerror); + prependListener(dest, 'error', onerror); + + // Remove all the event listeners that were added. + function cleanup() { + source.removeListener('data', ondata); + dest.removeListener('drain', ondrain); + + source.removeListener('end', onend); + source.removeListener('close', onclose); + + source.removeListener('error', onerror); + dest.removeListener('error', onerror); + + source.removeListener('end', cleanup); + source.removeListener('close', cleanup); + + dest.removeListener('close', cleanup); + } + + source.on('end', cleanup); + source.on('close', cleanup); + + dest.on('close', cleanup); + dest.emit('pipe', source); + + // Allow for unix-like usage: A.pipe(B).pipe(C) + return dest; +}; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') + return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. 
+ if (!emitter._events || !emitter._events[event]) + emitter.on(event, fn); + else if (ArrayIsArray(emitter._events[event])) + emitter._events[event].unshift(fn); + else + emitter._events[event] = [fn, emitter._events[event]]; +} + +module.exports = { Stream, prependListener }; diff --git a/lib/internal/streams/operators.js b/lib/internal/streams/operators.js new file mode 100644 index 0000000000..5d895bff95 --- /dev/null +++ b/lib/internal/streams/operators.js @@ -0,0 +1,409 @@ +'use strict'; + + + if (typeof AbortController === 'undefined') { + globalThis.AbortController = require('abort-controller').AbortController; + } + + +const { + codes: { + ERR_INVALID_ARG_TYPE, + ERR_MISSING_ARGS, + ERR_OUT_OF_RANGE, + }, + AbortError, +} = require('../errors'); +const { + validateAbortSignal, + validateInteger, + validateObject, +} = require('../validators'); +const kWeakHandler = require('../primordials').Symbol('kWeak'); +const { finished } = require('./end-of-stream'); + +const { + ArrayPrototypePush, + MathFloor, + Number, + NumberIsNaN, + Promise, + PromiseReject, + PromisePrototypeCatch, + Symbol, +} = require('../primordials'); + +const kEmpty = Symbol('kEmpty'); +const kEof = Symbol('kEof'); + +function map(fn, options) { + if (typeof fn !== 'function') { + throw new ERR_INVALID_ARG_TYPE( + 'fn', ['Function', 'AsyncFunction'], fn); + } + if (options != null) { + validateObject(options, 'options'); + } + if (options?.signal != null) { + validateAbortSignal(options.signal, 'options.signal'); + } + + let concurrency = 1; + if (options?.concurrency != null) { + concurrency = MathFloor(options.concurrency); + } + + validateInteger(concurrency, 'concurrency', 1); + + return async function* map() { + const ac = new AbortController(); + const stream = this; + const queue = []; + const signal = ac.signal; + const signalOpt = { signal }; + + const abort = () => ac.abort(); + if (options?.signal?.aborted) { + abort(); + } + + options?.signal?.addEventListener('abort', abort); + + let next; + let resume; + let done = false; + + function onDone() { + done = true; + } + + async function pump() { + try { + for await (let val of stream) { + if (done) { + return; + } + + if (signal.aborted) { + throw new AbortError(); + } + + try { + val = fn(val, signalOpt); + } catch (err) { + val = PromiseReject(err); + } + + if (val === kEmpty) { + continue; + } + + if (typeof val?.catch === 'function') { + val.catch(onDone); + } + + queue.push(val); + if (next) { + next(); + next = null; + } + + if (!done && queue.length && queue.length >= concurrency) { + await new Promise((resolve) => { + resume = resolve; + }); + } + } + queue.push(kEof); + } catch (err) { + const val = PromiseReject(err); + PromisePrototypeCatch(val, onDone); + queue.push(val); + } finally { + done = true; + if (next) { + next(); + next = null; + } + options?.signal?.removeEventListener('abort', abort); + } + } + + pump(); + + try { + while (true) { + while (queue.length > 0) { + const val = await queue[0]; + + if (val === kEof) { + return; + } + + if (signal.aborted) { + throw new AbortError(); + } + + if (val !== kEmpty) { + yield val; + } + + queue.shift(); + if (resume) { + resume(); + resume = null; + } + } + + await new Promise((resolve) => { + next = resolve; + }); + } + } finally { + ac.abort(); + + done = true; + if (resume) { + resume(); + resume = null; + } + } + }.call(this); +} + +function asIndexedPairs(options = undefined) { + if (options != null) { + validateObject(options, 'options'); + } + if (options?.signal != null) { + 
validateAbortSignal(options.signal, 'options.signal'); + } + + return async function* asIndexedPairs() { + let index = 0; + for await (const val of this) { + if (options?.signal?.aborted) { + throw new AbortError({ cause: options.signal.reason }); + } + yield [index++, val]; + } + }.call(this); +} + +async function some(fn, options = undefined) { + // eslint-disable-next-line no-unused-vars + for await (const unused of filter.call(this, fn, options)) { + return true; + } + return false; +} + +async function every(fn, options = undefined) { + if (typeof fn !== 'function') { + throw new ERR_INVALID_ARG_TYPE( + 'fn', ['Function', 'AsyncFunction'], fn); + } + // https://en.wikipedia.org/wiki/De_Morgan%27s_laws + return !(await some.call(this, async (...args) => { + return !(await fn(...args)); + }, options)); +} + +async function find(fn, options) { + for await (const result of filter.call(this, fn, options)) { + return result; + } + return undefined; +} + +async function forEach(fn, options) { + if (typeof fn !== 'function') { + throw new ERR_INVALID_ARG_TYPE( + 'fn', ['Function', 'AsyncFunction'], fn); + } + async function forEachFn(value, options) { + await fn(value, options); + return kEmpty; + } + // eslint-disable-next-line no-unused-vars + for await (const unused of map.call(this, forEachFn, options)); +} + +function filter(fn, options) { + if (typeof fn !== 'function') { + throw new ERR_INVALID_ARG_TYPE( + 'fn', ['Function', 'AsyncFunction'], fn); + } + async function filterFn(value, options) { + if (await fn(value, options)) { + return value; + } + return kEmpty; + } + return map.call(this, filterFn, options); +} + +// Specific to provide better error to reduce since the argument is only +// missing if the stream has no items in it - but the code is still appropriate +class ReduceAwareErrMissingArgs extends ERR_MISSING_ARGS { + constructor() { + super('reduce'); + this.message = 'Reduce of an empty stream requires an initial value'; + } +} + +async function reduce(reducer, initialValue, options) { + if (typeof reducer !== 'function') { + throw new ERR_INVALID_ARG_TYPE( + 'reducer', ['Function', 'AsyncFunction'], reducer); + } + if (options != null) { + validateObject(options, 'options'); + } + if (options?.signal != null) { + validateAbortSignal(options.signal, 'options.signal'); + } + + let hasInitialValue = arguments.length > 1; + if (options?.signal?.aborted) { + const err = new AbortError(undefined, { cause: options.signal.reason }); + this.once('error', () => {}); // The error is already propagated + await finished(this.destroy(err)); + throw err; + } + const ac = new AbortController(); + const signal = ac.signal; + if (options?.signal) { + const opts = { once: true, [kWeakHandler]: this }; + options.signal.addEventListener('abort', () => ac.abort(), opts); + } + let gotAnyItemFromStream = false; + try { + for await (const value of this) { + gotAnyItemFromStream = true; + if (options?.signal?.aborted) { + throw new AbortError(); + } + if (!hasInitialValue) { + initialValue = value; + hasInitialValue = true; + } else { + initialValue = await reducer(initialValue, value, { signal }); + } + } + if (!gotAnyItemFromStream && !hasInitialValue) { + throw new ReduceAwareErrMissingArgs(); + } + } finally { + ac.abort(); + } + return initialValue; +} + +async function toArray(options) { + if (options != null) { + validateObject(options, 'options'); + } + if (options?.signal != null) { + validateAbortSignal(options.signal, 'options.signal'); + } + + const result = []; + for await (const val 
of this) { + if (options?.signal?.aborted) { + throw new AbortError(undefined, { cause: options.signal.reason }); + } + ArrayPrototypePush(result, val); + } + return result; +} + +function flatMap(fn, options) { + const values = map.call(this, fn, options); + return async function* flatMap() { + for await (const val of values) { + yield* val; + } + }.call(this); +} + +function toIntegerOrInfinity(number) { + // We coerce here to align with the spec + // https://github.com/tc39/proposal-iterator-helpers/issues/169 + number = Number(number); + if (NumberIsNaN(number)) { + return 0; + } + if (number < 0) { + throw new ERR_OUT_OF_RANGE('number', '>= 0', number); + } + return number; +} + +function drop(number, options = undefined) { + if (options != null) { + validateObject(options, 'options'); + } + if (options?.signal != null) { + validateAbortSignal(options.signal, 'options.signal'); + } + + number = toIntegerOrInfinity(number); + return async function* drop() { + if (options?.signal?.aborted) { + throw new AbortError(); + } + for await (const val of this) { + if (options?.signal?.aborted) { + throw new AbortError(); + } + if (number-- <= 0) { + yield val; + } + } + }.call(this); +} + +function take(number, options = undefined) { + if (options != null) { + validateObject(options, 'options'); + } + if (options?.signal != null) { + validateAbortSignal(options.signal, 'options.signal'); + } + + number = toIntegerOrInfinity(number); + return async function* take() { + if (options?.signal?.aborted) { + throw new AbortError(); + } + for await (const val of this) { + if (options?.signal?.aborted) { + throw new AbortError(); + } + if (number-- > 0) { + yield val; + } else { + return; + } + } + }.call(this); +} + +module.exports.streamReturningOperators = { + asIndexedPairs, + drop, + filter, + flatMap, + map, + take, +}; + +module.exports.promiseReturningOperators = { + every, + forEach, + reduce, + toArray, + some, + find, +}; diff --git a/lib/internal/streams/passthrough.js b/lib/internal/streams/passthrough.js new file mode 100644 index 0000000000..4320521185 --- /dev/null +++ b/lib/internal/streams/passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. 
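+//
+// A minimal usage sketch, assuming the package entry point re-exports
+// PassThrough the same way require('stream') does:
+//
+//   const { PassThrough } = require('readable-stream');
+//   const pass = new PassThrough();
+//   pass.on('data', (chunk) => console.log(chunk.toString())); // 'hello'
+//   pass.write('hello');
+//   pass.end();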
+ +'use strict'; + +const { + ObjectSetPrototypeOf, +} = require('../primordials'); + +module.exports = PassThrough; + +const Transform = require('./transform'); +ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype); +ObjectSetPrototypeOf(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) + return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function(chunk, encoding, cb) { + cb(null, chunk); +}; diff --git a/lib/internal/streams/pipeline.js b/lib/internal/streams/pipeline.js new file mode 100644 index 0000000000..be1a7bdc92 --- /dev/null +++ b/lib/internal/streams/pipeline.js @@ -0,0 +1,357 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). + +'use strict'; + +const { + ArrayIsArray, + Promise, + SymbolAsyncIterator, +} = require('../primordials'); + +const eos = require('./end-of-stream'); +const { once } = require('../../util'); +const destroyImpl = require('./destroy'); +const Duplex = require('./duplex'); +const { + aggregateTwoErrors, + codes: { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_RETURN_VALUE, + ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED, + }, + AbortError, +} = require('../errors'); + +const { + validateCallback, + validateAbortSignal +} = require('../validators'); + +const { + isIterable, + isReadableNodeStream, + isNodeStream, +} = require('./utils'); + + if (typeof AbortController === 'undefined') { + globalThis.AbortController = require('abort-controller').AbortController; + } + + +let PassThrough; +let Readable; + +function destroyer(stream, reading, writing) { + let finished = false; + stream.on('close', () => { + finished = true; + }); + + eos(stream, { readable: reading, writable: writing }, (err) => { + finished = !err; + }); + + return (err) => { + if (finished) return; + finished = true; + destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe')); + }; +} + +function popCallback(streams) { + // Streams should never be an empty array. It should always contain at least + // a single stream. Therefore optimize for the average case instead of + // checking for length === 0 as well. + validateCallback(streams[streams.length - 1]); + return streams.pop(); +} + +function makeAsyncIterable(val) { + if (isIterable(val)) { + return val; + } else if (isReadableNodeStream(val)) { + // Legacy streams are not Iterable. 
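+    // Wrap them in an async generator that simply delegates to
+    // Readable.prototype[Symbol.asyncIterator].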
+ return fromReadable(val); + } + throw new ERR_INVALID_ARG_TYPE( + 'val', ['Readable', 'Iterable', 'AsyncIterable'], val); +} + +async function* fromReadable(val) { + if (!Readable) { + Readable = require('./readable'); + } + + yield* Readable.prototype[SymbolAsyncIterator].call(val); +} + +async function pump(iterable, writable, finish, { end }) { + let error; + let onresolve = null; + + const resume = (err) => { + if (err) { + error = err; + } + + if (onresolve) { + const callback = onresolve; + onresolve = null; + callback(); + } + }; + + const wait = () => new Promise((resolve, reject) => { + if (error) { + reject(error); + } else { + onresolve = () => { + if (error) { + reject(error); + } else { + resolve(); + } + }; + } + }); + + writable.on('drain', resume); + const cleanup = eos(writable, { readable: false }, resume); + + try { + if (writable.writableNeedDrain) { + await wait(); + } + + for await (const chunk of iterable) { + if (!writable.write(chunk)) { + await wait(); + } + } + + if (end) { + writable.end(); + } + + await wait(); + + finish(); + } catch (err) { + finish(error !== err ? aggregateTwoErrors(error, err) : err); + } finally { + cleanup(); + writable.off('drain', resume); + } +} + +function pipeline(...streams) { + return pipelineImpl(streams, once(popCallback(streams))); +} + +function pipelineImpl(streams, callback, opts) { + if (streams.length === 1 && ArrayIsArray(streams[0])) { + streams = streams[0]; + } + + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + + const ac = new AbortController(); + const signal = ac.signal; + const outerSignal = opts?.signal; + + validateAbortSignal(outerSignal, 'options.signal'); + + function abort() { + finishImpl(new AbortError()); + } + + outerSignal?.addEventListener('abort', abort); + + let error; + let value; + const destroys = []; + + let finishCount = 0; + + function finish(err) { + finishImpl(err, --finishCount === 0); + } + + function finishImpl(err, final) { + if (err && (!error || error.code === 'ERR_STREAM_PREMATURE_CLOSE')) { + error = err; + } + + if (!error && !final) { + return; + } + + while (destroys.length) { + destroys.shift()(error); + } + + outerSignal?.removeEventListener('abort', abort); + ac.abort(); + + if (final) { + process.nextTick(callback, error, value); + } + } + + let ret; + for (let i = 0; i < streams.length; i++) { + const stream = streams[i]; + const reading = i < streams.length - 1; + const writing = i > 0; + const end = reading || opts?.end !== false; + + if (isNodeStream(stream)) { + if (end) { + destroys.push(destroyer(stream, reading, writing)); + } + + // Catch stream errors that occur after pipe/pump has completed. 
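+      // AbortError and ERR_STREAM_PREMATURE_CLOSE are filtered out below,
+      // as they merely reflect teardown of the pipeline itself.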
+ stream.on('error', (err) => { + if ( + err && + err.name !== 'AbortError' && + err.code !== 'ERR_STREAM_PREMATURE_CLOSE' + ) { + finish(err); + } + }); + } + + if (i === 0) { + if (typeof stream === 'function') { + ret = stream({ signal }); + if (!isIterable(ret)) { + throw new ERR_INVALID_RETURN_VALUE( + 'Iterable, AsyncIterable or Stream', 'source', ret); + } + } else if (isIterable(stream) || isReadableNodeStream(stream)) { + ret = stream; + } else { + ret = Duplex.from(stream); + } + } else if (typeof stream === 'function') { + ret = makeAsyncIterable(ret); + ret = stream(ret, { signal }); + + if (reading) { + if (!isIterable(ret, true)) { + throw new ERR_INVALID_RETURN_VALUE( + 'AsyncIterable', `transform[${i - 1}]`, ret); + } + } else { + if (!PassThrough) { + PassThrough = require('./passthrough'); + } + + // If the last argument to pipeline is not a stream + // we must create a proxy stream so that pipeline(...) + // always returns a stream which can be further + // composed through `.pipe(stream)`. + + const pt = new PassThrough({ + objectMode: true + }); + + // Handle Promises/A+ spec, `then` could be a getter that throws on + // second use. + const then = ret?.then; + if (typeof then === 'function') { + finishCount++; + then.call(ret, + (val) => { + value = val; + if (val != null) { + pt.write(val); + } + if (end) { + pt.end(); + } + process.nextTick(finish); + }, (err) => { + pt.destroy(err); + process.nextTick(finish, err); + }, + ); + } else if (isIterable(ret, true)) { + finishCount++; + pump(ret, pt, finish, { end }); + } else { + throw new ERR_INVALID_RETURN_VALUE( + 'AsyncIterable or Promise', 'destination', ret); + } + + ret = pt; + + destroys.push(destroyer(ret, false, true)); + } + } else if (isNodeStream(stream)) { + if (isReadableNodeStream(ret)) { + finishCount += 2; + pipe(ret, stream, finish, { end }); + } else if (isIterable(ret)) { + finishCount++; + pump(ret, stream, finish, { end }); + } else { + throw new ERR_INVALID_ARG_TYPE( + 'val', ['Readable', 'Iterable', 'AsyncIterable'], ret); + } + ret = stream; + } else { + ret = Duplex.from(stream); + } + } + + if (signal?.aborted || outerSignal?.aborted) { + process.nextTick(abort); + } + + return ret; +} + +function pipe(src, dst, finish, { end }) { + src.pipe(dst, { end }); + + if (end) { + // Compat. Before node v10.12.0 stdio used to throw an error so + // pipe() did/does not end() stdio destinations. + // Now they allow it but "secretly" don't close the underlying fd. + src.once('end', () => dst.end()); + } else { + finish(); + } + + eos(src, { readable: true, writable: false }, (err) => { + const rState = src._readableState; + if ( + err && + err.code === 'ERR_STREAM_PREMATURE_CLOSE' && + (rState && rState.ended && !rState.errored && !rState.errorEmitted) + ) { + // Some readable streams will emit 'close' before 'end'. However, since + // this is on the readable side 'end' should still be emitted if the + // stream has been ended and no error emitted. This should be allowed in + // favor of backwards compatibility. Since the stream is piped to a + // destination this should not result in any observable difference. + // We don't need to check if this is a writable premature close since + // eos will only fail with premature close on the reading side for + // duplex streams. 
+ src + .once('end', finish) + .once('error', finish); + } else { + finish(err); + } + }); + eos(dst, { readable: false, writable: true }, finish); +} + +module.exports = { pipelineImpl, pipeline }; diff --git a/lib/internal/streams/readable.js b/lib/internal/streams/readable.js new file mode 100644 index 0000000000..b01d40b564 --- /dev/null +++ b/lib/internal/streams/readable.js @@ -0,0 +1,1407 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +const { + ArrayPrototypeIndexOf, + NumberIsInteger, + NumberIsNaN, + NumberParseInt, + ObjectDefineProperties, + ObjectKeys, + ObjectSetPrototypeOf, + Promise, + SafeSet, + SymbolAsyncIterator, + Symbol +} = require('../primordials'); + +module.exports = Readable; +Readable.ReadableState = ReadableState; + +const EE = require('events'); +const { Stream, prependListener } = require('./legacy'); +const { Buffer } = require('buffer'); + +const { + addAbortSignal, +} = require('./add-abort-signal'); +const eos = require('./end-of-stream'); + +let debug = require('../../util').debuglog('stream', (fn) => { + debug = fn; +}); +const BufferList = require('./buffer_list'); +const destroyImpl = require('./destroy'); +const { + getHighWaterMark, + getDefaultHighWaterMark +} = require('./state'); + +const { + aggregateTwoErrors, + codes: { + ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED, + ERR_OUT_OF_RANGE, + ERR_STREAM_PUSH_AFTER_EOF, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT, + } +} = require('../errors'); +const { validateObject } = require('../validators'); + +const kPaused = Symbol('kPaused'); + +const { StringDecoder } = require('string_decoder'); +const from = require('./from'); + +ObjectSetPrototypeOf(Readable.prototype, Stream.prototype); +ObjectSetPrototypeOf(Readable, Stream); +const nop = () => {}; + +const { errorOrDestroy } = destroyImpl; + +function ReadableState(options, stream, isDuplex) { + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + if (typeof isDuplex !== 'boolean') + isDuplex = stream instanceof require('./duplex'); + + // Object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away. 
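+  // (For example, new Readable({ objectMode: true }) buffers arbitrary
+  // JavaScript values and counts `length` in items rather than bytes.)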
+ this.objectMode = !!(options && options.objectMode); + + if (isDuplex) + this.objectMode = this.objectMode || + !!(options && options.readableObjectMode); + + // The point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = options ? + getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex) : + getDefaultHighWaterMark(false); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift(). + this.buffer = new BufferList(); + this.length = 0; + this.pipes = []; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // Stream is still being constructed and cannot be + // destroyed until construction finished or failed. + // Async construction is opt in, therefore we start as + // constructed. + this.constructed = true; + + // A flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // Whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this[kPaused] = null; + + // True if the error was already emitted and should not be thrown again. + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = !options || options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish'). + this.autoDestroy = !options || options.autoDestroy !== false; + + // Has it been destroyed. + this.destroyed = false; + + // Indicates whether the stream has errored. When true no further + // _read calls, 'data' or 'readable' events should occur. This is needed + // since when autoDestroy is disabled we need a way to tell whether the + // stream has failed. + this.errored = null; + + // Indicates whether the stream has finished destroying. + this.closed = false; + + // True if close has been emitted or would have been emitted + // depending on emitClose. + this.closeEmitted = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = (options && options.defaultEncoding) || 'utf8'; + + // Ref the piped dest which we need a drain event on it + // type: null | Writable | Set. + this.awaitDrainWriters = null; + this.multiAwaitDrain = false; + + // If true, a maybeReadMore has been scheduled. + this.readingMore = false; + + this.dataEmitted = false; + + this.decoder = null; + this.encoding = null; + if (options && options.encoding) { + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + + +function Readable(options) { + if (!(this instanceof Readable)) + return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5. 
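+  // A typical construction passes the implementation through options, e.g.
+  // (sketch):
+  //
+  //   new Readable({ read() { this.push('some data'); this.push(null); } });
+  //
+  // The option checks below copy read/destroy/construct onto this instance.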
+ const isDuplex = this instanceof require('./duplex'); + + this._readableState = new ReadableState(options, this, isDuplex); + + if (options) { + if (typeof options.read === 'function') + this._read = options.read; + + if (typeof options.destroy === 'function') + this._destroy = options.destroy; + + if (typeof options.construct === 'function') + this._construct = options.construct; + + if (options.signal && !isDuplex) + addAbortSignal(options.signal, this); + } + + Stream.call(this, options); + + destroyImpl.construct(this, () => { + if (this._readableState.needReadable) { + maybeReadMore(this, this._readableState); + } + }); +} + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function(err, cb) { + cb(err); +}; + +Readable.prototype[EE.captureRejectionSymbol] = function(err) { + this.destroy(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. +Readable.prototype.push = function(chunk, encoding) { + return readableAddChunk(this, chunk, encoding, false); +}; + +// Unshift should *always* be something directly out of read(). +Readable.prototype.unshift = function(chunk, encoding) { + return readableAddChunk(this, chunk, encoding, true); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront) { + debug('readableAddChunk', chunk); + const state = stream._readableState; + + let err; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (state.encoding !== encoding) { + if (addToFront && state.encoding) { + // When unshifting, if state.encoding is set, we have to save + // the string in the BufferList with the state encoding. + chunk = Buffer.from(chunk, encoding).toString(state.encoding); + } else { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + } + } else if (chunk instanceof Buffer) { + encoding = ''; + } else if (Stream._isUint8Array(chunk)) { + chunk = Stream._uint8ArrayToBuffer(chunk); + encoding = ''; + } else if (chunk != null) { + err = new ERR_INVALID_ARG_TYPE( + 'chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + } + + if (err) { + errorOrDestroy(stream, err); + } else if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else if (state.objectMode || (chunk && chunk.length > 0)) { + if (addToFront) { + if (state.endEmitted) + errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT()); + else if (state.destroyed || state.errored) + return false; + else + addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed || state.errored) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) + addChunk(stream, state, chunk, false); + else + maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. 
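+  // A false return value asks the producer to stop calling push() until
+  // _read() is invoked again, mirroring the backpressure signal of
+  // Writable.write().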
+ return !state.ended && + (state.length < state.highWaterMark || state.length === 0); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync && + stream.listenerCount('data') > 0) { + // Use the guard to avoid creating `Set()` repeatedly + // when we have multiple pipes. + if (state.multiAwaitDrain) { + state.awaitDrainWriters.clear(); + } else { + state.awaitDrainWriters = null; + } + + state.dataEmitted = true; + stream.emit('data', chunk); + } else { + // Update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) + state.buffer.unshift(chunk); + else + state.buffer.push(chunk); + + if (state.needReadable) + emitReadable(stream); + } + maybeReadMore(stream, state); +} + +Readable.prototype.isPaused = function() { + const state = this._readableState; + return state[kPaused] === true || state.flowing === false; +}; + +// Backwards compatibility. +Readable.prototype.setEncoding = function(enc) { + const decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8. + this._readableState.encoding = this._readableState.decoder.encoding; + + const buffer = this._readableState.buffer; + // Iterate over current buffer to convert already stored Buffers: + let content = ''; + for (const data of buffer) { + content += decoder.write(data); + } + buffer.clear(); + if (content !== '') + buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB. +const MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n > MAX_HWM) { + throw new ERR_OUT_OF_RANGE('size', '<= 1GiB', n); + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts. + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || (state.length === 0 && state.ended)) + return 0; + if (state.objectMode) + return 1; + if (NumberIsNaN(n)) { + // Only flow one buffer at a time. + if (state.flowing && state.length) + return state.buffer.first().length; + return state.length; + } + if (n <= state.length) + return n; + return state.ended ? state.length : 0; +} + +// You can override either this method, or the async _read(n) below. +Readable.prototype.read = function(n) { + debug('read', n); + // Same as parseInt(undefined, 10), however V8 7.3 performance regressed + // in this scenario, so we are doing it manually. + if (n === undefined) { + n = NaN; + } else if (!NumberIsInteger(n)) { + n = NumberParseInt(n, 10); + } + const state = this._readableState; + const nOrig = n; + + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) + state.highWaterMark = computeNewHighWaterMark(n); + + if (n !== 0) + state.emittedReadable = false; + + // If we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && + state.needReadable && + ((state.highWaterMark !== 0 ? 
+ state.length >= state.highWaterMark : + state.length > 0) || + state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) + endReadable(this); + else + emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // If we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) + endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + let doRead = state.needReadable; + debug('need readable', doRead); + + // If we currently have less than the highWaterMark, then also read some. + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // However, if we've ended, then there's no point, if we're already + // reading, then it's unnecessary, if we're constructing we have to wait, + // and if we're destroyed or errored, then it's not allowed, + if (state.ended || state.reading || state.destroyed || state.errored || + !state.constructed) { + doRead = false; + debug('reading, ended or constructing', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // If the length is currently zero, then we *need* a readable event. + if (state.length === 0) + state.needReadable = true; + + // Call internal read method + try { + const result = this._read(state.highWaterMark); + if (result != null) { + const then = result.then; + if (typeof then === 'function') { + then.call( + result, + nop, + function(err) { + errorOrDestroy(this, err); + }); + } + } + } catch (err) { + errorOrDestroy(this, err); + } + + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) + n = howMuchToRead(nOrig, state); + } + + let ret; + if (n > 0) + ret = fromList(n, state); + else + ret = null; + + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + if (state.multiAwaitDrain) { + state.awaitDrainWriters.clear(); + } else { + state.awaitDrainWriters = null; + } + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. 
+ if (!state.ended) + state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) + endReadable(this); + } + + if (ret !== null && !state.errorEmitted && !state.closeEmitted) { + state.dataEmitted = true; + this.emit('data', ret); + } + + return ret; +}; + +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + const chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + if (state.sync) { + // If we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call. + emitReadable(stream); + } else { + // Emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + state.emittedReadable = true; + // We have to emit readable now that we are EOF. Modules + // in the ecosystem (e.g. dicer) rely on this event being sync. + emitReadable_(stream); + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + const state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} + +function emitReadable_(stream) { + const state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && !state.errored && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } + + // The stream needs another readable event if: + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = + !state.flowing && + !state.ended && + state.length <= state.highWaterMark; + flow(stream); +} + + +// At this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore && state.constructed) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. 
Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && + (state.length < state.highWaterMark || + (state.flowing && state.length === 0))) { + const len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // Didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} + +// Abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function(n) { + throw new ERR_METHOD_NOT_IMPLEMENTED('_read()'); +}; + +Readable.prototype.pipe = function(dest, pipeOpts) { + const src = this; + const state = this._readableState; + + if (state.pipes.length === 1) { + if (!state.multiAwaitDrain) { + state.multiAwaitDrain = true; + state.awaitDrainWriters = new SafeSet( + state.awaitDrainWriters ? [state.awaitDrainWriters] : [] + ); + } + } + + state.pipes.push(dest); + debug('pipe count=%d opts=%j', state.pipes.length, pipeOpts); + + const doEnd = (!pipeOpts || pipeOpts.end !== false) && + dest !== process.stdout && + dest !== process.stderr; + + const endFn = doEnd ? onend : unpipe; + if (state.endEmitted) + process.nextTick(endFn); + else + src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + let ondrain; + + let cleanedUp = false; + function cleanup() { + debug('cleanup'); + // Cleanup event handlers once the pipe is broken. + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + if (ondrain) { + dest.removeListener('drain', ondrain); + } + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // If the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (ondrain && state.awaitDrainWriters && + (!dest._writableState || dest._writableState.needDrain)) + ondrain(); + } + + function pause() { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. 
+ if (!cleanedUp) { + if (state.pipes.length === 1 && state.pipes[0] === dest) { + debug('false write response, pause', 0); + state.awaitDrainWriters = dest; + state.multiAwaitDrain = false; + } else if (state.pipes.length > 1 && state.pipes.includes(dest)) { + debug('false write response, pause', state.awaitDrainWriters.size); + state.awaitDrainWriters.add(dest); + } + src.pause(); + } + if (!ondrain) { + // When the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + ondrain = pipeOnDrain(src, dest); + dest.on('drain', ondrain); + } + } + + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + const ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + pause(); + } + } + + // If the dest has an error, then stop piping into it. + // However, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (dest.listenerCount('error') === 0) { + const s = dest._writableState || dest._readableState; + if (s && !s.errorEmitted) { + // User incorrectly emitted 'error' directly on the stream. + errorOrDestroy(dest, er); + } else { + dest.emit('error', er); + } + } + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // Tell the dest that it's being piped to. + dest.emit('pipe', src); + + // Start the flow if it hasn't been started already. + + if (dest.writableNeedDrain === true) { + if (state.flowing) { + pause(); + } + } else if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src, dest) { + return function pipeOnDrainFunctionResult() { + const state = src._readableState; + + // `ondrain` will call directly, + // `this` maybe not a reference to dest, + // so we use the real dest here. + if (state.awaitDrainWriters === dest) { + debug('pipeOnDrain', 1); + state.awaitDrainWriters = null; + } else if (state.multiAwaitDrain) { + debug('pipeOnDrain', state.awaitDrainWriters.size); + state.awaitDrainWriters.delete(dest); + } + + if ((!state.awaitDrainWriters || state.awaitDrainWriters.size === 0) && + src.listenerCount('data')) { + src.resume(); + } + }; +} + + +Readable.prototype.unpipe = function(dest) { + const state = this._readableState; + const unpipeInfo = { hasUnpiped: false }; + + // If we're not piping anywhere, then do nothing. + if (state.pipes.length === 0) + return this; + + if (!dest) { + // remove all. + const dests = state.pipes; + state.pipes = []; + this.pause(); + + for (let i = 0; i < dests.length; i++) + dests[i].emit('unpipe', this, { hasUnpiped: false }); + return this; + } + + // Try to find the right one. 
+ const index = ArrayPrototypeIndexOf(state.pipes, dest); + if (index === -1) + return this; + + state.pipes.splice(index, 1); + if (state.pipes.length === 0) + this.pause(); + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// Set up data events if they are asked for +// Ensure readable listeners eventually get something. +Readable.prototype.on = function(ev, fn) { + const res = Stream.prototype.on.call(this, ev, fn); + const state = this._readableState; + + if (ev === 'data') { + // Update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused. + if (state.flowing !== false) + this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +Readable.prototype.removeListener = function(ev, fn) { + const res = Stream.prototype.removeListener.call(this, + ev, fn); + + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; +Readable.prototype.off = Readable.prototype.removeListener; + +Readable.prototype.removeAllListeners = function(ev) { + const res = Stream.prototype.removeAllListeners.apply(this, + arguments); + + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; + +function updateReadableListening(self) { + const state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + + if (state.resumeScheduled && state[kPaused] === false) { + // Flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; + + // Crude way to check if we should resume. + } else if (self.listenerCount('data') > 0) { + self.resume(); + } else if (!state.readableListening) { + state.flowing = null; + } +} + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function() { + const state = this._readableState; + if (!state.flowing) { + debug('resume'); + // We flow only if there is no one listening + // for readable, but we still have to call + // resume(). 
+ state.flowing = !state.readableListening; + resume(this, state); + } + state[kPaused] = false; + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) + stream.read(0); +} + +Readable.prototype.pause = function() { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState[kPaused] = true; + return this; +}; + +function flow(stream) { + const state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// Wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function(stream) { + let paused = false; + + // TODO (ronag): Should this.destroy(err) emit + // 'error' on the wrapped stream? Would require + // a static factory method, e.g. Readable.wrap(stream). + + stream.on('data', (chunk) => { + if (!this.push(chunk) && stream.pause) { + paused = true; + stream.pause(); + } + }); + + stream.on('end', () => { + this.push(null); + }); + + stream.on('error', (err) => { + errorOrDestroy(this, err); + }); + + stream.on('close', () => { + this.destroy(); + }); + + stream.on('destroy', () => { + this.destroy(); + }); + + this._read = () => { + if (paused && stream.resume) { + paused = false; + stream.resume(); + } + }; + + // Proxy all the other methods. Important when wrapping filters and duplexes. + const streamKeys = ObjectKeys(stream); + for (let j = 1; j < streamKeys.length; j++) { + const i = streamKeys[j]; + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = stream[i].bind(stream); + } + } + + return this; +}; + +Readable.prototype[SymbolAsyncIterator] = function() { + return streamToAsyncIterator(this); +}; + +Readable.prototype.iterator = function(options) { + if (options !== undefined) { + validateObject(options, 'options'); + } + return streamToAsyncIterator(this, options); +}; + +function streamToAsyncIterator(stream, options) { + if (typeof stream.read !== 'function') { + stream = Readable.wrap(stream, { objectMode: true }); + } + + const iter = createAsyncIterator(stream, options); + iter.stream = stream; + return iter; +} + +async function* createAsyncIterator(stream, options) { + let callback = nop; + + function next(resolve) { + if (this === stream) { + callback(); + callback = nop; + } else { + callback = resolve; + } + } + + stream.on('readable', next); + + let error; + const cleanup = eos(stream, { writable: false }, (err) => { + error = err ? aggregateTwoErrors(error, err) : null; + callback(); + callback = nop; + }); + + try { + while (true) { + const chunk = stream.destroyed ? 
null : stream.read(); + if (chunk !== null) { + yield chunk; + } else if (error) { + throw error; + } else if (error === null) { + return; + } else { + await new Promise(next); + } + } + } catch (err) { + error = aggregateTwoErrors(error, err); + throw error; + } finally { + if ( + (error || options?.destroyOnReturn !== false) && + (error === undefined || stream._readableState.autoDestroy) + ) { + destroyImpl.destroyer(stream, null); + } else { + stream.off('readable', next); + cleanup(); + } + } +} + +// Making it explicit these properties are not enumerable +// because otherwise some prototype manipulation in +// userland will fail. +ObjectDefineProperties(Readable.prototype, { + readable: { + get() { + const r = this._readableState; + // r.readable === false means that this is part of a Duplex stream + // where the readable side was disabled upon construction. + // Compat. The user might manually disable readable side through + // deprecated setter. + return !!r && r.readable !== false && !r.destroyed && !r.errorEmitted && + !r.endEmitted; + }, + set(val) { + // Backwards compat. + if (this._readableState) { + this._readableState.readable = !!val; + } + } + }, + + readableDidRead: { + enumerable: false, + get: function() { + return this._readableState.dataEmitted; + } + }, + + readableAborted: { + enumerable: false, + get: function() { + return !!( + this._readableState.readable !== false && + (this._readableState.destroyed || this._readableState.errored) && + !this._readableState.endEmitted + ); + } + }, + + readableHighWaterMark: { + enumerable: false, + get: function() { + return this._readableState.highWaterMark; + } + }, + + readableBuffer: { + enumerable: false, + get: function() { + return this._readableState && this._readableState.buffer; + } + }, + + readableFlowing: { + enumerable: false, + get: function() { + return this._readableState.flowing; + }, + set: function(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } + }, + + readableLength: { + enumerable: false, + get() { + return this._readableState.length; + } + }, + + readableObjectMode: { + enumerable: false, + get() { + return this._readableState ? this._readableState.objectMode : false; + } + }, + + readableEncoding: { + enumerable: false, + get() { + return this._readableState ? this._readableState.encoding : null; + } + }, + + destroyed: { + enumerable: false, + get() { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set(value) { + // We ignore the value if the stream + // has not been initialized yet. + if (!this._readableState) { + return; + } + + // Backward compatibility, the user is explicitly + // managing destroyed. + this._readableState.destroyed = value; + } + }, + + readableEnded: { + enumerable: false, + get() { + return this._readableState ? this._readableState.endEmitted : false; + } + }, + +}); + +ObjectDefineProperties(ReadableState.prototype, { + // Legacy getter for `pipesCount`. + pipesCount: { + get() { + return this.pipes.length; + } + }, + + // Legacy property for `paused`. + paused: { + get() { + return this[kPaused] !== false; + }, + set(value) { + this[kPaused] = !!value; + } + } +}); + +// Exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
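+// For example, in the byte-mode path, with two 4-byte chunks buffered,
+// fromList(6, state) returns 6 bytes spliced from both chunks (via
+// buffer.consume()), leaving the remaining 2 bytes queued.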
+function fromList(n, state) { + // nothing buffered. + if (state.length === 0) + return null; + + let ret; + if (state.objectMode) + ret = state.buffer.shift(); + else if (!n || n >= state.length) { + // Read it all, truncate the list. + if (state.decoder) + ret = state.buffer.join(''); + else if (state.buffer.length === 1) + ret = state.buffer.first(); + else + ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list. + ret = state.buffer.consume(n, state.decoder); + } + + return ret; +} + +function endReadable(stream) { + const state = stream._readableState; + + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); + + // Check that we didn't get one last unshift. + if (!state.errored && !state.closeEmitted && + !state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.emit('end'); + + if (stream.writable && stream.allowHalfOpen === false) { + process.nextTick(endWritableNT, stream); + } else if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well. + const wState = stream._writableState; + const autoDestroy = !wState || ( + wState.autoDestroy && + // We don't expect the writable to ever 'finish' + // if writable is explicitly set to false. + (wState.finished || wState.writable === false) + ); + + if (autoDestroy) { + stream.destroy(); + } + } + } +} + +function endWritableNT(stream) { + const writable = stream.writable && !stream.writableEnded && + !stream.destroyed; + if (writable) { + stream.end(); + } +} + +Readable.from = function(iterable, opts) { + return from(Readable, iterable, opts); +}; + +let webStreamsAdapters; + +// Lazy to avoid circular references +function lazyWebStreams() { + if (webStreamsAdapters === undefined) + webStreamsAdapters = {}; + return webStreamsAdapters; +} + +Readable.fromWeb = function(readableStream, options) { + return lazyWebStreams().newStreamReadableFromReadableStream( + readableStream, + options); +}; + +Readable.toWeb = function(streamReadable) { + return lazyWebStreams().newReadableStreamFromStreamReadable(streamReadable); +}; + +Readable.wrap = function(src, options) { + return new Readable({ + objectMode: src.readableObjectMode ?? src.objectMode ?? true, + ...options, + destroy(err, callback) { + destroyImpl.destroyer(src, err); + callback(err); + } + }).wrap(src); +}; diff --git a/lib/internal/streams/state.js b/lib/internal/streams/state.js new file mode 100644 index 0000000000..9261fec3b5 --- /dev/null +++ b/lib/internal/streams/state.js @@ -0,0 +1,36 @@ +'use strict'; + +const { + MathFloor, + NumberIsInteger, +} = require('../primordials'); + +const { ERR_INVALID_ARG_VALUE } = require('../errors').codes; + +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : + isDuplex ? options[duplexKey] : null; +} + +function getDefaultHighWaterMark(objectMode) { + return objectMode ? 16 : 16 * 1024; +} + +function getHighWaterMark(state, options, duplexKey, isDuplex) { + const hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!NumberIsInteger(hwm) || hwm < 0) { + const name = isDuplex ? 
`options.${duplexKey}` : 'options.highWaterMark'; + throw new ERR_INVALID_ARG_VALUE(name, hwm); + } + return MathFloor(hwm); + } + + // Default value + return getDefaultHighWaterMark(state.objectMode); +} + +module.exports = { + getHighWaterMark, + getDefaultHighWaterMark +}; diff --git a/lib/internal/streams/transform.js b/lib/internal/streams/transform.js new file mode 100644 index 0000000000..f48ae3782f --- /dev/null +++ b/lib/internal/streams/transform.js @@ -0,0 +1,247 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. 
In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +const { + ObjectSetPrototypeOf, + Symbol +} = require('../primordials'); + +module.exports = Transform; +const { + ERR_METHOD_NOT_IMPLEMENTED +} = require('../errors').codes; +const Duplex = require('./duplex'); +ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype); +ObjectSetPrototypeOf(Transform, Duplex); + +const kCallback = Symbol('kCallback'); + +function Transform(options) { + if (!(this instanceof Transform)) + return new Transform(options); + + Duplex.call(this, options); + + // We have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + this[kCallback] = null; + + if (options) { + if (typeof options.transform === 'function') + this._transform = options.transform; + + if (typeof options.flush === 'function') + this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + // Backwards compat. Some Transform streams incorrectly implement _final + // instead of or in addition to _flush. By using 'prefinish' instead of + // implementing _final we continue supporting this unfortunate use case. + this.on('prefinish', prefinish); +} + +function final(cb) { + let called = false; + if (typeof this._flush === 'function' && !this.destroyed) { + const result = this._flush((er, data) => { + called = true; + if (er) { + if (cb) { + cb(er); + } else { + this.destroy(er); + } + return; + } + + if (data != null) { + this.push(data); + } + this.push(null); + if (cb) { + cb(); + } + }); + if (result !== undefined && result !== null) { + try { + const then = result.then; + if (typeof then === 'function') { + then.call( + result, + (data) => { + if (called) + return; + if (data != null) + this.push(data); + this.push(null); + if (cb) + process.nextTick(cb); + }, + (err) => { + if (cb) { + process.nextTick(cb, err); + } else { + process.nextTick(() => this.destroy(err)); + } + }); + } + } catch (err) { + process.nextTick(() => this.destroy(err)); + } + } + } else { + this.push(null); + if (cb) { + cb(); + } + } +} + +function prefinish() { + if (this._final !== final) { + final.call(this); + } +} + +Transform.prototype._final = final; + +Transform.prototype._transform = function(chunk, encoding, callback) { + throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()'); +}; + +Transform.prototype._write = function(chunk, encoding, callback) { + const rState = this._readableState; + const wState = this._writableState; + const length = rState.length; + + let called = false; + const result = this._transform(chunk, encoding, (err, val) => { + called = true; + if (err) { + callback(err); + return; + } + + if (val != null) { + this.push(val); + } + + if ( + wState.ended || // Backwards compat. + length === rState.length || // Backwards compat. 
+ rState.length < rState.highWaterMark || + rState.highWaterMark === 0 || + rState.length === 0 + ) { + callback(); + } else { + this[kCallback] = callback; + } + }); + if (result !== undefined && result != null) { + try { + const then = result.then; + if (typeof then === 'function') { + then.call( + result, + (val) => { + if (called) + return; + + if (val != null) { + this.push(val); + } + + if ( + wState.ended || + length === rState.length || + rState.length < rState.highWaterMark || + rState.length === 0) { + process.nextTick(callback); + } else { + this[kCallback] = callback; + } + }, + (err) => { + process.nextTick(callback, err); + }); + } + } catch (err) { + process.nextTick(callback, err); + } + } +}; + +Transform.prototype._read = function() { + if (this[kCallback]) { + const callback = this[kCallback]; + this[kCallback] = null; + callback(); + } +}; diff --git a/lib/internal/streams/utils.js b/lib/internal/streams/utils.js new file mode 100644 index 0000000000..bd653c4fc0 --- /dev/null +++ b/lib/internal/streams/utils.js @@ -0,0 +1,260 @@ +'use strict'; + +const { + Symbol, + SymbolAsyncIterator, + SymbolIterator, +} = require('../primordials'); + +const kDestroyed = Symbol('kDestroyed'); +const kIsErrored = Symbol('kIsErrored'); +const kIsReadable = Symbol('kIsReadable'); +const kIsDisturbed = Symbol('kIsDisturbed'); + +function isReadableNodeStream(obj, strict = false) { + return !!( + obj && + typeof obj.pipe === 'function' && + typeof obj.on === 'function' && + ( + !strict || + (typeof obj.pause === 'function' && typeof obj.resume === 'function') + ) && + (!obj._writableState || obj._readableState?.readable !== false) && // Duplex + (!obj._writableState || obj._readableState) // Writable has .pipe. + ); +} + +function isWritableNodeStream(obj) { + return !!( + obj && + typeof obj.write === 'function' && + typeof obj.on === 'function' && + (!obj._readableState || obj._writableState?.writable !== false) // Duplex + ); +} + +function isDuplexNodeStream(obj) { + return !!( + obj && + (typeof obj.pipe === 'function' && obj._readableState) && + typeof obj.on === 'function' && + typeof obj.write === 'function' + ); +} + +function isNodeStream(obj) { + return ( + obj && + ( + obj._readableState || + obj._writableState || + (typeof obj.write === 'function' && typeof obj.on === 'function') || + (typeof obj.pipe === 'function' && typeof obj.on === 'function') + ) + ); +} + +function isIterable(obj, isAsync) { + if (obj == null) return false; + if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function'; + if (isAsync === false) return typeof obj[SymbolIterator] === 'function'; + return typeof obj[SymbolAsyncIterator] === 'function' || + typeof obj[SymbolIterator] === 'function'; +} + +function isDestroyed(stream) { + if (!isNodeStream(stream)) return null; + const wState = stream._writableState; + const rState = stream._readableState; + const state = wState || rState; + return !!(stream.destroyed || stream[kDestroyed] || state?.destroyed); +} + +// Have been end():d. +function isWritableEnded(stream) { + if (!isWritableNodeStream(stream)) return null; + if (stream.writableEnded === true) return true; + const wState = stream._writableState; + if (wState?.errored) return false; + if (typeof wState?.ended !== 'boolean') return null; + return wState.ended; +} + +// Have emitted 'finish'. 
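+// (In non-strict mode this also counts streams that have ended with an empty
+// buffer but have not yet emitted 'finish'.)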
+function isWritableFinished(stream, strict) { + if (!isWritableNodeStream(stream)) return null; + if (stream.writableFinished === true) return true; + const wState = stream._writableState; + if (wState?.errored) return false; + if (typeof wState?.finished !== 'boolean') return null; + return !!( + wState.finished || + (strict === false && wState.ended === true && wState.length === 0) + ); +} + +// Have been push(null):d. +function isReadableEnded(stream) { + if (!isReadableNodeStream(stream)) return null; + if (stream.readableEnded === true) return true; + const rState = stream._readableState; + if (!rState || rState.errored) return false; + if (typeof rState?.ended !== 'boolean') return null; + return rState.ended; +} + +// Have emitted 'end'. +function isReadableFinished(stream, strict) { + if (!isReadableNodeStream(stream)) return null; + const rState = stream._readableState; + if (rState?.errored) return false; + if (typeof rState?.endEmitted !== 'boolean') return null; + return !!( + rState.endEmitted || + (strict === false && rState.ended === true && rState.length === 0) + ); +} + +function isReadable(stream) { + if (stream && stream[kIsReadable] != null) return stream[kIsReadable]; + if (typeof stream?.readable !== 'boolean') return null; + if (isDestroyed(stream)) return false; + return isReadableNodeStream(stream) && + stream.readable && + !isReadableFinished(stream); +} + +function isWritable(stream) { + if (typeof stream?.writable !== 'boolean') return null; + if (isDestroyed(stream)) return false; + return isWritableNodeStream(stream) && + stream.writable && + !isWritableEnded(stream); +} + +function isFinished(stream, opts) { + if (!isNodeStream(stream)) { + return null; + } + + if (isDestroyed(stream)) { + return true; + } + + if (opts?.readable !== false && isReadable(stream)) { + return false; + } + + if (opts?.writable !== false && isWritable(stream)) { + return false; + } + + return true; +} + +function isClosed(stream) { + if (!isNodeStream(stream)) { + return null; + } + + const wState = stream._writableState; + const rState = stream._readableState; + + if ( + typeof wState?.closed === 'boolean' || + typeof rState?.closed === 'boolean' + ) { + return wState?.closed || rState?.closed; + } + + if (typeof stream._closed === 'boolean' && isOutgoingMessage(stream)) { + return stream._closed; + } + + return null; +} + +function isOutgoingMessage(stream) { + return ( + typeof stream._closed === 'boolean' && + typeof stream._defaultKeepAlive === 'boolean' && + typeof stream._removedConnection === 'boolean' && + typeof stream._removedContLen === 'boolean' + ); +} + +function isServerResponse(stream) { + return ( + typeof stream._sent100 === 'boolean' && + isOutgoingMessage(stream) + ); +} + +function isServerRequest(stream) { + return ( + typeof stream._consuming === 'boolean' && + typeof stream._dumped === 'boolean' && + stream.req?.upgradeOrConnect === undefined + ); +} + +function willEmitClose(stream) { + if (!isNodeStream(stream)) return null; + + const wState = stream._writableState; + const rState = stream._readableState; + const state = wState || rState; + + return (!state && isServerResponse(stream)) || !!( + state && + state.autoDestroy && + state.emitClose && + state.closed === false + ); +} + +function isDisturbed(stream) { + return !!(stream && ( + stream[kIsDisturbed] ?? + (stream.readableDidRead || stream.readableAborted) + )); +} + +function isErrored(stream) { + return !!(stream && ( + stream[kIsErrored] ?? + stream.readableErrored ?? 
+ stream.writableErrored ?? + stream._readableState?.errorEmitted ?? + stream._writableState?.errorEmitted ?? + stream._readableState?.errored ?? + stream._writableState?.errored + )); +} + +module.exports = { + kDestroyed, + isDisturbed, + kIsDisturbed, + isErrored, + kIsErrored, + isReadable, + kIsReadable, + isClosed, + isDestroyed, + isDuplexNodeStream, + isFinished, + isIterable, + isReadableNodeStream, + isReadableEnded, + isReadableFinished, + isNodeStream, + isWritable, + isWritableNodeStream, + isWritableEnded, + isWritableFinished, + isServerRequest, + isServerResponse, + willEmitClose, +}; diff --git a/lib/internal/streams/writable.js b/lib/internal/streams/writable.js new file mode 100644 index 0000000000..1b156f3cba --- /dev/null +++ b/lib/internal/streams/writable.js @@ -0,0 +1,897 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +const { + ArrayPrototypeSlice, + Error, + FunctionPrototypeSymbolHasInstance, + ObjectDefineProperty, + ObjectDefineProperties, + ObjectSetPrototypeOf, + StringPrototypeToLowerCase, + Symbol, + SymbolHasInstance, +} = require('../primordials'); + +module.exports = Writable; +Writable.WritableState = WritableState; + +const EE = require('events'); +const Stream = require('./legacy').Stream; +const { Buffer } = require('buffer'); +const destroyImpl = require('./destroy'); + +const { + addAbortSignal, +} = require('./add-abort-signal'); + +const { + getHighWaterMark, + getDefaultHighWaterMark +} = require('./state'); +const { + ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED, + ERR_STREAM_ALREADY_FINISHED, + ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING +} = require('../errors').codes; + +const { errorOrDestroy } = destroyImpl; + +ObjectSetPrototypeOf(Writable.prototype, Stream.prototype); +ObjectSetPrototypeOf(Writable, Stream); + +function nop() {} + +const kOnFinished = Symbol('kOnFinished'); + +function WritableState(options, stream, isDuplex) { + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. 
options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') + isDuplex = stream instanceof require('./duplex'); + + // Object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!(options && options.objectMode); + + if (isDuplex) + this.objectMode = this.objectMode || + !!(options && options.writableObjectMode); + + // The point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write(). + this.highWaterMark = options ? + getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex) : + getDefaultHighWaterMark(false); + + // if _final has been called. + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // At the start of calling end() + this.ending = false; + // When end() has been called, and returned. + this.ended = false; + // When 'finish' is emitted. + this.finished = false; + + // Has it been destroyed + this.destroyed = false; + + // Should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + const noDecode = !!(options && options.decodeStrings === false); + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = (options && options.defaultEncoding) || 'utf8'; + + // Not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // A flag to see when we're in the middle of a write. + this.writing = false; + + // When true all writes will be buffered until .uncork() call. + this.corked = 0; + + // A flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // A flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // The callback that's passed to _write(chunk, cb). + this.onwrite = onwrite.bind(undefined, stream); + + // The callback that the user supplies to write(chunk, encoding, cb). + this.writecb = null; + + // The amount that is being written when _write is called. + this.writelen = 0; + + // Storage for data passed to the afterWrite() callback in case of + // synchronous _write() completion. + this.afterWriteTickInfo = null; + + resetBuffer(this); + + // Number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted. + this.pendingcb = 0; + + // Stream is still being constructed and cannot be + // destroyed until construction finished or failed. + // Async construction is opt in, therefore we start as + // constructed. + this.constructed = true; + + // Emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams. + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again. 
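+  // It is consulted by the destroy/error paths so that a stream failing in
+  // more than one place still surfaces a single 'error' event.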
+ this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = !options || options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end'). + this.autoDestroy = !options || options.autoDestroy !== false; + + // Indicates whether the stream has errored. When true all write() calls + // should return false. This is needed since when autoDestroy + // is disabled we need a way to tell whether the stream has failed. + this.errored = null; + + // Indicates whether the stream has finished destroying. + this.closed = false; + + // True if close has been emitted or would have been emitted + // depending on emitClose. + this.closeEmitted = false; + + this[kOnFinished] = []; +} + +function resetBuffer(state) { + state.buffered = []; + state.bufferedIndex = 0; + state.allBuffers = true; + state.allNoop = true; +} + +WritableState.prototype.getBuffer = function getBuffer() { + return ArrayPrototypeSlice(this.buffered, this.bufferedIndex); +}; + +ObjectDefineProperty(WritableState.prototype, 'bufferedRequestCount', { + get() { + return this.buffered.length - this.bufferedIndex; + } +}); + +function Writable(options) { + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5. + const isDuplex = (this instanceof require('./duplex')); + + if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) + return new Writable(options); + + this._writableState = new WritableState(options, this, isDuplex); + + if (options) { + if (typeof options.write === 'function') + this._write = options.write; + + if (typeof options.writev === 'function') + this._writev = options.writev; + + if (typeof options.destroy === 'function') + this._destroy = options.destroy; + + if (typeof options.final === 'function') + this._final = options.final; + + if (typeof options.construct === 'function') + this._construct = options.construct; + + if (options.signal) + addAbortSignal(options.signal, this); + } + + Stream.call(this, options); + + destroyImpl.construct(this, () => { + const state = this._writableState; + + if (!state.writing) { + clearBuffer(this, state); + } + + finishMaybe(this, state); + }); +} + +ObjectDefineProperty(Writable, SymbolHasInstance, { + value: function(object) { + if (FunctionPrototypeSymbolHasInstance(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + }, +}); + +// Otherwise people can pipe Writable streams, which is just wrong. 
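+// Calling it reports an error on the stream itself instead of piping.
+// A small sketch of the observable behaviour, assuming the default
+// autoDestroy: true:
+//
+//   const w = new Writable({ write(chunk, enc, cb) { cb() } })
+//   w.on('error', (err) => console.log(err.code)) // 'ERR_STREAM_CANNOT_PIPE'
+//   w.pipe(destination)                           // destination can be anything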
+Writable.prototype.pipe = function() { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; + +function _write(stream, chunk, encoding, cb) { + const state = stream._writableState; + + if (typeof encoding === 'function') { + cb = encoding; + encoding = state.defaultEncoding; + } else { + if (!encoding) + encoding = state.defaultEncoding; + else if (encoding !== 'buffer' && !Buffer.isEncoding(encoding)) + throw new ERR_UNKNOWN_ENCODING(encoding); + if (typeof cb !== 'function') + cb = nop; + } + + if (chunk === null) { + throw new ERR_STREAM_NULL_VALUES(); + } else if (!state.objectMode) { + if (typeof chunk === 'string') { + if (state.decodeStrings !== false) { + chunk = Buffer.from(chunk, encoding); + encoding = 'buffer'; + } + } else if (chunk instanceof Buffer) { + encoding = 'buffer'; + } else if (Stream._isUint8Array(chunk)) { + chunk = Stream._uint8ArrayToBuffer(chunk); + encoding = 'buffer'; + } else { + throw new ERR_INVALID_ARG_TYPE( + 'chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + } + + let err; + if (state.ending) { + err = new ERR_STREAM_WRITE_AFTER_END(); + } else if (state.destroyed) { + err = new ERR_STREAM_DESTROYED('write'); + } + + if (err) { + process.nextTick(cb, err); + errorOrDestroy(stream, err, true); + return err; + } + state.pendingcb++; + return writeOrBuffer(stream, state, chunk, encoding, cb); +} + +Writable.prototype.write = function(chunk, encoding, cb) { + return _write(this, chunk, encoding, cb) === true; +}; + +Writable.prototype.cork = function() { + this._writableState.corked++; +}; + +Writable.prototype.uncork = function() { + const state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing) + clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') + encoding = StringPrototypeToLowerCase(encoding); + if (!Buffer.isEncoding(encoding)) + throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +// If we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, chunk, encoding, callback) { + const len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + // stream._write resets state.length + const ret = state.length < state.highWaterMark; + // We must ensure that previous needDrain will not be reset to false. + if (!ret) + state.needDrain = true; + + if (state.writing || state.corked || state.errored || !state.constructed) { + state.buffered.push({ chunk, encoding, callback }); + if (state.allBuffers && encoding !== 'buffer') { + state.allBuffers = false; + } + if (state.allNoop && callback !== nop) { + state.allNoop = false; + } + } else { + state.writelen = len; + state.writecb = callback; + state.writing = true; + state.sync = true; + stream._write(chunk, encoding, state.onwrite); + state.sync = false; + } + + // Return false if errored or destroyed in order to break + // any synchronous while(stream.write(data)) loops. 
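+  // A producer written in the common form
+  //
+  //   while (stream.write(chunk)) { /* produce more */ }
+  //   stream.once('drain', produceMore)
+  //
+  // would otherwise keep buffering into a stream that can never drain.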
+ return ret && !state.errored && !state.destroyed; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) + state.onwrite(new ERR_STREAM_DESTROYED('write')); + else if (writev) + stream._writev(chunk, state.onwrite); + else + stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, er, cb) { + --state.pendingcb; + + cb(er); + // Ensure callbacks are invoked even when autoDestroy is + // not enabled. Passing `er` here doesn't make sense since + // it's related to one specific write, not to the buffered + // writes. + errorBuffer(state); + // This can emit error, but error must always follow cb. + errorOrDestroy(stream, er); +} + +function onwrite(stream, er) { + const state = stream._writableState; + const sync = state.sync; + const cb = state.writecb; + + if (typeof cb !== 'function') { + errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK()); + return; + } + + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; + + if (er) { + // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 + er.stack; // eslint-disable-line no-unused-expressions + + if (!state.errored) { + state.errored = er; + } + + // In case of duplex streams we need to notify the readable side of the + // error. + if (stream._readableState && !stream._readableState.errored) { + stream._readableState.errored = er; + } + + if (sync) { + process.nextTick(onwriteError, stream, state, er, cb); + } else { + onwriteError(stream, state, er, cb); + } + } else { + if (state.buffered.length > state.bufferedIndex) { + clearBuffer(stream, state); + } + + if (sync) { + // It is a common case that the callback passed to .write() is always + // the same. In that case, we do not schedule a new nextTick(), but + // rather just increase a counter, to improve performance and avoid + // memory allocations. + if (state.afterWriteTickInfo !== null && + state.afterWriteTickInfo.cb === cb) { + state.afterWriteTickInfo.count++; + } else { + state.afterWriteTickInfo = { count: 1, cb, stream, state }; + process.nextTick(afterWriteTick, state.afterWriteTickInfo); + } + } else { + afterWrite(stream, state, 1, cb); + } + } +} + +function afterWriteTick({ stream, state, count, cb }) { + state.afterWriteTickInfo = null; + return afterWrite(stream, state, count, cb); +} + +function afterWrite(stream, state, count, cb) { + const needDrain = !state.ending && !stream.destroyed && state.length === 0 && + state.needDrain; + if (needDrain) { + state.needDrain = false; + stream.emit('drain'); + } + + while (count-- > 0) { + state.pendingcb--; + cb(); + } + + if (state.destroyed) { + errorBuffer(state); + } + + finishMaybe(stream, state); +} + +// If there's something in the buffer waiting, then invoke callbacks. +function errorBuffer(state) { + if (state.writing) { + return; + } + + for (let n = state.bufferedIndex; n < state.buffered.length; ++n) { + const { chunk, callback } = state.buffered[n]; + const len = state.objectMode ? 1 : chunk.length; + state.length -= len; + callback(state.errored ?? new ERR_STREAM_DESTROYED('write')); + } + + const onfinishCallbacks = state[kOnFinished].splice(0); + for (let i = 0; i < onfinishCallbacks.length; i++) { + onfinishCallbacks[i](state.errored ?? 
new ERR_STREAM_DESTROYED('end')); + } + + resetBuffer(state); +} + +// If there's something in the buffer waiting, then process it. +function clearBuffer(stream, state) { + if (state.corked || + state.bufferProcessing || + state.destroyed || + !state.constructed) { + return; + } + + const { buffered, bufferedIndex, objectMode } = state; + const bufferedLength = buffered.length - bufferedIndex; + + if (!bufferedLength) { + return; + } + + let i = bufferedIndex; + + state.bufferProcessing = true; + if (bufferedLength > 1 && stream._writev) { + state.pendingcb -= bufferedLength - 1; + + const callback = state.allNoop ? nop : (err) => { + for (let n = i; n < buffered.length; ++n) { + buffered[n].callback(err); + } + }; + // Make a copy of `buffered` if it's going to be used by `callback` above, + // since `doWrite` will mutate the array. + const chunks = state.allNoop && i === 0 ? + buffered : ArrayPrototypeSlice(buffered, i); + chunks.allBuffers = state.allBuffers; + + doWrite(stream, state, true, state.length, chunks, '', callback); + + resetBuffer(state); + } else { + do { + const { chunk, encoding, callback } = buffered[i]; + buffered[i++] = null; + const len = objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, callback); + } while (i < buffered.length && !state.writing); + + if (i === buffered.length) { + resetBuffer(state); + } else if (i > 256) { + buffered.splice(0, i); + state.bufferedIndex = 0; + } else { + state.bufferedIndex = i; + } + } + state.bufferProcessing = false; +} + +Writable.prototype._write = function(chunk, encoding, cb) { + if (this._writev) { + this._writev([{ chunk, encoding }], cb); + } else { + throw new ERR_METHOD_NOT_IMPLEMENTED('_write()'); + } +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function(chunk, encoding, cb) { + const state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + let err; + + if (chunk !== null && chunk !== undefined) { + const ret = _write(this, chunk, encoding); + if (ret instanceof Error) { + err = ret; + } + } + + // .end() fully uncorks. + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + if (err) { + // Do nothing... + } else if (!state.errored && !state.ending) { + // This is forgiving in terms of unnecessary calls to end() and can hide + // logic errors. However, usually such errors are harmless and causing a + // hard error can be disproportionately destructive. It is not always + // trivial for the user to determine whether end() needs to be called + // or not. + + state.ending = true; + finishMaybe(this, state, true); + state.ended = true; + } else if (state.finished) { + err = new ERR_STREAM_ALREADY_FINISHED('end'); + } else if (state.destroyed) { + err = new ERR_STREAM_DESTROYED('end'); + } + + if (typeof cb === 'function') { + if (err || state.finished) { + process.nextTick(cb, err); + } else { + state[kOnFinished].push(cb); + } + } + + return this; +}; + +function needFinish(state) { + return (state.ending && + state.constructed && + state.length === 0 && + !state.errored && + state.buffered.length === 0 && + !state.finished && + !state.writing && + !state.errorEmitted && + !state.closeEmitted); +} + +function callFinal(stream, state) { + let called = false; + + function onFinish(err) { + if (called) { + errorOrDestroy(stream, err ?? 
ERR_MULTIPLE_CALLBACK()); + return; + } + called = true; + + state.pendingcb--; + if (err) { + const onfinishCallbacks = state[kOnFinished].splice(0); + for (let i = 0; i < onfinishCallbacks.length; i++) { + onfinishCallbacks[i](err); + } + errorOrDestroy(stream, err, state.sync); + } else if (needFinish(state)) { + state.prefinished = true; + stream.emit('prefinish'); + // Backwards compat. Don't check state.sync here. + // Some streams assume 'finish' will be emitted + // asynchronously relative to _final callback. + state.pendingcb++; + process.nextTick(finish, stream, state); + } + } + + state.sync = true; + state.pendingcb++; + + try { + const result = stream._final(onFinish); + if (result != null) { + const then = result.then; + if (typeof then === 'function') { + then.call( + result, + function() { + if (!called) { + process.nextTick(onFinish, null); + } + }, + function(err) { + if (!called) { + process.nextTick(onFinish, err); + } + }); + } + } + } catch (err) { + onFinish(err); + } + + state.sync = false; +} + +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.finalCalled = true; + callFinal(stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state, sync) { + if (needFinish(state)) { + prefinish(stream, state); + if (state.pendingcb === 0 && needFinish(state)) { + state.pendingcb++; + if (sync) { + process.nextTick(finish, stream, state); + } else { + finish(stream, state); + } + } + } +} + +function finish(stream, state) { + state.pendingcb--; + state.finished = true; + + const onfinishCallbacks = state[kOnFinished].splice(0); + for (let i = 0; i < onfinishCallbacks.length; i++) { + onfinishCallbacks[i](); + } + + stream.emit('finish'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well. + const rState = stream._readableState; + const autoDestroy = !rState || ( + rState.autoDestroy && + // We don't expect the readable to ever 'end' + // if readable is explicitly set to false. + (rState.endEmitted || rState.readable === false) + ); + if (autoDestroy) { + stream.destroy(); + } + } +} + +ObjectDefineProperties(Writable.prototype, { + + destroyed: { + get() { + return this._writableState ? this._writableState.destroyed : false; + }, + set(value) { + // Backward compatibility, the user is explicitly managing destroyed. + if (this._writableState) { + this._writableState.destroyed = value; + } + } + }, + + writable: { + get() { + const w = this._writableState; + // w.writable === false means that this is part of a Duplex stream + // where the writable side was disabled upon construction. + // Compat. The user might manually disable writable side through + // deprecated setter. + return !!w && w.writable !== false && !w.destroyed && !w.errored && + !w.ending && !w.ended; + }, + set(val) { + // Backwards compatible. + if (this._writableState) { + this._writableState.writable = !!val; + } + } + }, + + writableFinished: { + get() { + return this._writableState ? this._writableState.finished : false; + } + }, + + writableObjectMode: { + get() { + return this._writableState ? this._writableState.objectMode : false; + } + }, + + writableBuffer: { + get() { + return this._writableState && this._writableState.getBuffer(); + } + }, + + writableEnded: { + get() { + return this._writableState ? 
this._writableState.ending : false; + } + }, + + writableNeedDrain: { + get() { + const wState = this._writableState; + if (!wState) return false; + return !wState.destroyed && !wState.ending && wState.needDrain; + } + }, + + writableHighWaterMark: { + get() { + return this._writableState && this._writableState.highWaterMark; + } + }, + + writableCorked: { + get() { + return this._writableState ? this._writableState.corked : 0; + } + }, + + writableLength: { + get() { + return this._writableState && this._writableState.length; + } + } +}); + +const destroy = destroyImpl.destroy; +Writable.prototype.destroy = function(err, cb) { + const state = this._writableState; + + // Invoke pending callbacks. + if (!state.destroyed && + (state.bufferedIndex < state.buffered.length || + state[kOnFinished].length)) { + process.nextTick(errorBuffer, state); + } + + destroy.call(this, err, cb); + return this; +}; + +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function(err, cb) { + cb(err); +}; + +Writable.prototype[EE.captureRejectionSymbol] = function(err) { + this.destroy(err); +}; + +let webStreamsAdapters; + +// Lazy to avoid circular references +function lazyWebStreams() { + if (webStreamsAdapters === undefined) + webStreamsAdapters = {}; + return webStreamsAdapters; +} + +Writable.fromWeb = function(writableStream, options) { + return lazyWebStreams().newStreamWritableFromWritableStream( + writableStream, + options); +}; + +Writable.toWeb = function(streamWritable) { + return lazyWebStreams().newWritableStreamFromStreamWritable(streamWritable); +}; diff --git a/lib/internal/validators.js b/lib/internal/validators.js new file mode 100644 index 0000000000..4b672ea3a8 --- /dev/null +++ b/lib/internal/validators.js @@ -0,0 +1,276 @@ +'use strict'; + +const { + ArrayIsArray, + ArrayPrototypeIncludes, + ArrayPrototypeJoin, + ArrayPrototypeMap, + NumberIsInteger, + NumberMAX_SAFE_INTEGER, + NumberMIN_SAFE_INTEGER, + NumberParseInt, + RegExpPrototypeTest, + String, + StringPrototypeToUpperCase, + StringPrototypeTrim, +} = require('./primordials'); + +const { + hideStackFrames, + codes: { + ERR_SOCKET_BAD_PORT, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_OUT_OF_RANGE, + ERR_UNKNOWN_SIGNAL, + ERR_INVALID_CALLBACK, + } +} = require('./errors'); +const { normalizeEncoding } = require('../util'); +const { + isAsyncFunction, + isArrayBufferView +} = require('../util'); +const signals = {}; + +function isInt32(value) { + return value === (value | 0); +} + +function isUint32(value) { + return value === (value >>> 0); +} + +const octalReg = /^[0-7]+$/; +const modeDesc = 'must be a 32-bit unsigned integer or an octal string'; + +/** + * Parse and validate values that will be converted into mode_t (the S_* + * constants). Only valid numbers and octal strings are allowed. They could be + * converted to 32-bit unsigned integers or non-negative signed integers in the + * C++ land, but any value higher than 0o777 will result in platform-specific + * behaviors. 
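+ * For example, parseFileMode('777', 'mode') returns 0o777 (511), while
+ * parseFileMode('999', 'mode') throws ERR_INVALID_ARG_VALUE because '999'
+ * is not a valid octal string.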
+ * + * @param {*} value Values to be validated + * @param {string} name Name of the argument + * @param {number} [def] If specified, will be returned for invalid values + * @returns {number} + */ +function parseFileMode(value, name, def) { + if (typeof value === 'undefined') { + value = def; + } + + if (typeof value === 'string') { + if (!RegExpPrototypeTest(octalReg, value)) { + throw new ERR_INVALID_ARG_VALUE(name, value, modeDesc); + } + value = NumberParseInt(value, 8); + } + + validateInt32(value, name, 0, 2 ** 32 - 1); + return value; +} + +const validateInteger = hideStackFrames( + (value, name, min = NumberMIN_SAFE_INTEGER, max = NumberMAX_SAFE_INTEGER) => { + if (typeof value !== 'number') + throw new ERR_INVALID_ARG_TYPE(name, 'number', value); + if (!NumberIsInteger(value)) + throw new ERR_OUT_OF_RANGE(name, 'an integer', value); + if (value < min || value > max) + throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value); + } +); + +const validateInt32 = hideStackFrames( + (value, name, min = -2147483648, max = 2147483647) => { + // The defaults for min and max correspond to the limits of 32-bit integers. + if (typeof value !== 'number') { + throw new ERR_INVALID_ARG_TYPE(name, 'number', value); + } + if (!isInt32(value)) { + if (!NumberIsInteger(value)) { + throw new ERR_OUT_OF_RANGE(name, 'an integer', value); + } + throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value); + } + if (value < min || value > max) { + throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value); + } + } +); + +const validateUint32 = hideStackFrames((value, name, positive) => { + if (typeof value !== 'number') { + throw new ERR_INVALID_ARG_TYPE(name, 'number', value); + } + if (!isUint32(value)) { + if (!NumberIsInteger(value)) { + throw new ERR_OUT_OF_RANGE(name, 'an integer', value); + } + const min = positive ? 1 : 0; + // 2 ** 32 === 4294967296 + throw new ERR_OUT_OF_RANGE(name, `>= ${min} && < 4294967296`, value); + } + if (positive && value === 0) { + throw new ERR_OUT_OF_RANGE(name, '>= 1 && < 4294967296', value); + } +}); + +function validateString(value, name) { + if (typeof value !== 'string') + throw new ERR_INVALID_ARG_TYPE(name, 'string', value); +} + +function validateNumber(value, name) { + if (typeof value !== 'number') + throw new ERR_INVALID_ARG_TYPE(name, 'number', value); +} + +const validateOneOf = hideStackFrames((value, name, oneOf) => { + if (!ArrayPrototypeIncludes(oneOf, value)) { + const allowed = ArrayPrototypeJoin( + ArrayPrototypeMap(oneOf, (v) => + (typeof v === 'string' ? `'${v}'` : String(v))), + ', '); + const reason = 'must be one of: ' + allowed; + throw new ERR_INVALID_ARG_VALUE(name, value, reason); + } +}); + +function validateBoolean(value, name) { + if (typeof value !== 'boolean') + throw new ERR_INVALID_ARG_TYPE(name, 'boolean', value); +} + +/** + * @param {unknown} value + * @param {string} name + * @param {{ + * allowArray?: boolean, + * allowFunction?: boolean, + * nullable?: boolean + * }} [options] + */ +const validateObject = hideStackFrames( + (value, name, options) => { + const useDefaultOptions = options == null; + const allowArray = useDefaultOptions ? false : options.allowArray; + const allowFunction = useDefaultOptions ? false : options.allowFunction; + const nullable = useDefaultOptions ? 
false : options.nullable; + if ((!nullable && value === null) || + (!allowArray && ArrayIsArray(value)) || + (typeof value !== 'object' && ( + !allowFunction || typeof value !== 'function' + ))) { + throw new ERR_INVALID_ARG_TYPE(name, 'Object', value); + } + }); + +const validateArray = hideStackFrames((value, name, minLength = 0) => { + if (!ArrayIsArray(value)) { + throw new ERR_INVALID_ARG_TYPE(name, 'Array', value); + } + if (value.length < minLength) { + const reason = `must be longer than ${minLength}`; + throw new ERR_INVALID_ARG_VALUE(name, value, reason); + } +}); + +function validateSignalName(signal, name = 'signal') { + validateString(signal, name); + + if (signals[signal] === undefined) { + if (signals[StringPrototypeToUpperCase(signal)] !== undefined) { + throw new ERR_UNKNOWN_SIGNAL(signal + + ' (signals must use all capital letters)'); + } + + throw new ERR_UNKNOWN_SIGNAL(signal); + } +} + +const validateBuffer = hideStackFrames((buffer, name = 'buffer') => { + if (!isArrayBufferView(buffer)) { + throw new ERR_INVALID_ARG_TYPE(name, + ['Buffer', 'TypedArray', 'DataView'], + buffer); + } +}); + +function validateEncoding(data, encoding) { + const normalizedEncoding = normalizeEncoding(encoding); + const length = data.length; + + if (normalizedEncoding === 'hex' && length % 2 !== 0) { + throw new ERR_INVALID_ARG_VALUE('encoding', encoding, + `is invalid for data of length ${length}`); + } +} + +// Check that the port number is not NaN when coerced to a number, +// is an integer and that it falls within the legal range of port numbers. +function validatePort(port, name = 'Port', allowZero = true) { + if ((typeof port !== 'number' && typeof port !== 'string') || + (typeof port === 'string' && StringPrototypeTrim(port).length === 0) || + +port !== (+port >>> 0) || + port > 0xFFFF || + (port === 0 && !allowZero)) { + throw new ERR_SOCKET_BAD_PORT(name, port, allowZero); + } + return port | 0; +} + +const validateCallback = hideStackFrames((callback) => { + if (typeof callback !== 'function') + throw new ERR_INVALID_CALLBACK(callback); +}); + +const validateAbortSignal = hideStackFrames((signal, name) => { + if (signal !== undefined && + (signal === null || + typeof signal !== 'object' || + !('aborted' in signal))) { + throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal); + } +}); + +const validateFunction = hideStackFrames((value, name) => { + if (typeof value !== 'function') + throw new ERR_INVALID_ARG_TYPE(name, 'Function', value); +}); + +const validatePlainFunction = hideStackFrames((value, name) => { + if (typeof value !== 'function' || isAsyncFunction(value)) + throw new ERR_INVALID_ARG_TYPE(name, 'Function', value); +}); + +const validateUndefined = hideStackFrames((value, name) => { + if (value !== undefined) + throw new ERR_INVALID_ARG_TYPE(name, 'undefined', value); +}); + +module.exports = { + isInt32, + isUint32, + parseFileMode, + validateArray, + validateBoolean, + validateBuffer, + validateEncoding, + validateFunction, + validateInt32, + validateInteger, + validateNumber, + validateObject, + validateOneOf, + validatePlainFunction, + validatePort, + validateSignalName, + validateString, + validateUint32, + validateUndefined, + validateCallback, + validateAbortSignal, +}; diff --git a/lib/stream.js b/lib/stream.js new file mode 100644 index 0000000000..469205bd7a --- /dev/null +++ b/lib/stream.js @@ -0,0 +1,137 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +const { + ObjectDefineProperty, + ObjectKeys, + ReflectApply, +} = require('./internal/primordials'); + +const { + promisify: { custom: customPromisify }, +} = require('./util'); + +const { + streamReturningOperators, + promiseReturningOperators, +} = require('./internal/streams/operators'); + +const { + codes: { + ERR_ILLEGAL_CONSTRUCTOR, + }, +} = require('./internal/errors'); +const compose = require('./internal/streams/compose'); +const { pipeline } = require('./internal/streams/pipeline'); +const { destroyer } = require('./internal/streams/destroy'); +const eos = require('./internal/streams/end-of-stream'); +const internalBuffer = {}; + +const promises = require('./stream/promises'); +const utils = require('./internal/streams/utils'); + +const Stream = module.exports = require('./internal/streams/legacy').Stream; +Stream.isDisturbed = utils.isDisturbed; +Stream.isErrored = utils.isErrored; +Stream.isReadable = utils.isReadable; +Stream.Readable = require('./internal/streams/readable'); +for (const key of ObjectKeys(streamReturningOperators)) { + const op = streamReturningOperators[key]; + function fn(...args) { + if (new.target) { + throw ERR_ILLEGAL_CONSTRUCTOR(); + } + return Stream.Readable.from(ReflectApply(op, this, args)); + } + ObjectDefineProperty(fn, 'name', { value: op.name }); + ObjectDefineProperty(fn, 'length', { value: op.length }); + ObjectDefineProperty(Stream.Readable.prototype, key, { + value: fn, + enumerable: false, + configurable: true, + writable: true, + }); +} +for (const key of ObjectKeys(promiseReturningOperators)) { + const op = promiseReturningOperators[key]; + function fn(...args) { + if (new.target) { + throw ERR_ILLEGAL_CONSTRUCTOR(); + } + return ReflectApply(op, this, args); + } + ObjectDefineProperty(fn, 'name', { value: op.name }); + ObjectDefineProperty(fn, 'length', { value: op.length }); + ObjectDefineProperty(Stream.Readable.prototype, key, { + value: fn, + enumerable: false, + configurable: true, + writable: true, + }); +} +Stream.Writable = require('./internal/streams/writable'); +Stream.Duplex = require('./internal/streams/duplex'); +Stream.Transform = require('./internal/streams/transform'); +Stream.PassThrough = require('./internal/streams/passthrough'); +Stream.pipeline = pipeline; +const { addAbortSignal } = require('./internal/streams/add-abort-signal'); +Stream.addAbortSignal = addAbortSignal; +Stream.finished = eos; +Stream.destroy = 
destroyer; +Stream.compose = compose; + +ObjectDefineProperty(Stream, 'promises', { + configurable: true, + enumerable: true, + get() { + return promises; + } +}); + +ObjectDefineProperty(pipeline, customPromisify, { + enumerable: true, + get() { + return promises.pipeline; + } +}); + +ObjectDefineProperty(eos, customPromisify, { + enumerable: true, + get() { + return promises.finished; + } +}); + +// Backwards-compat with node 0.4.x +Stream.Stream = Stream; + + + Stream._isUint8Array = function isUint8Array(value) { + return value instanceof Uint8Array + }; + +Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk.buffer, + chunk.byteOffset, + chunk.byteLength); +}; diff --git a/lib/stream/promises.js b/lib/stream/promises.js new file mode 100644 index 0000000000..7d5e7f680c --- /dev/null +++ b/lib/stream/promises.js @@ -0,0 +1,41 @@ +'use strict'; + +const { + ArrayPrototypePop, + Promise, +} = require('../internal/primordials'); + +const { + isIterable, + isNodeStream, +} = require('../internal/streams/utils'); + +const { pipelineImpl: pl } = require('../internal/streams/pipeline'); +const { finished } = require('../internal/streams/end-of-stream'); + +function pipeline(...streams) { + return new Promise((resolve, reject) => { + let signal; + let end; + const lastArg = streams[streams.length - 1]; + if (lastArg && typeof lastArg === 'object' && + !isNodeStream(lastArg) && !isIterable(lastArg)) { + const options = ArrayPrototypePop(streams); + signal = options.signal; + end = options.end; + } + + pl(streams, (err, value) => { + if (err) { + reject(err); + } else { + resolve(value); + } + }, { signal, end }); + }); +} + +module.exports = { + finished, + pipeline, +}; diff --git a/lib/util.js b/lib/util.js new file mode 100644 index 0000000000..393a56e534 --- /dev/null +++ b/lib/util.js @@ -0,0 +1,78 @@ +'use strict' + +module.exports = { + debuglog() { + return function () {} + }, + once(callback) { + let called = false + return function (...args) { + if (called) { + return + } + called = true + callback.apply(this, args) + } + }, + promisify: function (fn) { + return new Promise((resolve, reject) => { + fn((err, ...args) => { + if (err) { + return reject(err) + } + return resolve(...args) + }) + }) + }, + createDeferredPromise: function () { + let resolve + let reject + + // eslint-disable-next-line promise/param-names + const promise = new Promise((res, rej) => { + resolve = res + reject = rej + }) + + return { promise, resolve, reject } + }, + isError(err) { + return err instanceof Error + }, + ...require('util').types, + // isAsyncFunction, + // isArrayBufferView, + // isRegExp, + // isDate, + + // isAnyArrayBuffer, + // isDataView, + // isPromise, + // isWeakSet, + // isWeakMap, + // isModuleNamespaceObject, + // isBoxedPrimitive, + // isExternal, + // isArgumentsObject, + // isGeneratorFunction, + // Keep in sync with https://github.com/nodejs/node/blob/master/typings/internalBinding/util.d.ts + propertyFilter: { + ALL_PROPERTIES: 0, + ONLY_ENUMERABLE: 2 + }, + // The following methods are not 100% accurate, but there are no equivalent on user-land JS outside of V8 + getProxyDetails(proxy) { + return undefined + }, + getConstructorName(obj) { + return obj !== 'undefined' ? 'undefined' : obj.constructor?.name ?? 
'Object' + }, + getOwnNonIndexProperties(obj) { + return Object.getOwnPropertyNames(obj) + }, + join(arr, separator) { + return arr.join(separator) + } +} + +module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom') diff --git a/package.json b/package.json index 182f7fd189..b2709ad3fb 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,6 @@ "name": "readable-stream", "version": "3.6.0", "description": "Streams3, a user-land copy of the stream library from Node.js", - "author": "NearForm Ltd", "homepage": "https://github.com/nodejs/readable-stream", "license": "MIT", "licenses": [ @@ -33,28 +32,29 @@ "util": false, "worker_threads": false, "./lib/index.js": "./lib/browser.js", - "./lib/internal/errors.js": "./lib/internal/errors-browser.js" + "./lib/internal/inspect.js": "./lib/internal/inspect-browser.js" }, "scripts": { "build": "node build/build.mjs", "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", - "test-browsers": "airtap test/browser/test-*.js", - "test-browsers-local": "airtap -p local test/browser/test-*.js", + "test:browsers": "airtap test/browser/test-*.js", + "test:browsers:local": "airtap -p local test/browser/test-*.js", "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", "format": "prettier -w src", "lint": "eslint src" }, "dependencies": { - "inherits": "^2.0.4", - "util-deprecate": "^1.0.2" + "aggregate-error": "^3.1.0", + "abort-controller": "^3.0.0" }, "devDependencies": { + "@sinonjs/fake-timers": "^9.1.1", "airtap": "^4.0.4", "airtap-playwright": "^1.0.1", + "airtap-sauce": "^1.1.2", "c8": "^7.11.0", "eslint": "^7.32.0", "eslint-config-standard": "^16.0.3", - "lolex": "^6.0.0", "prettier": "^2.6.2", "tap": "^16.0.1", "tape": "^5.5.2", diff --git a/src/browser.js b/src/browser.js index eb58b61bfe..c38850733a 100644 --- a/src/browser.js +++ b/src/browser.js @@ -1,16 +1,38 @@ 'use strict' -const Readable = require('./_stream_readable') +const CustomStream = require('./stream') +const promises = require('./stream/promises') +const originalDestroy = CustomStream.Readable.destroy -module.exports = Readable -module.exports.Stream = require('./internal/streams/legacy') -module.exports.Readable = Readable -module.exports.Writable = require('./_stream_writable') -module.exports.Duplex = require('./_stream_duplex') -module.exports.Transform = require('./_stream_transform') -module.exports.PassThrough = require('./_stream_passthrough') -module.exports.finished = require('./internal/streams/end-of-stream') -module.exports.pipeline = require('./internal/streams/pipeline') +module.exports = CustomStream.Readable + +// Explicit export naming is needed for ESM +module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer +module.exports._isUint8Array = CustomStream._isUint8Array +module.exports.isDisturbed = CustomStream.isDisturbed +module.exports.isErrored = CustomStream.isErrored +module.exports.isReadable = CustomStream.isReadable +module.exports.Readable = CustomStream.Readable +module.exports.Writable = CustomStream.Writable +module.exports.Duplex = CustomStream.Duplex +module.exports.Transform = CustomStream.Transform +module.exports.PassThrough = CustomStream.PassThrough +module.exports.addAbortSignal = CustomStream.addAbortSignal +module.exports.finished = CustomStream.finished +module.exports.destroy = CustomStream.destroy +module.exports.destroy = originalDestroy +module.exports.pipeline = CustomStream.pipeline +module.exports.compose = CustomStream.compose + 
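+// Also expose the promise-based helpers, mirroring Node's stream/promises
+// namespace. Roughly:
+//
+//   const { pipeline, finished } = CustomStream.promises
+//   await pipeline(source, destination) // resolves/rejects instead of a callback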
+Object.defineProperty(CustomStream, 'promises', { + configurable: true, + enumerable: true, + get() { + return promises + } +}) + +module.exports.Stream = CustomStream.Stream // Allow default importing module.exports.default = module.exports diff --git a/src/index.js b/src/index.js index 7cdb468fad..e9b59ded0e 100644 --- a/src/index.js +++ b/src/index.js @@ -3,28 +3,68 @@ const Stream = require('stream') if (Stream && process.env.READABLE_STREAM === 'disable') { - module.exports = Stream.Readable + const promises = require('stream/promises') - module.exports.Stream = require('./internal/streams/legacy') + // Explicit export naming is needed for ESM + module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer + module.exports._isUint8Array = Stream._isUint8Array + module.exports.isDisturbed = Stream.isDisturbed + module.exports.isErrored = Stream.isErrored + module.exports.isReadable = Stream.isReadable module.exports.Readable = Stream.Readable module.exports.Writable = Stream.Writable module.exports.Duplex = Stream.Duplex module.exports.Transform = Stream.Transform module.exports.PassThrough = Stream.PassThrough + module.exports.addAbortSignal = Stream.addAbortSignal module.exports.finished = Stream.finished + module.exports.destroy = Stream.destroy module.exports.pipeline = Stream.pipeline + module.exports.compose = Stream.compose + + Object.defineProperty(Stream, 'promises', { + configurable: true, + enumerable: true, + get() { + return promises + } + }) + + module.exports.Stream = Stream.Stream } else { - const Readable = require('./_stream_readable') - - module.exports = Readable - module.exports.Stream = require('./internal/streams/legacy') - module.exports.Readable = Readable - module.exports.Writable = require('./_stream_writable') - module.exports.Duplex = require('./_stream_duplex') - module.exports.Transform = require('./_stream_transform') - module.exports.PassThrough = require('./_stream_passthrough') - module.exports.finished = require('./internal/streams/end-of-stream') - module.exports.pipeline = require('./internal/streams/pipeline') + const CustomStream = require('./stream') + const promises = require('./stream/promises') + const originalDestroy = CustomStream.Readable.destroy + + module.exports = CustomStream.Readable + + // Explicit export naming is needed for ESM + module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer + module.exports._isUint8Array = CustomStream._isUint8Array + module.exports.isDisturbed = CustomStream.isDisturbed + module.exports.isErrored = CustomStream.isErrored + module.exports.isReadable = CustomStream.isReadable + module.exports.Readable = CustomStream.Readable + module.exports.Writable = CustomStream.Writable + module.exports.Duplex = CustomStream.Duplex + module.exports.Transform = CustomStream.Transform + module.exports.PassThrough = CustomStream.PassThrough + module.exports.addAbortSignal = CustomStream.addAbortSignal + module.exports.finished = CustomStream.finished + module.exports.destroy = CustomStream.destroy + module.exports.destroy = originalDestroy + module.exports.pipeline = CustomStream.pipeline + module.exports.compose = CustomStream.compose + + Object.defineProperty(CustomStream, 'promises', { + configurable: true, + enumerable: true, + get() { + return promises + } + }) + + module.exports.Stream = CustomStream.Stream } // Allow default importing diff --git a/src/test/browser/test-stream-pipe-error-handling.js b/src/test/browser/test-stream-pipe-error-handling.js index 14016936a2..005167679c 100644 --- 
a/src/test/browser/test-stream-pipe-error-handling.js +++ b/src/test/browser/test-stream-pipe-error-handling.js @@ -46,16 +46,22 @@ test('Error WithoutListener Throws', function (t) { test('Error With Removed Listener Throws', function (t) { t.plan(2) + const onerror = global.onerror + const r = new Readable() const w = new Writable() let removed = false + let caught = false + + global.onerror = () => { + t.notOk(caught) + global.onerror = onerror + } r._read = function () { setTimeout(function () { t.ok(removed) - t.throws(function () { - w.emit('error', new Error('fail')) - }) + w.emit('error', new Error('fail')) }) } @@ -65,11 +71,11 @@ test('Error With Removed Listener Throws', function (t) { removed = true function myOnError(er) { - throw new Error('this should not happen') + caught = true } }) -test('Error With Removed Listener Throws', function (t) { +test('Error Listener Catches When Wrong Listener Is Removed', function (t) { t.plan(2) const r = new Readable() diff --git a/src/test/browser/test-stream-unshift-read-race.js b/src/test/browser/test-stream-unshift-read-race.js index 3afa2beaed..baa6477878 100644 --- a/src/test/browser/test-stream-unshift-read-race.js +++ b/src/test/browser/test-stream-unshift-read-race.js @@ -11,7 +11,7 @@ const test = require('tape') const stream = require('../../lib') test('unshift read race', function (t) { - t.plan(141) + t.plan(139) const hwm = 10 const r = stream.Readable({ highWaterMark: hwm }) @@ -33,7 +33,7 @@ test('unshift read race', function (t) { function push(fast) { t.notOk(pushedNull, 'push() after null push') - const c = pos >= data.length ? null : data.slice(pos, Math.min(pos + n, data.length)) + const c = pos >= data.length ? null : data.slice(pos, pos + n) pushedNull = c === null if (fast) { pos += n @@ -48,15 +48,22 @@ test('unshift read race', function (t) { if (c === null) { pushError() } - }) + }, 1) } } } function pushError() { - t.throws(function () { - r.push(Buffer.alloc(1)) - }) + r.unshift(Buffer.allocUnsafe(1)) + w.end() + + const onerror = global.onerror + global.onerror = (_u1, _u2, _u3, _u4, gotErr) => { + t.ok(true) + global.onerror = onerror + } + + r.push(Buffer.allocUnsafe(1)) } const w = stream.Writable() @@ -66,15 +73,7 @@ test('unshift read race', function (t) { cb() } - let ended = false - r.on('end', function () { - t.notOk(ended, 'end emitted more than once') - t.throws(function () { - r.unshift(Buffer.alloc(1)) - }) - ended = true - w.end() - }) + r.on('end', t.fail) r.on('readable', function () { let chunk diff --git a/src/test/browser/test-stream2-pipe-error-handling.js b/src/test/browser/test-stream2-pipe-error-handling.js index a3b053e52c..2eef740e9c 100644 --- a/src/test/browser/test-stream2-pipe-error-handling.js +++ b/src/test/browser/test-stream2-pipe-error-handling.js @@ -76,14 +76,14 @@ test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) }) const err = new Error('This stream turned into bacon.') + const onerror = global.onerror - let gotErr = null - try { - dest.emit('error', err) - } catch (e) { - gotErr = e + dest.emit('error', err) + + global.onerror = (_u1, _u2, _u3, _u4, gotErr) => { + t.strictEqual(gotErr, err) + t.strictEqual(unpipedSource, source) + t.strictEqual(unpipedDest, dest) + global.onerror = onerror } - t.strictEqual(gotErr, err) - t.strictEqual(unpipedSource, source) - t.strictEqual(unpipedDest, dest) }) diff --git a/src/test/browser/test-stream2-readable-legacy-drain.js b/src/test/browser/test-stream2-readable-legacy-drain.js index 
c4c8ca1816..40a042e72e 100644 --- a/src/test/browser/test-stream2-readable-legacy-drain.js +++ b/src/test/browser/test-stream2-readable-legacy-drain.js @@ -41,12 +41,5 @@ test('readable legacy drain', function (t) { t.ok(true, 'wended') } - // Just for kicks, let's mess with the drain count. - // This verifies that even if it gets negative in the - // pipe() cleanup function, we'll still function properly. - r.on('readable', function () { - w.emit('drain') - }) - r.pipe(w) }) diff --git a/src/test/browser/test-stream2-transform.js b/src/test/browser/test-stream2-transform.js index 4fa2edb25b..f32063c4c7 100644 --- a/src/test/browser/test-stream2-transform.js +++ b/src/test/browser/test-stream2-transform.js @@ -4,7 +4,7 @@ const test = require('tape') const { PassThrough, Transform } = require('../../lib') test('writable side consumption', function (t) { - t.plan(4) + t.plan(3) const tx = new Transform({ highWaterMark: 10 @@ -24,12 +24,11 @@ test('writable side consumption', function (t) { t.equal(tx._readableState.length, 10) t.equal(transformed, 10) - t.equal(tx._transformState.writechunk.length, 5) t.same( tx._writableState.getBuffer().map(function (c) { return c.chunk.length }), - [6, 7, 8, 9, 10] + [5, 6, 7, 8, 9, 10] ) }) diff --git a/src/test/browser/test-stream2-writable.js b/src/test/browser/test-stream2-writable.js index cbcedf6a89..3cce73b00f 100644 --- a/src/test/browser/test-stream2-writable.js +++ b/src/test/browser/test-stream2-writable.js @@ -311,7 +311,7 @@ test('encoding should be ignored for buffers', function (t) { test('writables are not pipable', function (t) { t.plan(1) - const w = new Writable() + const w = new Writable({ autoDestroy: false }) w._write = function () {} let gotError = false w.on('error', function (er) { diff --git a/src/test/ours/test-errors.js b/src/test/ours/test-errors.js index bd9c810c51..079e1017a3 100644 --- a/src/test/ours/test-errors.js +++ b/src/test/ours/test-errors.js @@ -5,7 +5,7 @@ const { codes: errors } = require('../../lib/internal/errors') function checkError(err, Base, name, code, message) { t.ok(err instanceof Base) - t.equal(err.name, `${name} [${code}]`) + t.equal(err.name, name) t.equal(err.code, code) t.equal(err.message, message) } @@ -14,19 +14,19 @@ function checkError(err, Base, name, code, message) { t.plan(17 * 4) checkError( - new errors.ERR_INVALID_OPT_VALUE('name', 0), + new errors.ERR_INVALID_ARG_VALUE('name', 0), TypeError, 'TypeError', - 'ERR_INVALID_OPT_VALUE', - 'The value "0" is invalid for option "name"' + 'ERR_INVALID_ARG_VALUE', + "The argument 'name' is invalid. Received 0" ) checkError( - new errors.ERR_INVALID_OPT_VALUE('name', undefined), + new errors.ERR_INVALID_ARG_VALUE('name', undefined), TypeError, 'TypeError', - 'ERR_INVALID_OPT_VALUE', - 'The value "undefined" is invalid for option "name"' + 'ERR_INVALID_ARG_VALUE', + "The argument 'name' is invalid. Received undefined" ) checkError( @@ -34,7 +34,7 @@ checkError( TypeError, 'TypeError', 'ERR_INVALID_ARG_TYPE', - 'The "chunk" argument must be one of type string, Buffer, or Uint8Array. Received type number' + 'The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. Received type number (0)' ) checkError( @@ -42,7 +42,7 @@ checkError( TypeError, 'TypeError', 'ERR_INVALID_ARG_TYPE', - 'The first argument must not be of type string. Received type string' + "The first argument must be not string. 
Received type string ('foo')" ) checkError( @@ -50,7 +50,7 @@ checkError( TypeError, 'TypeError', 'ERR_INVALID_ARG_TYPE', - 'The "obj.prop" property must be of type string. Received type undefined' + 'The "obj.prop" property must be of type string. Received undefined' ) checkError( diff --git a/src/test/ours/test-lolex-fake-timers.js b/src/test/ours/test-fake-timers.js similarity index 85% rename from src/test/ours/test-lolex-fake-timers.js rename to src/test/ours/test-fake-timers.js index 3198952962..ec16c10f4d 100644 --- a/src/test/ours/test-lolex-fake-timers.js +++ b/src/test/ours/test-fake-timers.js @@ -3,7 +3,7 @@ require('../common') const t = require('tap') const util = require('util') -const lolex = require('lolex') +const fakeTimers = require('@sinonjs/fake-timers') const Transform = require('../../lib').Transform t.plan(1) @@ -14,7 +14,7 @@ function MyTransform() { util.inherits(MyTransform, Transform) -const clock = lolex.install({ toFake: ['setImmediate', 'nextTick'] }) +const clock = fakeTimers.install({ toFake: ['setImmediate', 'nextTick'] }) let stream2DataCalled = false const stream = new MyTransform() diff --git a/src/util.js b/src/util.js index e1b6d12a62..393a56e534 100644 --- a/src/util.js +++ b/src/util.js @@ -1,18 +1,11 @@ 'use strict' -let debugUtil -try { - debugUtil = require('util') -} catch (e) { - // No-op -} - module.exports = { - inherits: require('inherits'), - debuglog: debugUtil?.debuglog ? debugUtil.debuglog : () => function () {}, + debuglog() { + return function () {} + }, once(callback) { let called = false - return function (...args) { if (called) { return @@ -21,29 +14,65 @@ module.exports = { callback.apply(this, args) } }, - // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args - format(format, ...args) { - return format.replace(/%([sdifj])/g, function (...[_unused, type]) { - const replacement = args.shift() - - if (type === 'f') { - return replacement.toFixed(6) - } else if (type === 'j') { - return JSON.stringify(replacement) - } else { - return replacement.toString() - } - }) - }, promisify: function (fn) { return new Promise((resolve, reject) => { fn((err, ...args) => { if (err) { return reject(err) } - return resolve(...args) }) }) + }, + createDeferredPromise: function () { + let resolve + let reject + + // eslint-disable-next-line promise/param-names + const promise = new Promise((res, rej) => { + resolve = res + reject = rej + }) + + return { promise, resolve, reject } + }, + isError(err) { + return err instanceof Error + }, + ...require('util').types, + // isAsyncFunction, + // isArrayBufferView, + // isRegExp, + // isDate, + + // isAnyArrayBuffer, + // isDataView, + // isPromise, + // isWeakSet, + // isWeakMap, + // isModuleNamespaceObject, + // isBoxedPrimitive, + // isExternal, + // isArgumentsObject, + // isGeneratorFunction, + // Keep in sync with https://github.com/nodejs/node/blob/master/typings/internalBinding/util.d.ts + propertyFilter: { + ALL_PROPERTIES: 0, + ONLY_ENUMERABLE: 2 + }, + // The following methods are not 100% accurate, but there are no equivalent on user-land JS outside of V8 + getProxyDetails(proxy) { + return undefined + }, + getConstructorName(obj) { + return obj !== 'undefined' ? 'undefined' : obj.constructor?.name ?? 
'Object' + }, + getOwnNonIndexProperties(obj) { + return Object.getOwnPropertyNames(obj) + }, + join(arr, separator) { + return arr.join(separator) } } + +module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom') diff --git a/src/uv-browser.js b/src/uv-browser.js deleted file mode 100644 index c293a47b1c..0000000000 --- a/src/uv-browser.js +++ /dev/null @@ -1,93 +0,0 @@ -'use strict' - -// Regenerate this file if needed by requiring process.binding('uv') in the REPL - -module.exports = { - errmap: new Map([ - [-7, ['E2BIG', 'argument list too long']], - [-13, ['EACCES', 'permission denied']], - [-48, ['EADDRINUSE', 'address already in use']], - [-49, ['EADDRNOTAVAIL', 'address not available']], - [-47, ['EAFNOSUPPORT', 'address family not supported']], - [-35, ['EAGAIN', 'resource temporarily unavailable']], - [-3000, ['EAI_ADDRFAMILY', 'address family not supported']], - [-3001, ['EAI_AGAIN', 'temporary failure']], - [-3002, ['EAI_BADFLAGS', 'bad ai_flags value']], - [-3013, ['EAI_BADHINTS', 'invalid value for hints']], - [-3003, ['EAI_CANCELED', 'request canceled']], - [-3004, ['EAI_FAIL', 'permanent failure']], - [-3005, ['EAI_FAMILY', 'ai_family not supported']], - [-3006, ['EAI_MEMORY', 'out of memory']], - [-3007, ['EAI_NODATA', 'no address']], - [-3008, ['EAI_NONAME', 'unknown node or service']], - [-3009, ['EAI_OVERFLOW', 'argument buffer overflow']], - [-3014, ['EAI_PROTOCOL', 'resolved protocol is unknown']], - [-3010, ['EAI_SERVICE', 'service not available for socket type']], - [-3011, ['EAI_SOCKTYPE', 'socket type not supported']], - [-37, ['EALREADY', 'connection already in progress']], - [-9, ['EBADF', 'bad file descriptor']], - [-16, ['EBUSY', 'resource busy or locked']], - [-89, ['ECANCELED', 'operation canceled']], - [-4080, ['ECHARSET', 'invalid Unicode character']], - [-53, ['ECONNABORTED', 'software caused connection abort']], - [-61, ['ECONNREFUSED', 'connection refused']], - [-54, ['ECONNRESET', 'connection reset by peer']], - [-39, ['EDESTADDRREQ', 'destination address required']], - [-17, ['EEXIST', 'file already exists']], - [-14, ['EFAULT', 'bad address in system call argument']], - [-27, ['EFBIG', 'file too large']], - [-65, ['EHOSTUNREACH', 'host is unreachable']], - [-4, ['EINTR', 'interrupted system call']], - [-22, ['EINVAL', 'invalid argument']], - [-5, ['EIO', 'i/o error']], - [-56, ['EISCONN', 'socket is already connected']], - [-21, ['EISDIR', 'illegal operation on a directory']], - [-62, ['ELOOP', 'too many symbolic links encountered']], - [-24, ['EMFILE', 'too many open files']], - [-40, ['EMSGSIZE', 'message too long']], - [-63, ['ENAMETOOLONG', 'name too long']], - [-50, ['ENETDOWN', 'network is down']], - [-51, ['ENETUNREACH', 'network is unreachable']], - [-23, ['ENFILE', 'file table overflow']], - [-55, ['ENOBUFS', 'no buffer space available']], - [-19, ['ENODEV', 'no such device']], - [-2, ['ENOENT', 'no such file or directory']], - [-12, ['ENOMEM', 'not enough memory']], - [-4056, ['ENONET', 'machine is not on the network']], - [-42, ['ENOPROTOOPT', 'protocol not available']], - [-28, ['ENOSPC', 'no space left on device']], - [-78, ['ENOSYS', 'function not implemented']], - [-57, ['ENOTCONN', 'socket is not connected']], - [-20, ['ENOTDIR', 'not a directory']], - [-66, ['ENOTEMPTY', 'directory not empty']], - [-38, ['ENOTSOCK', 'socket operation on non-socket']], - [-45, ['ENOTSUP', 'operation not supported on socket']], - [-84, ['EOVERFLOW', 'value too large for defined data type']], - [-1, ['EPERM', 'operation not 
permitted']], - [-32, ['EPIPE', 'broken pipe']], - [-100, ['EPROTO', 'protocol error']], - [-43, ['EPROTONOSUPPORT', 'protocol not supported']], - [-41, ['EPROTOTYPE', 'protocol wrong type for socket']], - [-34, ['ERANGE', 'result too large']], - [-30, ['EROFS', 'read-only file system']], - [-58, ['ESHUTDOWN', 'cannot send after transport endpoint shutdown']], - [-29, ['ESPIPE', 'invalid seek']], - [-3, ['ESRCH', 'no such process']], - [-60, ['ETIMEDOUT', 'connection timed out']], - [-26, ['ETXTBSY', 'text file is busy']], - [-18, ['EXDEV', 'cross-device link not permitted']], - [-4094, ['UNKNOWN', 'unknown error']], - [-4095, ['EOF', 'end of file']], - [-6, ['ENXIO', 'no such device or address']], - [-31, ['EMLINK', 'too many links']], - [-64, ['EHOSTDOWN', 'host is down']], - [-4030, ['EREMOTEIO', 'remote I/O error']], - [-25, ['ENOTTY', 'inappropriate ioctl for device']], - [-79, ['EFTYPE', 'inappropriate file type or format']], - [-92, ['EILSEQ', 'illegal byte sequence']], - [-44, ['ESOCKTNOSUPPORT', 'socket type not supported']] - ]), - UV_EAI_MEMORY: -3006, - UV_EAI_NODATA: -3007, - UV_EAI_NONAME: -3008 -} diff --git a/tap.yml b/tap.yml index dfaff10006..8de8ebe6d5 100644 --- a/tap.yml +++ b/tap.yml @@ -1,5 +1,6 @@ --- -bail: true +bail: false coverage: false node-arg: - --expose-internals + - --no-warnings diff --git a/test/browser/test-stream-big-packet.js b/test/browser/test-stream-big-packet.js new file mode 100644 index 0000000000..38e4b2e2ac --- /dev/null +++ b/test/browser/test-stream-big-packet.js @@ -0,0 +1,68 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Transform } = require('../../lib') + +test('big packet', function (t) { + t.plan(3) + + let passed = false + + function PassThrough() { + Transform.call(this) + } + inherits(PassThrough, Transform) + + PassThrough.prototype._transform = function (chunk, encoding, done) { + this.push(chunk) + done() + } + + function TestStream() { + Transform.call(this) + } + inherits(TestStream, Transform) + + TestStream.prototype._transform = function (chunk, encoding, done) { + if (!passed) { + // Char 'a' only exists in the last write + passed = indexOf(chunk.toString(), 'a') >= 0 + } + if (passed) { + t.ok(passed) + } + done() + } + + const s1 = new PassThrough() + const s2 = new PassThrough() + const s3 = new TestStream() + + s1.pipe(s3) + // Don't let s2 auto close which may close s3 + s2.pipe(s3, { end: false }) + + // We must write a buffer larger than highWaterMark + const big = Buffer.alloc(s1._writableState.highWaterMark + 1) + big.fill('x') + + // Since big is larger than highWaterMark, it will be buffered internally. + t.notOk(s1.write(big)) + + // 'tiny' is small enough to pass through internal buffer. 
+ t.ok(s2.write('tiny')) + + // Write some small data in next IO loop, which will never be written to s3 + // Because 'drain' event is not emitted from s1 and s1 is still paused + setImmediate(s1.write.bind(s1), 'later') + + function indexOf(xs, x) { + for (let i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) { + return i + } + } + return -1 + } +}) diff --git a/test/browser/test-stream-big-push.js b/test/browser/test-stream-big-push.js new file mode 100644 index 0000000000..46b2524df6 --- /dev/null +++ b/test/browser/test-stream-big-push.js @@ -0,0 +1,70 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('big push', function (t) { + t.plan(10) + + const str = 'asdfasdfasdfasdfasdf' + + const r = new Readable({ + highWaterMark: 5, + encoding: 'utf8' + }) + + let reads = 0 + let eofed = false + let ended = false + + r._read = function (n) { + if (reads === 0) { + setTimeout(function () { + r.push(str) + }) + reads++ + } else if (reads === 1) { + const ret = r.push(str) + t.equal(ret, false) + reads++ + } else { + t.notOk(eofed) + eofed = true + r.push(null) + } + } + + r.on('end', function () { + ended = true + }) + + // push some data in to start. + // we've never gotten any read event at this point. + const ret = r.push(str) + + // should be false. > hwm + t.notOk(ret) + let chunk = r.read() + t.equal(chunk, str) + + chunk = r.read() + t.equal(chunk, null) + + r.once('readable', function () { + // this time, we'll get *all* the remaining data, because + // it's been added synchronously, as the read WOULD take + // us below the hwm, and so it triggered a _read() again, + // which synchronously added more, which we then return. + chunk = r.read() + t.equal(chunk, str + str) + + chunk = r.read() + t.equal(chunk, null) + }) + + r.on('end', function () { + t.ok(eofed) + t.ok(ended) + t.equal(reads, 2) + }) +}) diff --git a/test/browser/test-stream-duplex.js b/test/browser/test-stream-duplex.js new file mode 100644 index 0000000000..ec17de6c4e --- /dev/null +++ b/test/browser/test-stream-duplex.js @@ -0,0 +1,36 @@ +'use strict' + +const test = require('tape') +const { Duplex } = require('../../lib') + +test('duplex', function (t) { + t.plan(4) + + const stream = new Duplex({ objectMode: true }) + + t.ok(stream._readableState.objectMode) + t.ok(stream._writableState.objectMode) + + let written + let read + + stream._write = function (obj, _, cb) { + written = obj + cb() + } + + stream._read = function () {} + + stream.on('data', function (obj) { + read = obj + }) + + stream.on('end', function () { + t.equal(read.val, 1) + t.equal(written.val, 2) + }) + + stream.push({ val: 1 }) + stream.end({ val: 2 }) + stream.push(null) +}) diff --git a/test/browser/test-stream-end-paused.js b/test/browser/test-stream-end-paused.js new file mode 100644 index 0000000000..927fa24943 --- /dev/null +++ b/test/browser/test-stream-end-paused.js @@ -0,0 +1,30 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('end pause', function (t) { + t.plan(2) + + const stream = new Readable() + let calledRead = false + + stream._read = function () { + t.notOk(calledRead) + calledRead = true + this.push(null) + } + + stream.on('data', function () { + throw new Error('should not ever get data') + }) + + stream.pause() + + setTimeout(function () { + stream.on('end', function () { + t.ok(calledRead) + }) + stream.resume() + }) +}) diff --git a/test/browser/test-stream-finished.js b/test/browser/test-stream-finished.js new file 
mode 100644 index 0000000000..8b9190d8c9 --- /dev/null +++ b/test/browser/test-stream-finished.js @@ -0,0 +1,65 @@ +'use strict' + +const test = require('tape') +const { Writable, Readable, Transform, finished } = require('../../lib') + +test('readable finished', function (t) { + t.plan(1) + + const rs = new Readable({ + read: function read() {} + }) + + finished(rs, (err) => { + t.ifErr(err) + }) + + rs.push(null) + rs.resume() +}) + +test('writable finished', function (t) { + t.plan(1) + + const ws = new Writable({ + write: function write(data, enc, cb) { + cb() + } + }) + + finished(ws, (err) => { + t.ifErr(err) + }) + + ws.end() +}) + +test('transform finished', function (t) { + t.plan(3) + + const tr = new Transform({ + transform: function transform(data, enc, cb) { + cb() + } + }) + + let finish = false + let ended = false + + tr.on('end', function () { + ended = true + }) + + tr.on('finish', function () { + finish = true + }) + + finished(tr, (err) => { + t.ifErr(err) + t.ok(finish) + t.ok(ended) + }) + + tr.end() + tr.resume() +}) diff --git a/test/browser/test-stream-ispaused.js b/test/browser/test-stream-ispaused.js new file mode 100644 index 0000000000..27cb33ad11 --- /dev/null +++ b/test/browser/test-stream-ispaused.js @@ -0,0 +1,27 @@ +'use strict' + +const test = require('tape') +const stream = require('../../lib') + +test('is paused', function (t) { + t.plan(4) + + const readable = new stream.Readable() + + // _read is a noop, here. + readable._read = () => {} + + // default state of a stream is not "paused" + t.notOk(readable.isPaused()) + + // make the stream start flowing... + readable.on('data', () => {}) + + // still not paused. + t.notOk(readable.isPaused()) + + readable.pause() + t.ok(readable.isPaused()) + readable.resume() + t.notOk(readable.isPaused()) +}) diff --git a/test/browser/test-stream-pipe-after-end.js b/test/browser/test-stream-pipe-after-end.js new file mode 100644 index 0000000000..24401fb140 --- /dev/null +++ b/test/browser/test-stream-pipe-after-end.js @@ -0,0 +1,67 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Readable, Writable } = require('../../lib') + +test('pipe after end', function (t) { + t.plan(4) + + function TestReadable(opt) { + if (!(this instanceof TestReadable)) { + return new TestReadable(opt) + } + Readable.call(this, opt) + this._ended = false + } + inherits(TestReadable, Readable) + + TestReadable.prototype._read = function (n) { + if (this._ended) { + this.emit('error', new Error('_read called twice')) + } + this._ended = true + this.push(null) + } + + function TestWritable(opt) { + if (!(this instanceof TestWritable)) { + return new TestWritable(opt) + } + Writable.call(this, opt) + this._written = [] + } + inherits(TestWritable, Writable) + + TestWritable.prototype._write = function (chunk, encoding, cb) { + this._written.push(chunk) + cb() + } + + // this one should not emit 'end' until we read() from it later. + const ender = new TestReadable() + let enderEnded = false + + // what happens when you pipe() a Readable that's already ended? 
+ const piper = new TestReadable() + // pushes EOF null, and length=0, so this will trigger 'end' + piper.read() + + setTimeout(function () { + ender.on('end', function () { + enderEnded = true + t.ok(true, 'enderEnded') + }) + t.notOk(enderEnded) + + const c = ender.read() + t.equal(c, null) + + const w = new TestWritable() + w.on('finish', function () { + t.ok(true, 'writableFinished') + }) + + piper.pipe(w) + }) +}) diff --git a/test/browser/test-stream-pipe-cleanup-pause.js b/test/browser/test-stream-pipe-cleanup-pause.js new file mode 100644 index 0000000000..e49cf4c5f1 --- /dev/null +++ b/test/browser/test-stream-pipe-cleanup-pause.js @@ -0,0 +1,46 @@ +'use strict' + +const test = require('tape') +const stream = require('../../lib') + +test('pipe cleanup pause', function (t) { + t.plan(3) + + const reader = new stream.Readable() + const writer1 = new stream.Writable() + const writer2 = new stream.Writable() + + // 560000 is chosen here because it is larger than the (default) highWaterMark + // and will cause `.write()` to return false + // See: https://github.com/nodejs/node/issues/2323 + const buffer = Buffer.alloc(560000) + + reader._read = function () {} + + writer1._write = function (chunk, encoding, cb) { + this.emit('chunk-received') + cb() + } + + writer1.on('chunk-received', function () { + reader.unpipe(writer1) + reader.pipe(writer2) + reader.push(buffer) + + setImmediate(function () { + reader.push(buffer) + + setImmediate(function () { + reader.push(buffer) + }) + }) + }) + + writer2._write = function (chunk, encoding, cb) { + t.ok(true) + cb() + } + + reader.pipe(writer1) + reader.push(buffer) +}) diff --git a/test/browser/test-stream-pipe-cleanup.js b/test/browser/test-stream-pipe-cleanup.js new file mode 100644 index 0000000000..8350a8297a --- /dev/null +++ b/test/browser/test-stream-pipe-cleanup.js @@ -0,0 +1,115 @@ +'use strict' +// This test asserts that Stream.prototype.pipe does not leave listeners +// hanging on the source or dest. 
+ +const test = require('tape') +const inherits = require('inherits') +const { Stream } = require('../../lib') + +test('pipe cleanup', function (t) { + t.plan(27) + + if (/^v0\.8\./.test(process.version)) { + return + } + + function Writable() { + this.writable = true + this.endCalls = 0 + Stream.call(this) + } + inherits(Writable, Stream) + + Writable.prototype.end = function () { + this.endCalls++ + } + + Writable.prototype.destroy = function () { + this.endCalls++ + } + + function Readable() { + this.readable = true + Stream.call(this) + } + + inherits(Readable, Stream) + + Readable.prototype._read = function () {} + + function Duplex() { + this.readable = true + Writable.call(this) + } + + inherits(Duplex, Writable) + + Duplex.prototype._read = function () {} + + let i = 0 + let r + let w = new Writable() + const limit = 100 + + for (i = 0; i < limit; i++) { + r = new Readable() + r.pipe(w) + r.emit('end') + } + t.equal(0, r.listeners('end').length) + t.equal(limit, w.endCalls) + + w.endCalls = 0 + + for (i = 0; i < limit; i++) { + r = new Readable() + r.pipe(w) + r.emit('close') + } + t.equal(0, r.listeners('close').length) + t.equal(limit, w.endCalls) + + w.endCalls = 0 + + r = new Readable() + + for (i = 0; i < limit; i++) { + w = new Writable() + r.pipe(w) + w.emit('close') + } + t.equal(0, w.listeners('close').length) + + r = new Readable() + w = new Writable() + const d = new Duplex() + r.pipe(d) // pipeline A + d.pipe(w) // pipeline B + t.equal(r.listeners('end').length, 2) // A.onend, A.cleanup + t.equal(r.listeners('close').length, 2) // A.onclose, A.cleanup + t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup + t.equal(d.listeners('close').length, 3) // A.cleanup, B.onclose, B.cleanup + t.equal(w.listeners('end').length, 0) + t.equal(w.listeners('close').length, 1) // B.cleanup + + r.emit('end') + t.equal(d.endCalls, 1) + t.equal(w.endCalls, 0) + t.equal(r.listeners('end').length, 0) + t.equal(r.listeners('close').length, 0) + t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup + t.equal(d.listeners('close').length, 2) // B.onclose, B.cleanup + t.equal(w.listeners('end').length, 0) + t.equal(w.listeners('close').length, 1) // B.cleanup + + d.emit('end') + t.equal(d.endCalls, 1) + t.equal(w.endCalls, 1) + t.equal(r.listeners('end').length, 0) + t.equal(r.listeners('close').length, 0) + t.equal(d.listeners('end').length, 0) + t.equal(d.listeners('close').length, 0) + t.equal(w.listeners('end').length, 0) + t.equal(w.listeners('close').length, 0) + d.end() +}) diff --git a/test/browser/test-stream-pipe-error-handling.js b/test/browser/test-stream-pipe-error-handling.js new file mode 100644 index 0000000000..005167679c --- /dev/null +++ b/test/browser/test-stream-pipe-error-handling.js @@ -0,0 +1,105 @@ +'use strict' + +const test = require('tape') +const { Readable, Writable, Stream } = require('../../lib') + +test('Error Listener Catches', function (t) { + t.plan(1) + + const source = new Stream() + const dest = new Stream() + + source._read = function () {} + source.pipe(dest) + + let gotErr = null + source.on('error', function (err) { + gotErr = err + }) + + const err = new Error('This stream turned into bacon.') + source.emit('error', err) + t.strictEqual(gotErr, err) +}) + +test('Error WithoutListener Throws', function (t) { + t.plan(1) + + const source = new Stream() + const dest = new Stream() + + source._read = function () {} + source.pipe(dest) + + const err = new Error('This stream turned into bacon.') + + let gotErr = null + try { + 
source.emit('error', err) + } catch (e) { + gotErr = e + } + + t.strictEqual(gotErr, err) +}) + +test('Error With Removed Listener Throws', function (t) { + t.plan(2) + + const onerror = global.onerror + + const r = new Readable() + const w = new Writable() + let removed = false + let caught = false + + global.onerror = () => { + t.notOk(caught) + global.onerror = onerror + } + + r._read = function () { + setTimeout(function () { + t.ok(removed) + w.emit('error', new Error('fail')) + }) + } + + w.on('error', myOnError) + r.pipe(w) + w.removeListener('error', myOnError) + removed = true + + function myOnError(er) { + caught = true + } +}) + +test('Error Listener Catches When Wrong Listener Is Removed', function (t) { + t.plan(2) + + const r = new Readable() + const w = new Writable() + let removed = false + let caught = false + + r._read = function () { + setTimeout(function () { + t.ok(removed) + w.emit('error', new Error('fail')) + }) + } + + w.on('error', myOnError) + w._write = function () {} + + r.pipe(w) + // Removing some OTHER random listener should not do anything + w.removeListener('error', function () {}) + removed = true + + function myOnError(er) { + t.notOk(caught) + caught = true + } +}) diff --git a/test/browser/test-stream-pipe-event.js b/test/browser/test-stream-pipe-event.js new file mode 100644 index 0000000000..e39109f645 --- /dev/null +++ b/test/browser/test-stream-pipe-event.js @@ -0,0 +1,34 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Stream } = require('../../lib') + +test('pipe event', function (t) { + t.plan(1) + + function Writable() { + this.writable = true + Stream.call(this) + } + inherits(Writable, Stream) + + function Readable() { + this.readable = true + Stream.call(this) + } + inherits(Readable, Stream) + + let passed = false + + const w = new Writable() + w.on('pipe', function (src) { + passed = true + }) + + const r = new Readable() + r._read = function () {} + r.pipe(w) + + t.ok(passed) +}) diff --git a/test/browser/test-stream-pipe-without-listenerCount.js b/test/browser/test-stream-pipe-without-listenerCount.js new file mode 100644 index 0000000000..448d362a7c --- /dev/null +++ b/test/browser/test-stream-pipe-without-listenerCount.js @@ -0,0 +1,20 @@ +'use strict' + +const test = require('tape') +const { Stream } = require('../../lib') + +test('pipe without listenerCount on read', function (t) { + t.plan(1) + + const r = new Stream({ + read: function () {} + }) + r.listenerCount = undefined + + const w = new Stream() + w.on('pipe', function () { + r.emit('error', new Error('Readable Error')) + }) + + t.throws(() => r.pipe(w), 'TypeError: this.listenerCount is not a function') +}) diff --git a/test/browser/test-stream-pipeline.js b/test/browser/test-stream-pipeline.js new file mode 100644 index 0000000000..0e1180a7f2 --- /dev/null +++ b/test/browser/test-stream-pipeline.js @@ -0,0 +1,109 @@ +'use strict' + +const test = require('tape') +const { Readable, Writable, pipeline } = require('../../lib') + +test('pipeline', function (t) { + t.plan(3) + + let finished = false + + const processed = [] + const expected = [Buffer.from('a'), Buffer.from('b'), Buffer.from('c')] + + const read = new Readable({ + read: function read() {} + }) + + const write = new Writable({ + write: function write(data, enc, cb) { + processed.push(data) + cb() + } + }) + + write.on('finish', function () { + finished = true + }) + + for (let i = 0; i < expected.length; i++) { + read.push(expected[i]) + } + + read.push(null) + 
pipeline(read, write, (err) => { + t.ifErr(err) + t.ok(finished) + t.deepEqual(processed, expected) + }) +}) + +test('pipeline missing args', function (t) { + t.plan(3) + + const _read = new Readable({ + read: function read() {} + }) + + t.throws(function () { + pipeline(_read, function () {}) + }) + + t.throws(function () { + pipeline(function () {}) + }) + + t.throws(function () { + pipeline() + }) +}) + +test('pipeline error', function (t) { + t.plan(1) + + const _read2 = new Readable({ + read: function read() {} + }) + + const _write = new Writable({ + write: function write(data, enc, cb) { + cb() + } + }) + + _read2.push('data') + + setImmediate(function () { + return _read2.destroy() + }) + + pipeline(_read2, _write, (err) => { + t.equal(err.message, 'Premature close') + }) +}) + +test('pipeline destroy', function (t) { + t.plan(2) + + const _read3 = new Readable({ + read: function read() {} + }) + + const _write2 = new Writable({ + write: function write(data, enc, cb) { + cb() + } + }) + + _read3.push('data') + + setImmediate(function () { + return _read3.destroy(new Error('kaboom')) + }) + + const dst = pipeline(_read3, _write2, (err) => { + t.equal(err.message, 'kaboom') + }) + + t.equal(dst, _write2) +}) diff --git a/test/browser/test-stream-push-order.js b/test/browser/test-stream-push-order.js new file mode 100644 index 0000000000..e5aef44618 --- /dev/null +++ b/test/browser/test-stream-push-order.js @@ -0,0 +1,32 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('push order', function (t) { + t.plan(1) + + const s = new Readable({ + highWaterMark: 20, + encoding: 'ascii' + }) + + const list = ['1', '2', '3', '4', '5', '6'] + + s._read = function (n) { + const one = list.shift() + if (!one) { + s.push(null) + } else { + const two = list.shift() + s.push(one) + s.push(two) + } + } + + s.read(0) + + setTimeout(function () { + t.equals(s._readableState.buffer.join(','), '1,2,3,4,5,6') + }) +}) diff --git a/test/browser/test-stream-push-strings.js b/test/browser/test-stream-push-strings.js new file mode 100644 index 0000000000..5344cdf1e0 --- /dev/null +++ b/test/browser/test-stream-push-strings.js @@ -0,0 +1,55 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Readable } = require('../../lib') + +test('push strings', function (t) { + t.plan(2) + + function MyStream(options) { + Readable.call(this, options) + this._chunks = 3 + } + + inherits(MyStream, Readable) + + MyStream.prototype._read = function (n) { + switch (this._chunks--) { + case 0: + return this.push(null) + case 1: + return setTimeout( + function () { + this.push('last chunk') + }.bind(this), + 100 + ) + case 2: + return this.push('second to last chunk') + case 3: + return process.nextTick( + function () { + this.push('first chunk') + }.bind(this) + ) + default: + throw new Error('?') + } + } + const expect = ['first chunksecond to last chunk', 'last chunk'] + + const ms = new MyStream() + const results = [] + ms.on('readable', function () { + let chunk + while ((chunk = ms.read()) !== null) { + results.push(chunk + '') + } + }) + + ms.on('end', function () { + t.equal(ms._chunks, -1) + t.deepEqual(results, expect) + }) +}) diff --git a/test/browser/test-stream-readable-constructor-set-methods.js b/test/browser/test-stream-readable-constructor-set-methods.js new file mode 100644 index 0000000000..9d1fd3f234 --- /dev/null +++ b/test/browser/test-stream-readable-constructor-set-methods.js @@ -0,0 +1,23 @@ +'use strict' + 
+const test = require('tape') +const { Readable } = require('../../lib') + +test('readable constructor set methods', function (t) { + t.plan(2) + + let _readCalled = false + + function _read(n) { + _readCalled = true + this.push(null) + } + + const r = new Readable({ read: _read }) + r.resume() + + setTimeout(function () { + t.equal(r._read, _read) + t.ok(_readCalled) + }) +}) diff --git a/test/browser/test-stream-readable-event.js b/test/browser/test-stream-readable-event.js new file mode 100644 index 0000000000..ae611f6b83 --- /dev/null +++ b/test/browser/test-stream-readable-event.js @@ -0,0 +1,105 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('readable events - first', (t) => { + t.plan(3) + + // First test, not reading when the readable is added. + // make sure that on('readable', ...) triggers a readable event. + const r = new Readable({ + highWaterMark: 3 + }) + + let _readCalled = false + r._read = function (n) { + _readCalled = true + } + + // This triggers a 'readable' event, which is lost. + r.push(Buffer.from('blerg')) + + let caughtReadable = false + setTimeout(function () { + // we're testing what we think we are + t.notOk(r._readableState.reading) + r.on('readable', function () { + caughtReadable = true + setTimeout(function () { + // we're testing what we think we are + t.notOk(_readCalled) + + t.ok(caughtReadable) + }) + }) + }) +}) + +test('readable events - second', (t) => { + t.plan(3) + + // second test, make sure that readable is re-emitted if there's + // already a length, while it IS reading. + + const r = new Readable({ + highWaterMark: 3 + }) + + let _readCalled = false + r._read = function (n) { + _readCalled = true + } + + // This triggers a 'readable' event, which is lost. + r.push(Buffer.from('bl')) + + let caughtReadable = false + setTimeout(function () { + // assert we're testing what we think we are + t.ok(r._readableState.reading) + r.on('readable', function () { + caughtReadable = true + setTimeout(function () { + // we're testing what we think we are + t.ok(_readCalled) + + t.ok(caughtReadable) + }) + }) + }) +}) + +test('readable events - third', (t) => { + t.plan(3) + + // Third test, not reading when the stream has not passed + // the highWaterMark but *has* reached EOF. + const r = new Readable({ + highWaterMark: 30 + }) + + let _readCalled = false + r._read = function (n) { + _readCalled = true + } + + // This triggers a 'readable' event, which is lost. 
+ r.push(Buffer.from('blerg')) + r.push(null) + + let caughtReadable = false + setTimeout(function () { + // assert we're testing what we think we are + t.notOk(r._readableState.reading) + r.on('readable', function () { + caughtReadable = true + setTimeout(function () { + // we're testing what we think we are + t.notOk(_readCalled) + + t.ok(caughtReadable) + }) + }) + }) +}) diff --git a/test/browser/test-stream-sync-write.js b/test/browser/test-stream-sync-write.js new file mode 100644 index 0000000000..92e327eed5 --- /dev/null +++ b/test/browser/test-stream-sync-write.js @@ -0,0 +1,46 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Writable } = require('../../lib') + +test('should be able to write sync', function (t) { + t.plan(2) + + let internalCalls = 0 + let externalCalls = 0 + + const InternalStream = function () { + Writable.call(this) + } + inherits(InternalStream, Writable) + + InternalStream.prototype._write = function (chunk, encoding, callback) { + internalCalls++ + callback() + } + + const internalStream = new InternalStream() + + const ExternalStream = function (writable) { + this._writable = writable + Writable.call(this) + } + inherits(ExternalStream, Writable) + + ExternalStream.prototype._write = function (chunk, encoding, callback) { + externalCalls++ + this._writable.write(chunk, encoding, callback) + } + + const externalStream = new ExternalStream(internalStream) + + for (let i = 0; i < 2000; i++) { + externalStream.write(i.toString()) + } + + externalStream.end(() => { + t.equal(internalCalls, 2000) + t.equal(externalCalls, 2000) + }) +}) diff --git a/test/browser/test-stream-transform-constructor-set-methods.js b/test/browser/test-stream-transform-constructor-set-methods.js new file mode 100644 index 0000000000..4cefa63dff --- /dev/null +++ b/test/browser/test-stream-transform-constructor-set-methods.js @@ -0,0 +1,35 @@ +'use strict' + +const test = require('tape') +const { Transform } = require('../../lib') + +test('transform constructor set methods', function (t) { + t.plan(4) + + let _transformCalled = false + function _transform(d, e, n) { + _transformCalled = true + n() + } + + let _flushCalled = false + function _flush(n) { + _flushCalled = true + n() + } + + const tr = new Transform({ + transform: _transform, + flush: _flush + }) + + tr.end(Buffer.from('blerg')) + tr.resume() + + tr.on('end', function () { + t.equal(tr._transform, _transform) + t.equal(tr._flush, _flush) + t.ok(_transformCalled) + t.ok(_flushCalled) + }) +}) diff --git a/test/browser/test-stream-transform-objectmode-falsey-value.js b/test/browser/test-stream-transform-objectmode-falsey-value.js new file mode 100644 index 0000000000..b496acb6de --- /dev/null +++ b/test/browser/test-stream-transform-objectmode-falsey-value.js @@ -0,0 +1,35 @@ +'use strict' + +const test = require('tape') +const { PassThrough } = require('../../lib') + +test('transform objectmode falsey value', function (t) { + t.plan(13) + + const src = new PassThrough({ objectMode: true }) + const tx = new PassThrough({ objectMode: true }) + const dest = new PassThrough({ objectMode: true }) + + const expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + const results = [] + dest.on('end', function () { + t.deepEqual(results, expect) + }) + + dest.on('data', function (x) { + results.push(x) + }) + + src.pipe(tx).pipe(dest) + + let i = -1 + const int = setInterval(function () { + if (i > 10) { + src.end() + clearInterval(int) + } else { + t.ok(true) + src.write(i++) + } + }, 10) +})
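The objectMode assertions above hinge on one detail that is easy to miss: in object mode, falsey JavaScript values (false, 0, '') are legitimate chunks, and only end()/push(null) marks end-of-stream. A minimal standalone sketch of that behaviour, assuming Node's built-in stream module (the PassThrough exported by this library is exercised the same way by the test above):

'use strict'

const { PassThrough } = require('stream')

const tx = new PassThrough({ objectMode: true })
const seen = []

// Falsey values are delivered as ordinary chunks; they do not end the stream.
tx.on('data', (value) => seen.push(value))
tx.on('end', () => console.log(seen)) // [ false, 0, '' ]

tx.write(false)
tx.write(0)
tx.write('')
tx.end() // only end()/push(null) signals EOF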
diff --git a/test/browser/test-stream-transform-split-objectmode.js b/test/browser/test-stream-transform-split-objectmode.js new file mode 100644 index 0000000000..e23beb53ed --- /dev/null +++ b/test/browser/test-stream-transform-split-objectmode.js @@ -0,0 +1,57 @@ +'use strict' + +const test = require('tape') +const { Transform } = require('../../lib') + +test('transform split objectmode', function (t) { + t.plan(10) + + const parser = new Transform({ readableObjectMode: true }) + + t.ok(parser._readableState.objectMode, 'parser 1') + t.notOk(parser._writableState.objectMode, 'parser 2') + t.equals(parser._readableState.highWaterMark, 16, 'parser 3') + t.equals(parser._writableState.highWaterMark, 16 * 1024, 'parser 4') + + parser._transform = function (chunk, enc, callback) { + callback(null, { val: chunk[0] }) + } + + let parsed + + parser.on('data', function (obj) { + parsed = obj + }) + + parser.end(Buffer.from([42])) + + parser.on('end', function () { + t.equals(parsed.val, 42, 'parser ended') + }) + + const serializer = new Transform({ writableObjectMode: true }) + + t.notOk(serializer._readableState.objectMode, 'serializer 1') + t.ok(serializer._writableState.objectMode, 'serializer 2') + t.equals(serializer._readableState.highWaterMark, 16 * 1024, 'serializer 3') + t.equals(serializer._writableState.highWaterMark, 16, 'serializer 4') + + serializer._transform = function (obj, _, callback) { + callback(null, Buffer.from([obj.val])) + } + + let serialized + + serializer.on('data', function (chunk) { + serialized = chunk + }) + + serializer.write({ val: 42 }) + + serializer.on('end', function () { + t.equals(serialized[0], 42, 'searlizer ended') + }) + setImmediate(function () { + serializer.end() + }) +}) diff --git a/test/browser/test-stream-unshift-empty-chunk.js b/test/browser/test-stream-unshift-empty-chunk.js new file mode 100644 index 0000000000..b95f11bae5 --- /dev/null +++ b/test/browser/test-stream-unshift-empty-chunk.js @@ -0,0 +1,62 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('unshift empty chunk', function (t) { + t.plan(1) + + const r = new Readable() + let nChunks = 10 + const chunk = Buffer.alloc(10) + chunk.fill('x') + + r._read = function (n) { + setTimeout(function () { + r.push(--nChunks === 0 ? null : chunk) + }) + } + + let readAll = false + const seen = [] + r.on('readable', function () { + let chunk + while ((chunk = r.read())) { + seen.push(chunk.toString()) + // simulate only reading a certain amount of the data, + // and then putting the rest of the chunk back into the + // stream, like a parser might do. We just fill it with + // 'y' so that it's easy to see which bits were touched, + // and which were not. + const putBack = Buffer.alloc(readAll ? 0 : 5) + putBack.fill('y') + readAll = !readAll + r.unshift(putBack) + } + }) + + const expect = [ + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy' + ] + + r.on('end', function () { + t.deepEqual(seen, expect) + }) +}) diff --git a/test/browser/test-stream-unshift-read-race.js b/test/browser/test-stream-unshift-read-race.js new file mode 100644 index 0000000000..baa6477878 --- /dev/null +++ b/test/browser/test-stream-unshift-read-race.js @@ -0,0 +1,121 @@ +'use strict' + +// This test verifies that: +// 1. unshift() does not cause colliding _read() calls. +// 2. 
unshift() after the 'end' event is an error, but after the EOF +// signalling null, it is ok, and just creates a new readable chunk. +// 3. push() after the EOF signaling null is an error. +// 4. _read() is not called after pushing the EOF null chunk. + +const test = require('tape') +const stream = require('../../lib') + +test('unshift read race', function (t) { + t.plan(139) + + const hwm = 10 + const r = stream.Readable({ highWaterMark: hwm }) + const chunks = 10 + + const data = Buffer.alloc(chunks * hwm + Math.ceil(hwm / 2)) + for (let i = 0; i < data.length; i++) { + const c = 'asdf'.charCodeAt(i % 4) + data[i] = c + } + + let pos = 0 + let pushedNull = false + r._read = function (n) { + t.notOk(pushedNull, '_read after null push') + + // every third chunk is fast + push(!(chunks % 3)) + + function push(fast) { + t.notOk(pushedNull, 'push() after null push') + const c = pos >= data.length ? null : data.slice(pos, pos + n) + pushedNull = c === null + if (fast) { + pos += n + r.push(c) + if (c === null) { + pushError() + } + } else { + setTimeout(function () { + pos += n + r.push(c) + if (c === null) { + pushError() + } + }, 1) + } + } + } + + function pushError() { + r.unshift(Buffer.allocUnsafe(1)) + w.end() + + const onerror = global.onerror + global.onerror = (_u1, _u2, _u3, _u4, gotErr) => { + t.ok(true) + global.onerror = onerror + } + + r.push(Buffer.allocUnsafe(1)) + } + + const w = stream.Writable() + const written = [] + w._write = function (chunk, encoding, cb) { + written.push(chunk.toString()) + cb() + } + + r.on('end', t.fail) + + r.on('readable', function () { + let chunk + while ((chunk = r.read(10)) !== null) { + w.write(chunk) + if (chunk.length > 4) { + r.unshift(Buffer.from('1234')) + } + } + }) + + w.on('finish', function () { + // each chunk should start with 1234, and then be asfdasdfasdf... + // The first got pulled out before the first unshift('1234'), so it's + // lacking that piece. 
+ t.equal(written[0], 'asdfasdfas') + let asdf = 'd' + + // console.error('0: %s', written[0]); + for (let i = 1; i < written.length; i++) { + // console.error('%s: %s', i.toString(32), written[i]); + t.equal(written[i].slice(0, 4), '1234') + for (let j = 4; j < written[i].length; j++) { + const c = written[i].charAt(j) + t.equal(c, asdf) + switch (asdf) { + case 'a': + asdf = 's' + break + case 's': + asdf = 'd' + break + case 'd': + asdf = 'f' + break + case 'f': + asdf = 'a' + break + } + } + } + + t.equal(written.length, 18) + }) +}) diff --git a/test/browser/test-stream-writable-change-default-encoding.js b/test/browser/test-stream-writable-change-default-encoding.js new file mode 100644 index 0000000000..5f664be178 --- /dev/null +++ b/test/browser/test-stream-writable-change-default-encoding.js @@ -0,0 +1,69 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const stream = require('../../lib') + +inherits(MyWritable, stream.Writable) + +MyWritable.prototype._write = function (chunk, encoding, callback) { + this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding) + callback() +} + +function MyWritable(fn, options) { + stream.Writable.call(this, options) + this.fn = fn +} + +test('defaultCondingIsUtf8', (t) => { + t.plan(1) + + const m = new MyWritable( + function (isBuffer, type, enc) { + t.equal(enc, 'utf8') + }, + { decodeStrings: false } + ) + m.write('foo') + m.end() +}) + +test('changeDefaultEncodingToAscii', (t) => { + t.plan(1) + + const m = new MyWritable( + function (isBuffer, type, enc) { + t.equal(enc, 'ascii') + }, + { decodeStrings: false } + ) + m.setDefaultEncoding('ascii') + m.write('bar') + m.end() +}) + +test('changeDefaultEncodingToInvalidValue', (t) => { + t.plan(1) + + t.throws(function () { + const m = new MyWritable(function (isBuffer, type, enc) {}, { decodeStrings: false }) + m.setDefaultEncoding({}) + m.write('bar') + m.end() + }, TypeError) +}) + +test('checkVairableCaseEncoding', (t) => { + t.plan(1) + + const m = new MyWritable( + function (isBuffer, type, enc) { + t.equal(enc, 'ascii') + }, + { decodeStrings: false } + ) + m.setDefaultEncoding('AsCii') + m.write('bar') + m.end() +}) diff --git a/test/browser/test-stream-writable-constructor-set-methods.js b/test/browser/test-stream-writable-constructor-set-methods.js new file mode 100644 index 0000000000..71c6f55e6e --- /dev/null +++ b/test/browser/test-stream-writable-constructor-set-methods.js @@ -0,0 +1,38 @@ +'use strict' + +const test = require('tape') +const { Writable } = require('../../lib') + +test('writable constructor set methods', function (t) { + t.plan(5) + + let _writeCalled = false + function _write(d, e, n) { + _writeCalled = true + } + + const w = new Writable({ write: _write }) + w.end(Buffer.from('blerg')) + + let _writevCalled = false + let dLength = 0 + function _writev(d, n) { + dLength = d.length + _writevCalled = true + } + + const w2 = new Writable({ writev: _writev }) + w2.cork() + + w2.write(Buffer.from('blerg')) + w2.write(Buffer.from('blerg')) + w2.end() + + setImmediate(function () { + t.equal(w._write, _write) + t.ok(_writeCalled) + t.equal(w2._writev, _writev) + t.equal(dLength, 2) + t.ok(_writevCalled) + }) +}) diff --git a/test/browser/test-stream-writable-decoded-encoding.js b/test/browser/test-stream-writable-decoded-encoding.js new file mode 100644 index 0000000000..4dbba262d8 --- /dev/null +++ b/test/browser/test-stream-writable-decoded-encoding.js @@ -0,0 +1,49 @@ +'use strict' + +const test = require('tape') +const inherits = 
require('inherits') +const stream = require('../../lib') + +function MyWritable(fn, options) { + stream.Writable.call(this, options) + this.fn = fn +} + +inherits(MyWritable, stream.Writable) + +MyWritable.prototype._write = function (chunk, encoding, callback) { + this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding) + callback() +} + +test('decodeStringsTrue', (t) => { + t.plan(3) + + const m = new MyWritable( + function (isBuffer, type, enc) { + t.ok(isBuffer) + t.equal(type, 'object') + t.equal(enc, 'buffer') + // console.log('ok - decoded string is decoded'); + }, + { decodeStrings: true } + ) + m.write('some-text', 'utf8') + m.end() +}) + +test('decodeStringsFalse', (t) => { + t.plan(3) + + const m = new MyWritable( + function (isBuffer, type, enc) { + t.notOk(isBuffer) + t.equal(type, 'string') + t.equal(enc, 'utf8') + // console.log('ok - un-decoded string is not decoded'); + }, + { decodeStrings: false } + ) + m.write('some-text', 'utf8') + m.end() +}) diff --git a/test/browser/test-stream-writev.js b/test/browser/test-stream-writev.js new file mode 100644 index 0000000000..a100517f3f --- /dev/null +++ b/test/browser/test-stream-writev.js @@ -0,0 +1,101 @@ +'use strict' + +const test = require('tape') +const stream = require('../../lib') + +const queue = [] +for (let decode = 0; decode < 2; decode++) { + for (let uncork = 0; uncork < 2; uncork++) { + for (let multi = 0; multi < 2; multi++) { + queue.push([!!decode, !!uncork, !!multi]) + } + } +} + +function runTest(decode, uncork, multi) { + return function (t) { + t.plan(8) + + // console.log('# decode=%j uncork=%j multi=%j', decode, uncork, multi); + let counter = 0 + let expectCount = 0 + function cnt(msg) { + expectCount++ + const expect = expectCount + return function (er) { + if (er) { + throw er + } + counter++ + t.equal(counter, expect) + } + } + + const w = new stream.Writable({ decodeStrings: decode }) + w._write = function (chunk, e, cb) { + t.ok(false, 'Should not call _write') + } + + const expectChunks = decode + ? [ + { encoding: 'buffer', chunk: [104, 101, 108, 108, 111, 44, 32] }, + { encoding: 'buffer', chunk: [119, 111, 114, 108, 100] }, + { encoding: 'buffer', chunk: [33] }, + { encoding: 'buffer', chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] }, + { encoding: 'buffer', chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173] } + ] + : [ + { encoding: 'ascii', chunk: 'hello, ' }, + { encoding: 'utf8', chunk: 'world' }, + { encoding: 'buffer', chunk: [33] }, + { encoding: 'binary', chunk: '\nand then...' }, + { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' } + ] + + let actualChunks + w._writev = function (chunks, cb) { + actualChunks = chunks.map(function (chunk) { + return { + encoding: chunk.encoding, + chunk: Buffer.isBuffer(chunk.chunk) ? 
Array.prototype.slice.call(chunk.chunk) : chunk.chunk + } + }) + cb() + } + + w.cork() + w.write('hello, ', 'ascii', cnt('hello')) + w.write('world', 'utf8', cnt('world')) + + if (multi) { + w.cork() + } + + w.write(Buffer.from('!'), 'buffer', cnt('!')) + w.write('\nand then...', 'binary', cnt('and then')) + + if (multi) { + w.uncork() + } + + w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex')) + + if (uncork) { + w.uncork() + } + + w.end(cnt('end')) + + w.on('finish', function () { + // make sure finish comes after all the write cb + cnt('finish')() + t.deepEqual(expectChunks, actualChunks) + }) + } +} + +for (let i = 0; i < queue.length; i++) { + const tr = queue[i] + + test('round ' + i, runTest(tr[0], tr[1], tr[2])) +} diff --git a/test/browser/test-stream2-base64-single-char-read-end.js b/test/browser/test-stream2-base64-single-char-read-end.js new file mode 100644 index 0000000000..dd5dc5bf61 --- /dev/null +++ b/test/browser/test-stream2-base64-single-char-read-end.js @@ -0,0 +1,39 @@ +'use strict' + +const test = require('tape') +const { Readable, Writable } = require('../../lib') + +test('base64 single char read end', function (t) { + t.plan(1) + + const src = new Readable({ encoding: 'base64' }) + const dst = new Writable() + let hasRead = false + const accum = [] + + src._read = function (n) { + if (!hasRead) { + hasRead = true + process.nextTick(function () { + src.push(Buffer.from('1')) + src.push(null) + }) + } + } + + dst._write = function (chunk, enc, cb) { + accum.push(chunk) + cb() + } + + src.on('end', function () { + t.equal(Buffer.concat(accum) + '', 'MQ==') + clearTimeout(timeout) + }) + + src.pipe(dst) + + const timeout = setTimeout(function () { + t.fail('timed out waiting for _write') + }, 100) +}) diff --git a/test/browser/test-stream2-compatibility.js b/test/browser/test-stream2-compatibility.js new file mode 100644 index 0000000000..d9abbba6c0 --- /dev/null +++ b/test/browser/test-stream2-compatibility.js @@ -0,0 +1,34 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Readable } = require('../../lib') + +test('compatibility', function (t) { + t.plan(1) + + let ondataCalled = 0 + + function TestReader() { + Readable.apply(this) + this._buffer = Buffer.alloc(100) + this._buffer.fill('x') + + this.on('data', function () { + ondataCalled++ + }) + } + + inherits(TestReader, Readable) + + TestReader.prototype._read = function (n) { + this.push(this._buffer) + this._buffer = Buffer.alloc(0) + } + + setTimeout(function () { + t.equal(ondataCalled, 1) + }) + + new TestReader().read() +}) diff --git a/test/browser/test-stream2-large-read-stall.js b/test/browser/test-stream2-large-read-stall.js new file mode 100644 index 0000000000..42c8e66cc7 --- /dev/null +++ b/test/browser/test-stream2-large-read-stall.js @@ -0,0 +1,60 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('large object read stall', function (t) { + t.plan(1) + + // If everything aligns so that you do a read(n) of exactly the + // remaining buffer, then make sure that 'end' still emits. 
+ + const READSIZE = 100 + const PUSHSIZE = 20 + const PUSHCOUNT = 1000 + const HWM = 50 + + const r = new Readable({ + highWaterMark: HWM + }) + const rs = r._readableState + + r._read = push + + r.on('readable', function () { + false && console.error('>> readable') + do { + false && console.error(' > read(%d)', READSIZE) + var ret = r.read(READSIZE) + false && console.error(' < %j (%d remain)', ret && ret.length, rs.length) + } while (ret && ret.length === READSIZE) + + false && console.error('<< after read()', ret && ret.length, rs.needReadable, rs.length) + }) + + r.on('end', function () { + t.equal(pushes, PUSHCOUNT + 1) + + false && console.error('end') + }) + + let pushes = 0 + function push() { + if (pushes > PUSHCOUNT) { + return + } + + if (pushes++ === PUSHCOUNT) { + false && console.error(' push(EOF)') + return r.push(null) + } + + false && console.error(' push #%d', pushes) + if (r.push(Buffer.alloc(PUSHSIZE))) { + setTimeout(push) + } + } + + // start the flow + r.read(0) +}) diff --git a/test/browser/test-stream2-objects.js b/test/browser/test-stream2-objects.js new file mode 100644 index 0000000000..75881abf8d --- /dev/null +++ b/test/browser/test-stream2-objects.js @@ -0,0 +1,304 @@ +'use strict' + +const test = require('tape') +const { Readable, Writable } = require('../../lib') + +function toArray(callback) { + const stream = new Writable({ objectMode: true }) + const list = [] + stream.write = function (chunk) { + list.push(chunk) + } + + stream.end = function () { + callback(list) + } + + return stream +} + +function fromArray(list) { + const r = new Readable({ objectMode: true }) + r._read = noop + forEach(list, function (chunk) { + r.push(chunk) + }) + r.push(null) + + return r +} + +function noop() {} + +test('can read objects from stream', function (t) { + t.plan(3) + + const r = fromArray([{ one: '1' }, { two: '2' }]) + + const v1 = r.read() + const v2 = r.read() + const v3 = r.read() + + t.deepEqual(v1, { one: '1' }) + t.deepEqual(v2, { two: '2' }) + t.deepEqual(v3, null) +}) + +test('can pipe objects into stream', function (t) { + t.plan(1) + + const r = fromArray([{ one: '1' }, { two: '2' }]) + + r.pipe( + toArray(function (list) { + t.deepEqual(list, [{ one: '1' }, { two: '2' }]) + }) + ) +}) + +test('read(n) is ignored', function (t) { + t.plan(1) + + const r = fromArray([{ one: '1' }, { two: '2' }]) + + const value = r.read(2) + + t.deepEqual(value, { one: '1' }) +}) + +test('can read objects from _read (sync)', function (t) { + t.plan(1) + + const r = new Readable({ objectMode: true }) + const list = [{ one: '1' }, { two: '2' }] + r._read = function (n) { + const item = list.shift() + r.push(item || null) + } + + r.pipe( + toArray(function (list) { + t.deepEqual(list, [{ one: '1' }, { two: '2' }]) + }) + ) +}) + +test('can read objects from _read (async)', function (t) { + t.plan(1) + + const r = new Readable({ objectMode: true }) + const list = [{ one: '1' }, { two: '2' }] + r._read = function (n) { + const item = list.shift() + process.nextTick(function () { + r.push(item || null) + }) + } + + r.pipe( + toArray(function (list) { + t.deepEqual(list, [{ one: '1' }, { two: '2' }]) + }) + ) +}) + +test('can read strings as objects', function (t) { + t.plan(1) + + const r = new Readable({ + objectMode: true + }) + r._read = noop + const list = ['one', 'two', 'three'] + forEach(list, function (str) { + r.push(str) + }) + r.push(null) + + r.pipe( + toArray(function (array) { + t.deepEqual(array, list) + }) + ) +}) + +test('read(0) for object streams', function 
(t) { + t.plan(1) + + const r = new Readable({ + objectMode: true + }) + r._read = noop + + r.push('foobar') + r.push(null) + + r.read(0) + + r.pipe( + toArray(function (array) { + t.deepEqual(array, ['foobar']) + }) + ) +}) + +test('falsey values', function (t) { + t.plan(1) + + const r = new Readable({ + objectMode: true + }) + r._read = noop + + r.push(false) + r.push(0) + r.push('') + r.push(null) + + r.pipe( + toArray(function (array) { + t.deepEqual(array, [false, 0, '']) + }) + ) +}) + +test('high watermark _read', function (t) { + t.plan(5) + + const r = new Readable({ + highWaterMark: 6, + objectMode: true + }) + let calls = 0 + const list = ['1', '2', '3', '4', '5', '6', '7', '8'] + + r._read = function (n) { + calls++ + } + + forEach(list, function (c) { + r.push(c) + }) + + const v = r.read() + + t.equal(calls, 0) + t.equal(v, '1') + + const v2 = r.read() + t.equal(v2, '2') + + const v3 = r.read() + t.equal(v3, '3') + + t.equal(calls, 1) +}) + +test('high watermark push', function (t) { + t.plan(6) + + const r = new Readable({ + highWaterMark: 6, + objectMode: true + }) + r._read = function (n) {} + for (let i = 0; i < 6; i++) { + const bool = r.push(i) + t.equal(bool, i !== 5) + } +}) + +test('can write objects to stream', function (t) { + t.plan(1) + + const w = new Writable({ objectMode: true }) + + w._write = function (chunk, encoding, cb) { + t.deepEqual(chunk, { foo: 'bar' }) + cb() + } + + w.on('finish', function () {}) + + w.write({ foo: 'bar' }) + w.end() +}) + +test('can write multiple objects to stream', function (t) { + t.plan(1) + + const w = new Writable({ objectMode: true }) + const list = [] + + w._write = function (chunk, encoding, cb) { + list.push(chunk) + cb() + } + + w.on('finish', function () { + t.deepEqual(list, [0, 1, 2, 3, 4]) + }) + + w.write(0) + w.write(1) + w.write(2) + w.write(3) + w.write(4) + w.end() +}) + +test('can write strings as objects', function (t) { + t.plan(1) + + const w = new Writable({ + objectMode: true + }) + const list = [] + + w._write = function (chunk, encoding, cb) { + list.push(chunk) + process.nextTick(cb) + } + + w.on('finish', function () { + t.deepEqual(list, ['0', '1', '2', '3', '4']) + }) + + w.write('0') + w.write('1') + w.write('2') + w.write('3') + w.write('4') + w.end() +}) + +test('buffers finish until cb is called', function (t) { + t.plan(2) + + const w = new Writable({ + objectMode: true + }) + let called = false + + w._write = function (chunk, encoding, cb) { + t.equal(chunk, 'foo') + + process.nextTick(function () { + called = true + cb() + }) + } + + w.on('finish', function () { + t.equal(called, true) + }) + + w.write('foo') + w.end() +}) + +function forEach(xs, f) { + for (let i = 0, l = xs.length; i < l; i++) { + f(xs[i], i) + } +} diff --git a/test/browser/test-stream2-pipe-error-handling.js b/test/browser/test-stream2-pipe-error-handling.js new file mode 100644 index 0000000000..2eef740e9c --- /dev/null +++ b/test/browser/test-stream2-pipe-error-handling.js @@ -0,0 +1,89 @@ +'use strict' + +const test = require('tape') +const stream = require('../../lib') + +test('Error Listener Catches', function (t) { + t.plan(3) + + let count = 1000 + + const source = new stream.Readable() + source._read = function (n) { + n = Math.min(count, n) + count -= n + source.push(Buffer.alloc(n)) + } + + let unpipedDest + source.unpipe = function (dest) { + unpipedDest = dest + stream.Readable.prototype.unpipe.call(this, dest) + } + + const dest = new stream.Writable() + dest._write = function (chunk, encoding, cb) { + cb() 
+ } + + source.pipe(dest) + + let gotErr = null + dest.on('error', function (err) { + gotErr = err + }) + + let unpipedSource + dest.on('unpipe', function (src) { + unpipedSource = src + }) + + const err = new Error('This stream turned into bacon.') + dest.emit('error', err) + t.strictEqual(gotErr, err) + t.strictEqual(unpipedSource, source) + t.strictEqual(unpipedDest, dest) +}) + +test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) { + t.plan(3) + + let count = 1000 + + const source = new stream.Readable() + source._read = function (n) { + n = Math.min(count, n) + count -= n + source.push(Buffer.alloc(n)) + } + + let unpipedDest + source.unpipe = function (dest) { + unpipedDest = dest + stream.Readable.prototype.unpipe.call(this, dest) + } + + const dest = new stream.Writable() + dest._write = function (chunk, encoding, cb) { + cb() + } + + source.pipe(dest) + + let unpipedSource + dest.on('unpipe', function (src) { + unpipedSource = src + }) + + const err = new Error('This stream turned into bacon.') + const onerror = global.onerror + + dest.emit('error', err) + + global.onerror = (_u1, _u2, _u3, _u4, gotErr) => { + t.strictEqual(gotErr, err) + t.strictEqual(unpipedSource, source) + t.strictEqual(unpipedDest, dest) + global.onerror = onerror + } +}) diff --git a/test/browser/test-stream2-pipe-error-once-listener.js b/test/browser/test-stream2-pipe-error-once-listener.js new file mode 100644 index 0000000000..afb29324b6 --- /dev/null +++ b/test/browser/test-stream2-pipe-error-once-listener.js @@ -0,0 +1,39 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const stream = require('../../lib') + +test('pipe error once listener', function (t) { + t.plan(1) + + const Read = function () { + stream.Readable.call(this) + } + inherits(Read, stream.Readable) + + Read.prototype._read = function (size) { + this.push('x') + this.push(null) + } + + const Write = function () { + stream.Writable.call(this) + } + inherits(Write, stream.Writable) + + Write.prototype._write = function (buffer, encoding, cb) { + this.emit('error', new Error('boom')) + this.emit('alldone') + } + + const read = new Read() + const write = new Write() + + write.once('error', () => {}) + write.once('alldone', function () { + t.ok(true) + }) + + read.pipe(write) +}) diff --git a/test/browser/test-stream2-push.js b/test/browser/test-stream2-push.js new file mode 100644 index 0000000000..ba7c4eb39e --- /dev/null +++ b/test/browser/test-stream2-push.js @@ -0,0 +1,117 @@ +'use strict' + +const test = require('tape') +const { EventEmitter: EE } = require('events') +const { Readable, Writable } = require('../../lib') + +test('push', function (t) { + t.plan(33) + + const stream = new Readable({ + highWaterMark: 16, + encoding: 'utf8' + }) + + const source = new EE() + + stream._read = function () { + // console.error('stream._read'); + readStart() + } + + let ended = false + stream.on('end', function () { + ended = true + }) + + source.on('data', function (chunk) { + const ret = stream.push(chunk) + // console.error('data', stream._readableState.length); + if (!ret) { + readStop() + } + }) + + source.on('end', function () { + stream.push(null) + }) + + let reading = false + + function readStart() { + // console.error('readStart'); + reading = true + } + + function readStop() { + // console.error('readStop'); + reading = false + process.nextTick(function () { + const r = stream.read() + if (r !== null) { + writer.write(r) + } + }) + } + + const writer = new Writable({ + 
decodeStrings: false + }) + + const written = [] + + const expectWritten = [ + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg' + ] + + writer._write = function (chunk, encoding, cb) { + // console.error('WRITE %s', chunk); + written.push(chunk) + process.nextTick(cb) + } + + writer.on('finish', finish) + + // now emit some chunks. + + const chunk = 'asdfg' + + let set = 0 + readStart() + data() + function data() { + t.ok(reading) + source.emit('data', chunk) + t.ok(reading) + source.emit('data', chunk) + t.ok(reading) + source.emit('data', chunk) + t.ok(reading) + source.emit('data', chunk) + t.notOk(reading) + if (set++ < 5) { + setTimeout(data, 10) + } else { + end() + } + } + + function finish() { + // console.error('finish'); + t.deepEqual(written, expectWritten) + } + + function end() { + source.emit('end') + t.notOk(reading) + writer.end(stream.read()) + setTimeout(function () { + t.ok(ended) + }) + } +}) diff --git a/test/browser/test-stream2-readable-empty-buffer-no-eof.js b/test/browser/test-stream2-readable-empty-buffer-no-eof.js new file mode 100644 index 0000000000..aa2fce315c --- /dev/null +++ b/test/browser/test-stream2-readable-empty-buffer-no-eof.js @@ -0,0 +1,93 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('readable empty buffer no eof 1', function (t) { + t.plan(1) + + const r = new Readable() + + // should not end when we get a Buffer(0) or '' as the _read result + // that just means that there is *temporarily* no data, but to go + // ahead and try again later. + // + // note that this is very unusual. it only works for crypto streams + // because the other side of the stream will call read(0) to cycle + // data through openssl. that's why we set the timeouts to call + // r.read(0) again later, otherwise there is no more work being done + // and the process just exits. + + const buf = Buffer.alloc(5) + buf.fill('x') + let reads = 5 + r._read = function (n) { + switch (reads--) { + case 0: + return r.push(null) // EOF + case 1: + return r.push(buf) + case 2: + setTimeout(r.read.bind(r, 0), 50) + return r.push(Buffer.alloc(0)) // Not-EOF! 
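+      // Cases 3 and 4 below repeat the zero-length push, but from
+      // process.nextTick and setTimeout respectively, to cover both async paths.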
+ case 3: + setTimeout(r.read.bind(r, 0), 50) + return process.nextTick(function () { + return r.push(Buffer.alloc(0)) + }) + case 4: + setTimeout(r.read.bind(r, 0), 50) + return setTimeout(function () { + return r.push(Buffer.alloc(0)) + }) + case 5: + return setTimeout(function () { + return r.push(buf) + }) + default: + throw new Error('unreachable') + } + } + + const results = [] + function flow() { + let chunk + while ((chunk = r.read()) !== null) { + results.push(chunk + '') + } + } + r.on('readable', flow) + r.on('end', function () { + results.push('EOF') + t.deepEqual(results, ['xxxxx', 'xxxxx', 'EOF']) + }) + flow() +}) + +test('readable empty buffer no eof 2', function (t) { + t.plan(1) + + const r = new Readable({ encoding: 'base64' }) + let reads = 5 + r._read = function (n) { + if (!reads--) { + return r.push(null) // EOF + } else { + return r.push(Buffer.from('x')) + } + } + + const results = [] + function flow() { + let chunk + while ((chunk = r.read()) !== null) { + results.push(chunk + '') + } + } + r.on('readable', flow) + r.on('end', function () { + results.push('EOF') + t.deepEqual(results, ['eHh4', 'eHg=', 'EOF']) + }) + flow() +}) diff --git a/test/browser/test-stream2-readable-from-list.js b/test/browser/test-stream2-readable-from-list.js new file mode 100644 index 0000000000..2bc0809c07 --- /dev/null +++ b/test/browser/test-stream2-readable-from-list.js @@ -0,0 +1,65 @@ +'use strict' + +const test = require('tape') +const { _fromList: fromList } = require('../../lib/_stream_readable') +const BufferList = require('../../lib/internal/streams/buffer_list') + +function bufferListFromArray(arr) { + const bl = new BufferList() + for (let i = 0; i < arr.length; ++i) { + bl.push(arr[i]) + } + return bl +} + +test('buffers', function (t) { + t.plan(5) + + let list = [Buffer.from('foog'), Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')] + list = bufferListFromArray(list) + + // read more than the first element. + let ret = fromList(6, { buffer: list, length: 16 }) + t.equal(ret.toString(), 'foogba') + + // read exactly the first element. + ret = fromList(2, { buffer: list, length: 10 }) + t.equal(ret.toString(), 'rk') + + // read less than the first element. + ret = fromList(2, { buffer: list, length: 8 }) + t.equal(ret.toString(), 'ba') + + // read more than we have. + ret = fromList(100, { buffer: list, length: 6 }) + t.equal(ret.toString(), 'zykuel') + + // all consumed. + t.same(list, new BufferList()) +}) + +test('strings', function (t) { + t.plan(5) + + let list = ['foog', 'bark', 'bazy', 'kuel'] + list = bufferListFromArray(list) + + // read more than the first element. + let ret = fromList(6, { buffer: list, length: 16, decoder: true }) + t.equal(ret, 'foogba') + + // read exactly the first element. + ret = fromList(2, { buffer: list, length: 10, decoder: true }) + t.equal(ret, 'rk') + + // read less than the first element. + ret = fromList(2, { buffer: list, length: 8, decoder: true }) + t.equal(ret, 'ba') + + // read more than we have. + ret = fromList(100, { buffer: list, length: 6, decoder: true }) + t.equal(ret, 'zykuel') + + // all consumed. 
+ t.same(list, new BufferList()) +}) diff --git a/test/browser/test-stream2-readable-legacy-drain.js b/test/browser/test-stream2-readable-legacy-drain.js new file mode 100644 index 0000000000..40a042e72e --- /dev/null +++ b/test/browser/test-stream2-readable-legacy-drain.js @@ -0,0 +1,45 @@ +'use strict' + +const test = require('tape') +const { Stream, Readable } = require('../../lib') + +test('readable legacy drain', function (t) { + t.plan(3) + + const r = new Readable() + const N = 256 + let reads = 0 + r._read = function (n) { + return r.push(++reads === N ? null : Buffer.alloc(1)) + } + + r.on('end', function () { + t.ok(true, 'rended') + }) + + const w = new Stream() + w.writable = true + let writes = 0 + let buffered = 0 + w.write = function (c) { + writes += c.length + buffered += c.length + process.nextTick(drain) + return false + } + + function drain() { + if (buffered > 3) { + t.ok(false, 'to much buffer') + } + buffered = 0 + w.emit('drain') + } + + w.end = function () { + t.equal(writes, 255) + t.ok(true, 'wended') + } + + r.pipe(w) +}) diff --git a/test/browser/test-stream2-readable-non-empty-end.js b/test/browser/test-stream2-readable-non-empty-end.js new file mode 100644 index 0000000000..bd1c29bc5d --- /dev/null +++ b/test/browser/test-stream2-readable-non-empty-end.js @@ -0,0 +1,58 @@ +'use strict' + +const test = require('tape') +const { Readable } = require('../../lib') + +test('non empty end', function (t) { + t.plan(4) + + let len = 0 + const chunks = new Array(10) + for (let i = 1; i <= 10; i++) { + chunks[i - 1] = Buffer.alloc(i) + len += i + } + + const test = new Readable() + let n = 0 + test._read = function (size) { + const chunk = chunks[n++] + setTimeout(function () { + test.push(chunk === undefined ? null : chunk) + }) + } + + test.on('end', thrower) + function thrower() { + throw new Error('this should not happen!') + } + + let bytesread = 0 + test.on('readable', function () { + const b = len - bytesread - 1 + const res = test.read(b) + if (res) { + bytesread += res.length + // console.error('br=%d len=%d', bytesread, len); + setTimeout(next) + } + test.read(0) + }) + test.read(0) + + function next() { + // now let's make 'end' happen + test.removeListener('end', thrower) + + test.on('end', function () { + t.ok(true, 'end emitted') + }) + + // one to get the last byte + let r = test.read() + t.ok(r) + t.equal(r.length, 1) + r = test.read() + t.equal(r, null) + } +}) diff --git a/test/browser/test-stream2-readable-wrap-empty.js b/test/browser/test-stream2-readable-wrap-empty.js new file mode 100644 index 0000000000..ea870bc99a --- /dev/null +++ b/test/browser/test-stream2-readable-wrap-empty.js @@ -0,0 +1,23 @@ +'use strict' + +const test = require('tape') +const { EventEmitter: EE } = require('events') +const Readable = require('../../lib') + +test('wrap empty', function (t) { + t.plan(1) + + const oldStream = new EE() + oldStream.pause = function () {} + oldStream.resume = function () {} + + const newStream = new Readable().wrap(oldStream) + + newStream + .on('readable', function () {}) + .on('end', function () { + t.ok(true, 'ended') + }) + + oldStream.emit('end') +}) diff --git a/test/browser/test-stream2-readable-wrap.js b/test/browser/test-stream2-readable-wrap.js new file mode 100644 index 0000000000..e98f039f40 --- /dev/null +++ b/test/browser/test-stream2-readable-wrap.js @@ -0,0 +1,94 @@ +'use strict' + +const test = require('tape') +const { EventEmitter: EE } = require('events') +const { Readable, Writable } = require('../../lib') + +let run = 0 + 
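+// runTest wraps an old-style EventEmitter source with Readable#wrap(), pipes
+// the result into a Writable, and checks that every produced chunk arrives in
+// order and that both the old stream and the wrapper emit 'end'.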
+function runTest(highWaterMark, objectMode, produce) { + test('run #' + ++run, (t) => { + t.plan(4) + + const old = new EE() + const r = new Readable({ highWaterMark: highWaterMark, objectMode: objectMode }) + t.equal(r, r.wrap(old)) + + let ended = false + r.on('end', function () { + ended = true + }) + + old.pause = function () { + // console.error('old.pause()'); + old.emit('pause') + flowing = false + } + + old.resume = function () { + // console.error('old.resume()'); + old.emit('resume') + flow() + } + + let flowing + let chunks = 10 + let oldEnded = false + const expected = [] + function flow() { + flowing = true + // eslint-disable-next-line no-unmodified-loop-condition + while (flowing && chunks-- > 0) { + const item = produce() + expected.push(item) + // console.log('old.emit', chunks, flowing); + old.emit('data', item) + // console.log('after emit', chunks, flowing); + } + if (chunks <= 0) { + oldEnded = true + // console.log('old end', chunks, flowing); + old.emit('end') + } + } + + const w = new Writable({ highWaterMark: highWaterMark * 2, objectMode: objectMode }) + const written = [] + w._write = function (chunk, encoding, cb) { + // console.log('_write', chunk); + written.push(chunk) + setTimeout(cb) + } + + w.on('finish', function () { + performAsserts() + }) + + r.pipe(w) + + flow() + + function performAsserts() { + t.ok(ended) + t.ok(oldEnded) + t.deepEqual(written, expected) + } + }) +} + +runTest(100, false, function () { + return Buffer.alloc(100) +}) + +runTest(10, false, function () { + return Buffer.from('xxxxxxxxxx') +}) + +runTest(1, true, function () { + return { foo: 'bar' } +}) + +const objectChunks = [5, 'a', false, 0, '', 'xyz', { x: 4 }, 7, [], 555] +runTest(1, true, function () { + return objectChunks.shift() +}) diff --git a/test/browser/test-stream2-set-encoding.js b/test/browser/test-stream2-set-encoding.js new file mode 100644 index 0000000000..2b0de36be2 --- /dev/null +++ b/test/browser/test-stream2-set-encoding.js @@ -0,0 +1,335 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Readable } = require('../../lib') + +inherits(TestReader, Readable) + +function TestReader(n, opts) { + Readable.call(this, opts) + + this.pos = 0 + this.len = n || 100 +} + +TestReader.prototype._read = function (n) { + setTimeout( + function () { + if (this.pos >= this.len) { + // double push(null) to test eos handling + this.push(null) + return this.push(null) + } + + n = Math.min(n, this.len - this.pos) + if (n <= 0) { + // double push(null) to test eos handling + this.push(null) + return this.push(null) + } + + this.pos += n + const ret = Buffer.alloc(n) + ret.fill('a') + + // console.log('this.push(ret)', ret); + + return this.push(ret) + }.bind(this), + 1 + ) +} + +test('setEncoding utf8', function (t) { + t.plan(1) + + const tr = new TestReader(100) + tr.setEncoding('utf8') + const out = [] + const expect = [ + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('setEncoding hex', function (t) { + t.plan(1) + + const tr = new TestReader(100) + tr.setEncoding('hex') + const out = [] + const expect = [ + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + 
'6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('setEncoding hex with read(13)', function (t) { + t.plan(1) + + const tr = new TestReader(100) + tr.setEncoding('hex') + const out = [] + const expect = [ + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '16161' + ] + + tr.on('readable', function flow() { + // console.log('readable once'); + let chunk + while ((chunk = tr.read(13)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + // console.log('END'); + t.same(out, expect) + }) +}) + +test('setEncoding base64', function (t) { + t.plan(1) + + const tr = new TestReader(100) + tr.setEncoding('base64') + const out = [] + const expect = [ + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYQ==' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('encoding: utf8', function (t) { + t.plan(1) + + const tr = new TestReader(100, { encoding: 'utf8' }) + const out = [] + const expect = [ + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('encoding: hex', function (t) { + t.plan(1) + + const tr = new TestReader(100, { encoding: 'hex' }) + const out = [] + const expect = [ + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('encoding: hex with read(13)', function (t) { + t.plan(1) + + const tr = new TestReader(100, { encoding: 'hex' }) + const out = [] + const expect = [ + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '16161' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(13)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('encoding: base64', function (t) { + t.plan(1) + + const tr = new TestReader(100, { encoding: 'base64' }) + const 
out = [] + const expect = [ + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYQ==' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) +}) + +test('chainable', function (t) { + t.plan(1) + + const tr = new TestReader(100) + t.equal(tr.setEncoding('utf8'), tr) +}) diff --git a/test/browser/test-stream2-transform.js b/test/browser/test-stream2-transform.js new file mode 100644 index 0000000000..f32063c4c7 --- /dev/null +++ b/test/browser/test-stream2-transform.js @@ -0,0 +1,484 @@ +'use strict' + +const test = require('tape') +const { PassThrough, Transform } = require('../../lib') + +test('writable side consumption', function (t) { + t.plan(3) + + const tx = new Transform({ + highWaterMark: 10 + }) + + let transformed = 0 + tx._transform = function (chunk, encoding, cb) { + transformed += chunk.length + tx.push(chunk) + cb() + } + + for (let i = 1; i <= 10; i++) { + tx.write(Buffer.alloc(i)) + } + tx.end() + + t.equal(tx._readableState.length, 10) + t.equal(transformed, 10) + t.same( + tx._writableState.getBuffer().map(function (c) { + return c.chunk.length + }), + [5, 6, 7, 8, 9, 10] + ) +}) + +test('passthrough', function (t) { + t.plan(4) + + const pt = new PassThrough() + + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + + t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5).toString(), 'zykue') + t.equal(pt.read(5).toString(), 'l') +}) + +test('object passthrough', function (t) { + t.plan(7) + + const pt = new PassThrough({ objectMode: true }) + + pt.write(1) + pt.write(true) + pt.write(false) + pt.write(0) + pt.write('foo') + pt.write('') + pt.write({ a: 'b' }) + pt.end() + + t.equal(pt.read(), 1) + t.equal(pt.read(), true) + t.equal(pt.read(), false) + t.equal(pt.read(), 0) + t.equal(pt.read(), 'foo') + t.equal(pt.read(), '') + t.same(pt.read(), { a: 'b' }) +}) + +test('simple transform', function (t) { + t.plan(4) + + const pt = new Transform() + pt._transform = function (c, e, cb) { + const ret = Buffer.alloc(c.length) + ret.fill('x') + pt.push(ret) + cb() + } + + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + + t.equal(pt.read(5).toString(), 'xxxxx') + t.equal(pt.read(5).toString(), 'xxxxx') + t.equal(pt.read(5).toString(), 'xxxxx') + t.equal(pt.read(5).toString(), 'x') +}) + +test('simple object transform', function (t) { + t.plan(7) + + const pt = new Transform({ objectMode: true }) + pt._transform = function (c, e, cb) { + pt.push(JSON.stringify(c)) + cb() + } + + pt.write(1) + pt.write(true) + pt.write(false) + pt.write(0) + pt.write('foo') + pt.write('') + pt.write({ a: 'b' }) + pt.end() + + t.equal(pt.read(), '1') + t.equal(pt.read(), 'true') + t.equal(pt.read(), 'false') + t.equal(pt.read(), '0') + t.equal(pt.read(), '"foo"') + t.equal(pt.read(), '""') + t.equal(pt.read(), '{"a":"b"}') +}) + +test('async passthrough', function (t) { + t.plan(4) + + const pt = new Transform() + pt._transform = function (chunk, encoding, cb) { + setTimeout(function () { + pt.push(chunk) + cb() + }, 10) + } + + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + 
pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + + pt.on('finish', function () { + t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5).toString(), 'zykue') + t.equal(pt.read(5).toString(), 'l') + }) +}) + +test('assymetric transform (expand)', function (t) { + t.plan(7) + + const pt = new Transform() + + // emit each chunk 2 times. + pt._transform = function (chunk, encoding, cb) { + setTimeout(function () { + pt.push(chunk) + setTimeout(function () { + pt.push(chunk) + cb() + }, 10) + }, 10) + } + + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + + pt.on('finish', function () { + t.equal(pt.read(5).toString(), 'foogf') + t.equal(pt.read(5).toString(), 'oogba') + t.equal(pt.read(5).toString(), 'rkbar') + t.equal(pt.read(5).toString(), 'kbazy') + t.equal(pt.read(5).toString(), 'bazyk') + t.equal(pt.read(5).toString(), 'uelku') + t.equal(pt.read(5).toString(), 'el') + }) +}) + +test('assymetric transform (compress)', function (t) { + t.plan(3) + + const pt = new Transform() + + // each output is the first char of 3 consecutive chunks, + // or whatever's left. + pt.state = '' + + pt._transform = function (chunk, encoding, cb) { + if (!chunk) { + chunk = '' + } + const s = chunk.toString() + setTimeout( + function () { + this.state += s.charAt(0) + if (this.state.length === 3) { + pt.push(Buffer.from(this.state)) + this.state = '' + } + cb() + }.bind(this), + 10 + ) + } + + pt._flush = function (cb) { + // just output whatever we have. + pt.push(Buffer.from(this.state)) + this.state = '' + cb() + } + + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.write(Buffer.from('eeee')) + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.write(Buffer.from('eeee')) + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.end() + + // 'abcdeabcdeabcd' + pt.on('finish', function () { + t.equal(pt.read(5).toString(), 'abcde') + t.equal(pt.read(5).toString(), 'abcde') + t.equal(pt.read(5).toString(), 'abcd') + }) +}) + +// this tests for a stall when data is written to a full stream +// that has empty transforms. 
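+// The transform below holds back the second chunk and only pushes it together
+// with the following one; once 'd' and 'ef' are written, a single read must
+// still yield the complete 'abcdef' sequence.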
+test('complex transform', function (t) { + t.plan(2) + + let count = 0 + let saved = null + const pt = new Transform({ highWaterMark: 3 }) + pt._transform = function (c, e, cb) { + if (count++ === 1) { + saved = c + } else { + if (saved) { + pt.push(saved) + saved = null + } + pt.push(c) + } + + cb() + } + + pt.once('readable', function () { + process.nextTick(function () { + pt.write(Buffer.from('d')) + pt.write(Buffer.from('ef'), function () { + pt.end() + }) + t.equal(pt.read().toString(), 'abcdef') + t.equal(pt.read(), null) + }) + }) + + pt.write(Buffer.from('abc')) +}) + +test('passthrough event emission', function (t) { + t.plan(11) + + const pt = new PassThrough() + let emits = 0 + pt.on('readable', function () { + // console.error('>>> emit readable %d', emits); + emits++ + }) + + pt.write(Buffer.from('foog')) + + // console.error('need emit 0'); + pt.write(Buffer.from('bark')) + + setTimeout(() => { + // console.error('should have emitted readable now 1 === %d', emits) + t.equal(emits, 1) + + t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5) + '', 'null') + + // console.error('need emit 1'); + + pt.write(Buffer.from('bazy')) + // console.error('should have emitted, but not again'); + pt.write(Buffer.from('kuel')) + + // console.error('should have emitted readable now 2 === %d', emits); + setTimeout(() => { + t.equal(emits, 2) + + t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5).toString(), 'zykue') + t.equal(pt.read(5), null) + + // console.error('need emit 2'); + + pt.end() + + setTimeout(() => { + t.equal(emits, 3) + + t.equal(pt.read(5).toString(), 'l') + t.equal(pt.read(5), null) + + // console.error('should not have emitted again'); + t.equal(emits, 3) + }) + }) + }) +}) + +test('passthrough event emission reordered', function (t) { + t.plan(10) + + const pt = new PassThrough() + let emits = 0 + pt.on('readable', function () { + // console.error('emit readable', emits); + emits++ + }) + + pt.write(Buffer.from('foog')) + // console.error('need emit 0'); + pt.write(Buffer.from('bark')) + + setTimeout(() => { + // console.error('should have emitted readable now 1 === %d', emits); + t.equal(emits, 1) + + t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5), null) + + // console.error('need emit 1'); + pt.once('readable', function () { + t.equal(pt.read(5).toString(), 'arkba') + + t.equal(pt.read(5), null) + + // console.error('need emit 2'); + pt.once('readable', function () { + t.equal(pt.read(5).toString(), 'zykue') + t.equal(pt.read(5), null) + pt.once('readable', function () { + t.equal(pt.read(5).toString(), 'l') + t.equal(pt.read(5), null) + t.equal(emits, 4) + }) + pt.end() + }) + pt.write(Buffer.from('kuel')) + }) + + pt.write(Buffer.from('bazy')) + }) +}) + +test('passthrough facaded', function (t) { + t.plan(1) + + // console.error('passthrough facaded'); + const pt = new PassThrough() + const datas = [] + pt.on('data', function (chunk) { + datas.push(chunk.toString()) + }) + + pt.on('end', function () { + t.same(datas, ['foog', 'bark', 'bazy', 'kuel']) + }) + + pt.write(Buffer.from('foog')) + setTimeout(function () { + pt.write(Buffer.from('bark')) + setTimeout(function () { + pt.write(Buffer.from('bazy')) + setTimeout(function () { + pt.write(Buffer.from('kuel')) + setTimeout(function () { + pt.end() + }, 10) + }, 10) + }, 10) + }, 10) +}) + +test('object transform (json parse)', function (t) { + t.plan(5) + + // console.error('json parse stream'); + const jp = new Transform({ objectMode: true }) + jp._transform = function (data, 
encoding, cb) { + try { + jp.push(JSON.parse(data)) + cb() + } catch (er) { + cb(er) + } + } + + // anything except null/undefined is fine. + // those are "magic" in the stream API, because they signal EOF. + const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }] + + let ended = false + jp.on('end', function () { + ended = true + }) + + forEach(objects, function (obj) { + jp.write(JSON.stringify(obj)) + const res = jp.read() + t.same(res, obj) + }) + + jp.end() + // read one more time to get the 'end' event + jp.read() + + process.nextTick(function () { + t.ok(ended) + }) +}) + +test('object transform (json stringify)', function (t) { + t.plan(5) + + // console.error('json parse stream'); + const js = new Transform({ objectMode: true }) + js._transform = function (data, encoding, cb) { + try { + js.push(JSON.stringify(data)) + cb() + } catch (er) { + cb(er) + } + } + + // anything except null/undefined is fine. + // those are "magic" in the stream API, because they signal EOF. + const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }] + + let ended = false + js.on('end', function () { + ended = true + }) + + forEach(objects, function (obj) { + js.write(obj) + const res = js.read() + t.equal(res, JSON.stringify(obj)) + }) + + js.end() + // read one more time to get the 'end' event + js.read() + + process.nextTick(function () { + t.ok(ended) + }) +}) + +function forEach(xs, f) { + for (let i = 0, l = xs.length; i < l; i++) { + f(xs[i], i) + } +} diff --git a/test/browser/test-stream2-unpipe-drain.js b/test/browser/test-stream2-unpipe-drain.js new file mode 100644 index 0000000000..88ab66e5a4 --- /dev/null +++ b/test/browser/test-stream2-unpipe-drain.js @@ -0,0 +1,63 @@ +'use strict' + +const test = require('tape') +const crypto = require('crypto') +const inherits = require('inherits') +const stream = require('../../lib') + +test('unpipe drain', function (t) { + try { + crypto.randomBytes(9) + } catch (_) { + t.plan(1) + t.ok(true, 'does not suport random, skipping') + return + } + + t.plan(2) + + function TestWriter() { + stream.Writable.call(this) + } + inherits(TestWriter, stream.Writable) + + TestWriter.prototype._write = function (buffer, encoding, callback) { + // console.log('write called'); + // super slow write stream (callback never called) + } + + const dest = new TestWriter() + + function TestReader(id) { + stream.Readable.call(this) + this.reads = 0 + } + inherits(TestReader, stream.Readable) + + TestReader.prototype._read = function (size) { + this.reads += 1 + this.push(crypto.randomBytes(size)) + } + + const src1 = new TestReader() + const src2 = new TestReader() + + src1.pipe(dest) + + src1.once('readable', function () { + process.nextTick(function () { + src2.pipe(dest) + + src2.once('readable', function () { + process.nextTick(function () { + src1.unpipe(dest) + }) + }) + }) + }) + + dest.on('unpipe', function () { + t.equal(src1.reads, 2) + t.equal(src2.reads, 1) + }) +}) diff --git a/test/browser/test-stream2-writable.js b/test/browser/test-stream2-writable.js new file mode 100644 index 0000000000..3cce73b00f --- /dev/null +++ b/test/browser/test-stream2-writable.js @@ -0,0 +1,430 @@ +'use strict' + +const test = require('tape') +const inherits = require('inherits') +const { Duplex, Writable } = require('../../lib') + +inherits(TestWriter, Writable) + +function TestWriter() { + Writable.apply(this, arguments) + this.buffer = [] + this.written = 0 +} + 
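+// TestWriter records every chunk it receives, completing each write after a
+// short random delay to simulate an asynchronous destination.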
+TestWriter.prototype._write = function (chunk, encoding, cb) { + // simulate a small unpredictable latency + setTimeout( + function () { + this.buffer.push(chunk.toString()) + this.written += chunk.length + cb() + }.bind(this), + Math.floor(Math.random() * 10) + ) +} +inherits(Processstdout, Writable) + +function Processstdout() { + Writable.apply(this, arguments) + this.buffer = [] + this.written = 0 +} + +Processstdout.prototype._write = function (chunk, encoding, cb) { + // console.log(chunk.toString()); + cb() +} +const chunks = new Array(50) +for (let i = 0; i < chunks.length; i++) { + chunks[i] = new Array(i + 1).join('x') +} + +if (!process.stdout) { + process.stdout = new Processstdout() +} + +test('write fast', function (t) { + t.plan(1) + + const tw = new TestWriter({ + highWaterMark: 100 + }) + + tw.on('finish', function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + }) + + forEach(chunks, function (chunk) { + // screw backpressure. Just buffer it all up. + tw.write(chunk) + }) + tw.end() +}) + +test('write slow', function (t) { + t.plan(1) + + const tw = new TestWriter({ + highWaterMark: 100 + }) + + tw.on('finish', function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + }) + + let i = 0 + ;(function W() { + tw.write(chunks[i++]) + if (i < chunks.length) { + setTimeout(W, 10) + } else { + tw.end() + } + })() +}) + +test('write backpressure', function (t) { + t.plan(19) + + const tw = new TestWriter({ + highWaterMark: 50 + }) + + let drains = 0 + + tw.on('finish', function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + t.equal(drains, 17) + }) + + tw.on('drain', function () { + drains++ + }) + + let i = 0 + ;(function W() { + let ret + do { + ret = tw.write(chunks[i++]) + } while (ret !== false && i < chunks.length) + + if (i < chunks.length) { + t.ok(tw._writableState.length >= 50) + tw.once('drain', W) + } else { + tw.end() + } + })() +}) + +test('write bufferize', function (t) { + t.plan(50) + + const tw = new TestWriter({ + highWaterMark: 100 + }) + + const encodings = [ + 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined + ] + + tw.on('finish', function () { + forEach(chunks, function (chunk, i) { + const actual = Buffer.from(tw.buffer[i]) + chunk = Buffer.from(chunk) + + // Some combination of encoding and length result in the last byte replaced by two extra null bytes + if (actual[actual.length - 1] === 0) { + chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) + } + + t.same(actual, chunk, 'got the expected chunks ' + i) + }) + }) + + forEach(chunks, function (chunk, i) { + const enc = encodings[i % encodings.length] + chunk = Buffer.from(chunk) + tw.write(chunk.toString(enc), enc) + }) + tw.end() +}) + +test('write no bufferize', function (t) { + t.plan(100) + + const tw = new TestWriter({ + highWaterMark: 100, + decodeStrings: false + }) + + tw._write = function (chunk, encoding, cb) { + t.equals(typeof chunk, 'string') + chunk = Buffer.from(chunk, encoding) + return TestWriter.prototype._write.call(this, chunk, encoding, cb) + } + + const encodings = [ + 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined + ] + + tw.on('finish', function () { + forEach(chunks, function (chunk, i) { + const actual = Buffer.from(tw.buffer[i]) + chunk = Buffer.from(chunk) + + // Some combination of encoding and length result in the last byte replaced by two 
extra null bytes + if (actual[actual.length - 1] === 0) { + chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) + } + + t.same(actual, chunk, 'got the expected chunks ' + i) + }) + }) + + forEach(chunks, function (chunk, i) { + const enc = encodings[i % encodings.length] + chunk = Buffer.from(chunk) + tw.write(chunk.toString(enc), enc) + }) + tw.end() +}) + +test('write callbacks', function (t) { + t.plan(2) + + const callbacks = chunks + .map(function (chunk, i) { + return [ + i, + function (er) { + callbacks._called[i] = chunk + } + ] + }) + .reduce(function (set, x) { + set['callback-' + x[0]] = x[1] + return set + }, {}) + callbacks._called = [] + + const tw = new TestWriter({ + highWaterMark: 100 + }) + + tw.on('finish', function () { + process.nextTick(function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + t.same(callbacks._called, chunks, 'called all callbacks') + }) + }) + + forEach(chunks, function (chunk, i) { + tw.write(chunk, callbacks['callback-' + i]) + }) + tw.end() +}) + +test('end callback', function (t) { + t.plan(1) + + const tw = new TestWriter() + tw.end(() => { + t.ok(true) + }) +}) + +test('end callback with chunk', function (t) { + t.plan(1) + + const tw = new TestWriter() + tw.end(Buffer.from('hello world'), () => { + t.ok(true) + }) +}) + +test('end callback with chunk and encoding', function (t) { + t.plan(1) + + const tw = new TestWriter() + tw.end('hello world', 'ascii', () => { + t.ok(true) + }) +}) + +test('end callback after .write() call', function (t) { + t.plan(1) + + const tw = new TestWriter() + tw.write(Buffer.from('hello world')) + tw.end(() => { + t.ok(true) + }) +}) + +test('end callback called after write callback', function (t) { + t.plan(1) + + const tw = new TestWriter() + let writeCalledback = false + tw.write(Buffer.from('hello world'), function () { + writeCalledback = true + }) + tw.end(function () { + t.equal(writeCalledback, true) + }) +}) + +test('encoding should be ignored for buffers', function (t) { + t.plan(1) + + const tw = new Writable() + const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb' + tw._write = function (chunk, encoding, cb) { + t.equal(chunk.toString('hex'), hex) + } + const buf = Buffer.from(hex, 'hex') + tw.write(buf, 'binary') +}) + +test('writables are not pipable', function (t) { + t.plan(1) + + const w = new Writable({ autoDestroy: false }) + w._write = function () {} + let gotError = false + w.on('error', function (er) { + gotError = true + }) + w.pipe(process.stdout) + t.ok(gotError) +}) + +test('duplexes are pipable', function (t) { + t.plan(1) + + const d = new Duplex() + d._read = function () {} + d._write = function () {} + let gotError = false + d.on('error', function (er) { + gotError = true + }) + d.pipe(process.stdout) + t.notOk(gotError) +}) + +test('end(chunk) two times is an error', function (t) { + t.plan(2) + + const w = new Writable() + w._write = function () {} + let gotError = false + w.on('error', function (er) { + gotError = true + t.equal(er.message, 'write after end') + }) + w.end('this is the end') + w.end('and so is this') + process.nextTick(function () { + t.ok(gotError) + }) +}) + +test('dont end while writing', function (t) { + t.plan(2) + + const w = new Writable() + let wrote = false + w._write = function (chunk, e, cb) { + t.notOk(this.writing) + wrote = true + this.writing = true + setTimeout(function () { + this.writing = false + cb() + }) + } + w.on('finish', function () { + t.ok(wrote) + }) + w.write(Buffer.alloc(0)) + w.end() +}) + 
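+// The remaining tests pin down 'finish' ordering: it must not fire before a
+// pending _write callback (async or sync), and it must still fire when the
+// final chunk passed to end() is empty.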
+test('finish does not come before write cb', function (t) { + t.plan(1) + + const w = new Writable() + let writeCb = false + w._write = function (chunk, e, cb) { + setTimeout(function () { + writeCb = true + cb() + }, 10) + } + w.on('finish', function () { + t.ok(writeCb) + }) + w.write(Buffer.alloc(0)) + w.end() +}) + +test('finish does not come before sync _write cb', function (t) { + t.plan(1) + + const w = new Writable() + let writeCb = false + w._write = function (chunk, e, cb) { + cb() + } + w.on('finish', function () { + t.ok(writeCb) + }) + w.write(Buffer.alloc(0), function (er) { + writeCb = true + }) + w.end() +}) + +test('finish is emitted if last chunk is empty', function (t) { + t.plan(1) + + const w = new Writable() + w._write = function (chunk, e, cb) { + process.nextTick(cb) + } + w.on('finish', () => { + t.ok(true) + }) + + w.write(Buffer.alloc(1)) + w.end(Buffer.alloc(0)) +}) + +function forEach(xs, f) { + for (let i = 0, l = xs.length; i < l; i++) { + f(xs[i], i) + } +} diff --git a/test/browser/test-stream3-pause-then-read.js b/test/browser/test-stream3-pause-then-read.js new file mode 100644 index 0000000000..34aa272d88 --- /dev/null +++ b/test/browser/test-stream3-pause-then-read.js @@ -0,0 +1,147 @@ +'use strict' + +const test = require('tape') +const { Readable, Writable } = require('../../lib') + +test('pause then read', function (t) { + t.plan(7) + + const totalChunks = 100 + const chunkSize = 99 + const expectTotalData = totalChunks * chunkSize + let expectEndingData = expectTotalData + + const r = new Readable({ highWaterMark: 1000 }) + let chunks = totalChunks + r._read = function (n) { + if (!(chunks % 2)) { + setImmediate(push) + } else if (!(chunks % 3)) { + process.nextTick(push) + } else { + push() + } + } + + let totalPushed = 0 + function push() { + const chunk = chunks-- > 0 ? Buffer.alloc(chunkSize) : null + if (chunk) { + totalPushed += chunk.length + chunk.fill('x') + } + r.push(chunk) + } + + read100() + + // first we read 100 bytes + function read100() { + readn(100, onData) + } + + function readn(n, then) { + // console.error('read %d', n); + expectEndingData -= n + ;(function read() { + const c = r.read(n) + if (!c) { + r.once('readable', read) + } else { + t.equal(c.length, n) + t.notOk(r._readableState.flowing) + then() + } + })() + } + + // then we listen to some data events + function onData() { + expectEndingData -= 100 + // console.error('onData'); + let seen = 0 + r.on('data', function od(c) { + seen += c.length + if (seen >= 100) { + // seen enough + r.removeListener('data', od) + r.pause() + if (seen > 100) { + // oh no, seen too much! + // put the extra back. 
+ const diff = seen - 100 + r.unshift(c.slice(c.length - diff)) + // console.error('seen too much', seen, diff) + } + + // Nothing should be lost in between + setImmediate(pipeLittle) + } + }) + } + + // Just pipe 200 bytes, then unshift the extra and unpipe + function pipeLittle() { + expectEndingData -= 200 + // console.error('pipe a little'); + const w = new Writable() + let written = 0 + w.on('finish', function () { + t.equal(written, 200) + setImmediate(read1234) + }) + w._write = function (chunk, encoding, cb) { + written += chunk.length + if (written >= 200) { + r.unpipe(w) + w.end() + cb() + if (written > 200) { + const diff = written - 200 + written -= diff + r.unshift(chunk.slice(chunk.length - diff)) + } + } else { + setImmediate(cb) + } + } + r.pipe(w) + } + + // now read 1234 more bytes + function read1234() { + readn(1234, resumePause) + } + + function resumePause() { + // console.error('resumePause'); + // don't read anything, just resume and re-pause a whole bunch + r.resume() + r.pause() + r.resume() + r.pause() + r.resume() + r.pause() + r.resume() + r.pause() + r.resume() + r.pause() + setImmediate(pipe) + } + + function pipe() { + // console.error('pipe the rest'); + const w = new Writable() + let written = 0 + w._write = function (chunk, encoding, cb) { + written += chunk.length + cb() + } + w.on('finish', function () { + // console.error('written', written, totalPushed); + t.equal(written, expectEndingData) + t.equal(totalPushed, expectTotalData) + }) + r.pipe(w) + } +}) diff --git a/test/common/fixtures.js b/test/common/fixtures.js new file mode 100644 index 0000000000..3ee87e8b2d --- /dev/null +++ b/test/common/fixtures.js @@ -0,0 +1,38 @@ +'use strict'; + +const path = require('path'); +const fs = require('fs'); +const { pathToFileURL } = require('url'); + +const fixturesDir = path.join(__dirname, '..', 'fixtures'); + +function fixturesPath(...args) { + return path.join(fixturesDir, ...args); +} + +function fixturesFileURL(...args) { + return pathToFileURL(fixturesPath(...args)); +} + +function readFixtureSync(args, enc) { + if (Array.isArray(args)) + return fs.readFileSync(fixturesPath(...args), enc); + return fs.readFileSync(fixturesPath(args), enc); +} + +function readFixtureKey(name, enc) { + return fs.readFileSync(fixturesPath('keys', name), enc); +} + +function readFixtureKeys(enc, ...names) { + return names.map((name) => readFixtureKey(name, enc)); +} + +module.exports = { + fixturesDir, + path: fixturesPath, + fileURL: fixturesFileURL, + readSync: readFixtureSync, + readKey: readFixtureKey, + readKeys: readFixtureKeys, +}; diff --git a/test/common/fixtures.mjs b/test/common/fixtures.mjs new file mode 100644 index 0000000000..d6f7f6c092 --- /dev/null +++ b/test/common/fixtures.mjs @@ -0,0 +1,17 @@ +import fixtures from './fixtures.js'; + +const { + fixturesDir, + path, + fileURL, + readSync, + readKey, +} = fixtures; + +export { + fixturesDir, + path, + fileURL, + readSync, + readKey, +}; diff --git a/test/common/index.js b/test/common/index.js new file mode 100644 index 0000000000..3646f7864f --- /dev/null +++ b/test/common/index.js @@ -0,0 +1,952 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +/* eslint-disable node-core/crypto-check */ +'use strict'; +const process = global.process; // Some tests tamper with the process global. + +const assert = require('assert'); +const { exec, execSync, spawnSync } = require('child_process'); +const fs = require('fs'); +// Do not require 'os' until needed so that test-os-checked-function can +// monkey patch it. If 'os' is required here, that test will fail. +const path = require('path'); +const util = require('util'); +const { isMainThread } = require('worker_threads'); + +const tmpdir = require('./tmpdir'); +const bits = ['arm64', 'mips', 'mipsel', 'ppc64', 'riscv64', 's390x', 'x64'] + .includes(process.arch) ? 64 : 32; +const hasIntl = !!process.config.variables.v8_enable_i18n_support; + +const { + atob, + btoa +} = require('buffer'); + +// Some tests assume a umask of 0o022 so set that up front. Tests that need a +// different umask will set it themselves. +// +// Workers can read, but not set the umask, so check that this is the main +// thread. +if (isMainThread) + process.umask(0o022); + +const noop = () => {}; + +const hasCrypto = Boolean(process.versions.openssl) && + !process.env.NODE_SKIP_CRYPTO; + +const hasOpenSSL3 = hasCrypto && + require('crypto').constants.OPENSSL_VERSION_NUMBER >= 805306368; + +const hasQuic = hasCrypto && !!process.config.variables.openssl_quic; + +// Check for flags. Skip this for workers (both, the `cluster` module and +// `worker_threads`) and child processes. +// If the binary was built without-ssl then the crypto flags are +// invalid (bad option). The test itself should handle this case. +if (process.argv.length === 2 && + !process.env.NODE_SKIP_FLAG_CHECK && + isMainThread && + hasCrypto && + require('cluster').isPrimary && + fs.existsSync(process.argv[1])) { + // The copyright notice is relatively big and the flags could come afterwards. + const bytesToRead = 1500; + const buffer = Buffer.allocUnsafe(bytesToRead); + const fd = fs.openSync(process.argv[1], 'r'); + const bytesRead = fs.readSync(fd, buffer, 0, bytesToRead); + fs.closeSync(fd); + const source = buffer.toString('utf8', 0, bytesRead); + + const flagStart = source.indexOf('// Flags: --') + 10; + if (flagStart !== 9) { + let flagEnd = source.indexOf('\n', flagStart); + // Normalize different EOL. 
+ if (source[flagEnd - 1] === '\r') { + flagEnd--; + } + const flags = source + .substring(flagStart, flagEnd) + .replace(/_/g, '-') + .split(' '); + const args = process.execArgv.map((arg) => arg.replace(/_/g, '-')); + for (const flag of flags) { + if (!args.includes(flag) && + // If the binary is build without `intl` the inspect option is + // invalid. The test itself should handle this case. + (process.features.inspector || !flag.startsWith('--inspect'))) { + console.log( + 'NOTE: The test started as a child_process using these flags:', + util.inspect(flags), + 'Use NODE_SKIP_FLAG_CHECK to run the test with the original flags.' + ); + const args = [...flags, ...process.execArgv, ...process.argv.slice(1)]; + const options = { encoding: 'utf8', stdio: 'inherit' }; + const result = spawnSync(process.execPath, args, options); + if (result.signal) { + process.kill(0, result.signal); + } else { + process.exit(result.status); + } + } + } + } +} + +const isWindows = process.platform === 'win32'; +const isAIX = process.platform === 'aix'; +const isSunOS = process.platform === 'sunos'; +const isFreeBSD = process.platform === 'freebsd'; +const isOpenBSD = process.platform === 'openbsd'; +const isLinux = process.platform === 'linux'; +const isOSX = process.platform === 'darwin'; + +const isDumbTerminal = process.env.TERM === 'dumb'; + +const buildType = process.config.target_defaults ? + process.config.target_defaults.default_configuration : + 'Release'; + +// If env var is set then enable async_hook hooks for all tests. +if (process.env.NODE_TEST_WITH_ASYNC_HOOKS) { + const destroydIdsList = {}; + const destroyListList = {}; + const initHandles = {}; + const { internalBinding } = require('internal/test/binding'); + const async_wrap = internalBinding('async_wrap'); + + process.on('exit', () => { + // Iterate through handles to make sure nothing crashes + for (const k in initHandles) + util.inspect(initHandles[k]); + }); + + const _queueDestroyAsyncId = async_wrap.queueDestroyAsyncId; + async_wrap.queueDestroyAsyncId = function queueDestroyAsyncId(id) { + if (destroyListList[id] !== undefined) { + process._rawDebug(destroyListList[id]); + process._rawDebug(); + throw new Error(`same id added to destroy list twice (${id})`); + } + destroyListList[id] = util.inspect(new Error()); + _queueDestroyAsyncId(id); + }; + + require('async_hooks').createHook({ + init(id, ty, tr, resource) { + if (initHandles[id]) { + process._rawDebug( + `Is same resource: ${resource === initHandles[id].resource}`); + process._rawDebug(`Previous stack:\n${initHandles[id].stack}\n`); + throw new Error(`init called twice for same id (${id})`); + } + initHandles[id] = { + resource, + stack: util.inspect(new Error()).substr(6) + }; + }, + before() { }, + after() { }, + destroy(id) { + if (destroydIdsList[id] !== undefined) { + process._rawDebug(destroydIdsList[id]); + process._rawDebug(); + throw new Error(`destroy called for same id (${id})`); + } + destroydIdsList[id] = util.inspect(new Error()); + }, + }).enable(); +} + +let opensslCli = null; +let inFreeBSDJail = null; +let localhostIPv4 = null; + +const localIPv6Hosts = + isLinux ? [ + // Debian/Ubuntu + 'ip6-localhost', + 'ip6-loopback', + + // SUSE + 'ipv6-localhost', + 'ipv6-loopback', + + // Typically universal + 'localhost', + ] : [ 'localhost' ]; + +const PIPE = (() => { + const localRelative = path.relative(process.cwd(), `${tmpdir.path}/`); + const pipePrefix = isWindows ? 
'\\\\.\\pipe\\' : localRelative; + const pipeName = `node-test.${process.pid}.sock`; + return path.join(pipePrefix, pipeName); +})(); + +// Check that when running a test with +// `$node --abort-on-uncaught-exception $file child` +// the process aborts. +function childShouldThrowAndAbort() { + let testCmd = ''; + if (!isWindows) { + // Do not create core files, as it can take a lot of disk space on + // continuous testing and developers' machines + testCmd += 'ulimit -c 0 && '; + } + testCmd += `"${process.argv[0]}" --abort-on-uncaught-exception `; + testCmd += `"${process.argv[1]}" child`; + const child = exec(testCmd); + child.on('exit', function onExit(exitCode, signal) { + const errMsg = 'Test should have aborted ' + + `but instead exited with exit code ${exitCode}` + + ` and signal ${signal}`; + assert(nodeProcessAborted(exitCode, signal), errMsg); + }); +} + +function createZeroFilledFile(filename) { + const fd = fs.openSync(filename, 'w'); + fs.ftruncateSync(fd, 10 * 1024 * 1024); + fs.closeSync(fd); +} + + +const pwdCommand = isWindows ? + ['cmd.exe', ['/d', '/c', 'cd']] : + ['pwd', []]; + + +function platformTimeout(ms) { + const multipliers = typeof ms === 'bigint' ? + { two: 2n, four: 4n, seven: 7n } : { two: 2, four: 4, seven: 7 }; + + if (process.features.debug) + ms = multipliers.two * ms; + + if (isAIX) + return multipliers.two * ms; // Default localhost speed is slower on AIX + + if (process.arch !== 'arm') + return ms; + + const armv = process.config.variables.arm_version; + + if (armv === '7') + return multipliers.two * ms; // ARMv7 + + return ms; // ARMv8+ +} + + + let knownGlobals = [ + + typeof AggregateError !== 'undefined' ? AggregateError : require('aggregate-error'), + typeof AbortController !== 'undefined' ? AbortController : require('abort-controller').AbortController, + typeof AbortSignal !== 'undefined' ? AbortSignal : require('abort-controller').AbortSignal, + typeof EventTarget !== 'undefined' ? EventTarget : require('event-target-shim').EventTarget, + atob, + btoa, + clearImmediate, + clearInterval, + clearTimeout, + global, + setImmediate, + setInterval, + setTimeout, + queueMicrotask, +]; + +// TODO(@jasnell): This check can be temporary. AbortController is +// not currently supported in either Node.js 12 or 10, making it +// difficult to run tests comparatively on those versions. Once +// all supported versions have AbortController as a global, this +// check can be removed and AbortController can be added to the +// knownGlobals list above. +if (global.AbortController) + knownGlobals.push(global.AbortController); + +if (global.gc) { + knownGlobals.push(global.gc); +} + +if (global.performance) { + knownGlobals.push(global.performance); +} +if (global.PerformanceMark) { + knownGlobals.push(global.PerformanceMark); +} +if (global.PerformanceMeasure) { + knownGlobals.push(global.PerformanceMeasure); +} + +// TODO(@ethan-arrowood): Similar to previous checks, this can be temporary +// until v16.x is EOL. Once all supported versions have structuredClone we +// can add this to the list above instead. 
+if (global.structuredClone) { + knownGlobals.push(global.structuredClone); +} + +if (global.fetch) { + knownGlobals.push( + global.fetch, + global.FormData, + global.Request, + global.Response, + global.Headers, + ); +} +if (hasCrypto && global.crypto) { + knownGlobals.push(global.crypto); + knownGlobals.push(global.Crypto); + knownGlobals.push(global.CryptoKey); + knownGlobals.push(global.SubtleCrypto); +} + +function allowGlobals(...allowlist) { + knownGlobals = knownGlobals.concat(allowlist); +} + +if (process.env.NODE_TEST_KNOWN_GLOBALS !== '0') { + if (process.env.NODE_TEST_KNOWN_GLOBALS) { + const knownFromEnv = process.env.NODE_TEST_KNOWN_GLOBALS.split(','); + allowGlobals(...knownFromEnv); + } + + function leakedGlobals() { + const leaked = []; + + for (const val in global) { + if (!knownGlobals.includes(global[val])) { + leaked.push(val); + } + } + + return leaked; + } + + process.on('exit', function() { + const leaked = leakedGlobals(); + if (leaked.length > 0) { + assert.fail(`Unexpected global(s) found: ${leaked.join(', ')}`); + } + }); +} + +const mustCallChecks = []; + +function runCallChecks(exitCode) { + if (exitCode !== 0) return; + + const failed = mustCallChecks.filter(function(context) { + if ('minimum' in context) { + context.messageSegment = `at least ${context.minimum}`; + return context.actual < context.minimum; + } + context.messageSegment = `exactly ${context.exact}`; + return context.actual !== context.exact; + }); + + failed.forEach(function(context) { + console.log('Mismatched %s function calls. Expected %s, actual %d.', + context.name, + context.messageSegment, + context.actual); + console.log(context.stack.split('\n').slice(2).join('\n')); + }); + + if (failed.length) process.exit(1); +} + +function mustCall(fn, exact) { + return _mustCallInner(fn, exact, 'exact'); +} + +function mustSucceed(fn, exact) { + return mustCall(function(err, ...args) { + assert.ifError(err); + if (typeof fn === 'function') + return fn.apply(this, args); + }, exact); +} + +function mustCallAtLeast(fn, minimum) { + return _mustCallInner(fn, minimum, 'minimum'); +} + +function _mustCallInner(fn, criteria = 1, field) { + if (process._exiting) + throw new Error('Cannot use common.mustCall*() in process exit handler'); + if (typeof fn === 'number') { + criteria = fn; + fn = noop; + } else if (fn === undefined) { + fn = noop; + } + + if (typeof criteria !== 'number') + throw new TypeError(`Invalid ${field} value: ${criteria}`); + + const context = { + [field]: criteria, + actual: 0, + stack: util.inspect(new Error()), + name: fn.name || '' + }; + + // Add the exit listener only once to avoid listener leak warnings + if (mustCallChecks.length === 0) process.on('exit', runCallChecks); + + mustCallChecks.push(context); + + const _return = function() { // eslint-disable-line func-style + context.actual++; + return fn.apply(this, arguments); + }; + // Function instances have own properties that may be relevant. + // Let's replicate those properties to the returned function. 
+ // Refs: https://tc39.es/ecma262/#sec-function-instances + Object.defineProperties(_return, { + name: { + value: fn.name, + writable: false, + enumerable: false, + configurable: true, + }, + length: { + value: fn.length, + writable: false, + enumerable: false, + configurable: true, + }, + }); + return _return; +} + +function hasMultiLocalhost() { + const { internalBinding } = require('internal/test/binding'); + const { TCP, constants: TCPConstants } = internalBinding('tcp_wrap'); + const t = new TCP(TCPConstants.SOCKET); + const ret = t.bind('127.0.0.2', 0); + t.close(); + return ret === 0; +} + +function skipIfEslintMissing() { + if (!fs.existsSync( + path.join(__dirname, '..', '..', 'tools', 'node_modules', 'eslint') + )) { + skip('missing ESLint'); + } +} + +function canCreateSymLink() { + // On Windows, creating symlinks requires admin privileges. + // We'll only try to run symlink test if we have enough privileges. + // On other platforms, creating symlinks shouldn't need admin privileges + if (isWindows) { + // whoami.exe needs to be the one from System32 + // If unix tools are in the path, they can shadow the one we want, + // so use the full path while executing whoami + const whoamiPath = path.join(process.env.SystemRoot, + 'System32', 'whoami.exe'); + + try { + const output = execSync(`${whoamiPath} /priv`, { timeout: 1000 }); + return output.includes('SeCreateSymbolicLinkPrivilege'); + } catch { + return false; + } + } + // On non-Windows platforms, this always returns `true` + return true; +} + +function getCallSite(top) { + const originalStackFormatter = Error.prepareStackTrace; + Error.prepareStackTrace = (err, stack) => + `${stack[0].getFileName()}:${stack[0].getLineNumber()}`; + const err = new Error(); + Error.captureStackTrace(err, top); + // With the V8 Error API, the stack is not formatted until it is accessed + err.stack; // eslint-disable-line no-unused-expressions + Error.prepareStackTrace = originalStackFormatter; + return err.stack; +} + +function mustNotCall(msg) { + const callSite = getCallSite(mustNotCall); + return function mustNotCall(...args) { + const argsInfo = args.length > 0 ? + `\ncalled with arguments: ${args.map(util.inspect).join(', ')}` : ''; + assert.fail( + `${msg || 'function should not have been called'} at ${callSite}` + + argsInfo); + }; +} + +function printSkipMessage(msg) { + console.log(`1..0 # Skipped: ${msg}`); +} + +function skip(msg) { + printSkipMessage(msg); + process.exit(0); +} + +// Returns true if the exit code "exitCode" and/or signal name "signal" +// represent the exit code and/or signal name of a node process that aborted, +// false otherwise. +function nodeProcessAborted(exitCode, signal) { + // Depending on the compiler used, node will exit with either + // exit code 132 (SIGILL), 133 (SIGTRAP) or 134 (SIGABRT). + let expectedExitCodes = [132, 133, 134]; + + // On platforms using KSH as the default shell (like SmartOS), + // when a process aborts, KSH exits with an exit code that is + // greater than 256, and thus the exit code emitted with the 'exit' + // event is null and the signal is set to either SIGILL, SIGTRAP, + // or SIGABRT (depending on the compiler). 
+ const expectedSignals = ['SIGILL', 'SIGTRAP', 'SIGABRT']; + + // On Windows, 'aborts' are of 2 types, depending on the context: + // (i) Exception breakpoint, if --abort-on-uncaught-exception is on + // which corresponds to exit code 2147483651 (0x80000003) + // (ii) Otherwise, _exit(134) which is called in place of abort() due to + // raising SIGABRT exiting with ambiguous exit code '3' by default + if (isWindows) + expectedExitCodes = [0x80000003, 134]; + + // When using --abort-on-uncaught-exception, V8 will use + // base::OS::Abort to terminate the process. + // Depending on the compiler used, the shell or other aspects of + // the platform used to build the node binary, this will actually + // make V8 exit by aborting or by raising a signal. In any case, + // one of them (exit code or signal) needs to be set to one of + // the expected exit codes or signals. + if (signal !== null) { + return expectedSignals.includes(signal); + } + return expectedExitCodes.includes(exitCode); +} + +function isAlive(pid) { + try { + process.kill(pid, 'SIGCONT'); + return true; + } catch { + return false; + } +} + +function _expectWarning(name, expected, code) { + if (typeof expected === 'string') { + expected = [[expected, code]]; + } else if (!Array.isArray(expected)) { + expected = Object.entries(expected).map(([a, b]) => [b, a]); + } else if (!(Array.isArray(expected[0]))) { + expected = [[expected[0], expected[1]]]; + } + // Deprecation codes are mandatory, everything else is not. + if (name === 'DeprecationWarning') { + expected.forEach(([_, code]) => assert(code, expected)); + } + return mustCall((warning) => { + const expectedProperties = expected.shift(); + if (!expectedProperties) { + assert.fail(`Unexpected extra warning received: ${warning}`); + } + const [ message, code ] = expectedProperties; + assert.strictEqual(warning.name, name); + if (typeof message === 'string') { + assert.strictEqual(warning.message, message); + } else { + assert.match(warning.message, message); + } + assert.strictEqual(warning.code, code); + }, expected.length); +} + +let catchWarning; + +// Accepts a warning name and description or array of descriptions or a map of +// warning names to description(s) ensures a warning is generated for each +// name/description pair. +// The expected messages have to be unique per `expectWarning()` call. +function expectWarning(nameOrMap, expected, code) { + if (catchWarning === undefined) { + catchWarning = {}; + process.on('warning', (warning) => { + if (!catchWarning[warning.name]) { + throw new TypeError( + `"${warning.name}" was triggered without being expected.\n` + + util.inspect(warning) + ); + } + catchWarning[warning.name](warning); + }); + } + if (typeof nameOrMap === 'string') { + catchWarning[nameOrMap] = _expectWarning(nameOrMap, expected, code); + } else { + Object.keys(nameOrMap).forEach((name) => { + catchWarning[name] = _expectWarning(name, nameOrMap[name]); + }); + } +} + +// Useful for testing expected internal/error objects +function expectsError(validator, exact) { + return mustCall((...args) => { + if (args.length !== 1) { + // Do not use `assert.strictEqual()` to prevent `inspect` from + // always being called. 
+ assert.fail(`Expected one argument, got ${util.inspect(args)}`); + } + const error = args.pop(); + const descriptor = Object.getOwnPropertyDescriptor(error, 'message'); + // The error message should be non-enumerable + assert.strictEqual(descriptor.enumerable, false); + + assert.throws(() => { throw error; }, validator); + return true; + }, exact); +} + +function skipIfInspectorDisabled() { + if (!process.features.inspector) { + skip('V8 inspector is disabled'); + } +} + +function skipIf32Bits() { + if (bits < 64) { + skip('The tested feature is not available in 32bit builds'); + } +} + +function skipIfWorker() { + if (!isMainThread) { + skip('This test only works on a main thread'); + } +} + +function getArrayBufferViews(buf) { + const { buffer, byteOffset, byteLength } = buf; + + const out = []; + + const arrayBufferViews = [ + Int8Array, + Uint8Array, + Uint8ClampedArray, + Int16Array, + Uint16Array, + Int32Array, + Uint32Array, + Float32Array, + Float64Array, + DataView, + ]; + + for (const type of arrayBufferViews) { + const { BYTES_PER_ELEMENT = 1 } = type; + if (byteLength % BYTES_PER_ELEMENT === 0) { + out.push(new type(buffer, byteOffset, byteLength / BYTES_PER_ELEMENT)); + } + } + return out; +} + +function getBufferSources(buf) { + return [...getArrayBufferViews(buf), new Uint8Array(buf).buffer]; +} + +function getTTYfd() { + // Do our best to grab a tty fd. + const tty = require('tty'); + // Don't attempt fd 0 as it is not writable on Windows. + // Ref: ef2861961c3d9e9ed6972e1e84d969683b25cf95 + const ttyFd = [1, 2, 4, 5].find(tty.isatty); + if (ttyFd === undefined) { + try { + return fs.openSync('/dev/tty'); + } catch { + // There aren't any tty fd's available to use. + return -1; + } + } + return ttyFd; +} + +function runWithInvalidFD(func) { + let fd = 1 << 30; + // Get first known bad file descriptor. 1 << 30 is usually unlikely to + // be a valid one. + try { + while (fs.fstatSync(fd--) && fd > 0); + } catch { + return func(fd); + } + + printSkipMessage('Could not generate an invalid fd'); +} + +// A helper function to simplify checking for ERR_INVALID_ARG_TYPE output. +function invalidArgTypeHelper(input) { + if (input == null) { + return ` Received ${input}`; + } + if (typeof input === 'function' && input.name) { + return ` Received function ${input.name}`; + } + if (typeof input === 'object') { + if (input.constructor && input.constructor.name) { + return ` Received an instance of ${input.constructor.name}`; + } + return ` Received ${util.inspect(input, { depth: -1 })}`; + } + let inspected = util.inspect(input, { colors: false }); + if (inspected.length > 25) + inspected = `${inspected.slice(0, 25)}...`; + return ` Received type ${typeof input} (${inspected})`; +} + +function skipIfDumbTerminal() { + if (isDumbTerminal) { + skip('skipping - dumb terminal'); + } +} + +function gcUntil(name, condition) { + if (typeof name === 'function') { + condition = name; + name = undefined; + } + return new Promise((resolve, reject) => { + let count = 0; + function gcAndCheck() { + setImmediate(() => { + count++; + global.gc(); + if (condition()) { + resolve(); + } else if (count < 10) { + gcAndCheck(); + } else { + reject(name === undefined ?
undefined : 'Test ' + name + ' failed'); + } + }); + } + gcAndCheck(); + }); +} + +function requireNoPackageJSONAbove(dir = __dirname) { + let possiblePackage = path.join(dir, '..', 'package.json'); + let lastPackage = null; + while (possiblePackage !== lastPackage) { + if (fs.existsSync(possiblePackage)) { + assert.fail( + 'This test shouldn\'t load properties from a package.json above ' + + `its file location. Found package.json at ${possiblePackage}.`); + } + lastPackage = possiblePackage; + possiblePackage = path.join(possiblePackage, '..', '..', 'package.json'); + } +} + +const common = { + allowGlobals, + buildType, + canCreateSymLink, + childShouldThrowAndAbort, + createZeroFilledFile, + expectsError, + expectWarning, + gcUntil, + getArrayBufferViews, + getBufferSources, + getCallSite, + getTTYfd, + hasIntl, + hasCrypto, + hasOpenSSL3, + hasQuic, + hasMultiLocalhost, + invalidArgTypeHelper, + isAIX, + isAlive, + isDumbTerminal, + isFreeBSD, + isLinux, + isMainThread, + isOpenBSD, + isOSX, + isSunOS, + isWindows, + localIPv6Hosts, + mustCall, + mustCallAtLeast, + mustNotCall, + mustSucceed, + nodeProcessAborted, + PIPE, + platformTimeout, + printSkipMessage, + pwdCommand, + requireNoPackageJSONAbove, + runWithInvalidFD, + skip, + skipIf32Bits, + skipIfDumbTerminal, + skipIfEslintMissing, + skipIfInspectorDisabled, + skipIfWorker, + + get enoughTestMem() { + return require('os').totalmem() > 0x70000000; /* 1.75 Gb */ + }, + + get hasFipsCrypto() { + return hasCrypto && require('crypto').getFips(); + }, + + get hasIPv6() { + const iFaces = require('os').networkInterfaces(); + const re = isWindows ? /Loopback Pseudo-Interface/ : /lo/; + return Object.keys(iFaces).some((name) => { + return re.test(name) && + iFaces[name].some(({ family }) => family === 'IPv6'); + }); + }, + + get inFreeBSDJail() { + if (inFreeBSDJail !== null) return inFreeBSDJail; + + if (exports.isFreeBSD && + execSync('sysctl -n security.jail.jailed').toString() === '1\n') { + inFreeBSDJail = true; + } else { + inFreeBSDJail = false; + } + return inFreeBSDJail; + }, + + // On IBMi, process.platform and os.platform() both return 'aix', + // It is not enough to differentiate between IBMi and real AIX system. + get isIBMi() { + return require('os').type() === 'OS400'; + }, + + get isLinuxPPCBE() { + return (process.platform === 'linux') && (process.arch === 'ppc64') && + (require('os').endianness() === 'BE'); + }, + + get localhostIPv4() { + if (localhostIPv4 !== null) return localhostIPv4; + + if (this.inFreeBSDJail) { + // Jailed network interfaces are a bit special - since we need to jump + // through loops, as well as this being an exception case, assume the + // user will provide this instead. + if (process.env.LOCALHOST) { + localhostIPv4 = process.env.LOCALHOST; + } else { + console.error('Looks like we\'re in a FreeBSD Jail. 
' + + 'Please provide your default interface address ' + + 'as LOCALHOST or expect some tests to fail.'); + } + } + + if (localhostIPv4 === null) localhostIPv4 = '127.0.0.1'; + + return localhostIPv4; + }, + + // opensslCli defined lazily to reduce overhead of spawnSync + get opensslCli() { + if (opensslCli !== null) return opensslCli; + + if (process.config.variables.node_shared_openssl) { + // Use external command + opensslCli = 'openssl'; + } else { + // Use command built from sources included in Node.js repository + opensslCli = path.join(path.dirname(process.execPath), 'openssl-cli'); + } + + if (exports.isWindows) opensslCli += '.exe'; + + const opensslCmd = spawnSync(opensslCli, ['version']); + if (opensslCmd.status !== 0 || opensslCmd.error !== undefined) { + // OpenSSL command cannot be executed + opensslCli = false; + } + return opensslCli; + }, + + get PORT() { + if (+process.env.TEST_PARALLEL) { + throw new Error('common.PORT cannot be used in a parallelized test'); + } + return +process.env.NODE_COMMON_PORT || 12346; + }, + + /** + * Returns the EOL character used by this Git checkout. + */ + get checkoutEOL() { + return fs.readFileSync(__filename).includes('\r\n') ? '\r\n' : '\n'; + }, +}; + +const validProperties = new Set(Object.keys(common)); +module.exports = new Proxy(common, { + get(obj, prop) { + if (!validProperties.has(prop)) + throw new Error(`Using invalid common property: '${prop}'`); + return obj[prop]; + } +}); + + /* replacement start */ + if (typeof EventTarget === 'undefined') { + globalThis.EventTarget = require('event-target-shim').EventTarget; + } + + if (typeof AbortController === 'undefined') { + globalThis.AbortController = require('abort-controller').AbortController; + } + + if (typeof AbortSignal === 'undefined') { + globalThis.AbortSignal = require('abort-controller').AbortSignal; + + globalThis.AbortSignal.abort = function() { + const controller = new AbortController(); + controller.abort(); + + return controller.signal; + } + } + /* replacement end */ diff --git a/test/common/index.mjs b/test/common/index.mjs new file mode 100644 index 0000000000..ec181dcacb --- /dev/null +++ b/test/common/index.mjs @@ -0,0 +1,97 @@ +import { createRequire } from 'module'; + +const require = createRequire(import.meta.url); +const common = require('./index.js'); + +const { + isMainThread, + isWindows, + isAIX, + isIBMi, + isLinuxPPCBE, + isSunOS, + isDumbTerminal, + isFreeBSD, + isOpenBSD, + isLinux, + isOSX, + enoughTestMem, + buildType, + localIPv6Hosts, + opensslCli, + PIPE, + hasCrypto, + hasIPv6, + childShouldThrowAndAbort, + createZeroFilledFile, + platformTimeout, + allowGlobals, + mustCall, + mustCallAtLeast, + mustSucceed, + hasMultiLocalhost, + skipIfDumbTerminal, + skipIfEslintMissing, + canCreateSymLink, + getCallSite, + mustNotCall, + printSkipMessage, + skip, + nodeProcessAborted, + isAlive, + expectWarning, + expectsError, + skipIfInspectorDisabled, + skipIf32Bits, + getArrayBufferViews, + getBufferSources, + getTTYfd, + runWithInvalidFD +} = common; + +export { + isMainThread, + isWindows, + isAIX, + isIBMi, + isLinuxPPCBE, + isSunOS, + isDumbTerminal, + isFreeBSD, + isOpenBSD, + isLinux, + isOSX, + enoughTestMem, + buildType, + localIPv6Hosts, + opensslCli, + PIPE, + hasCrypto, + hasIPv6, + childShouldThrowAndAbort, + createZeroFilledFile, + platformTimeout, + allowGlobals, + mustCall, + mustCallAtLeast, + mustSucceed, + hasMultiLocalhost, + skipIfDumbTerminal, + skipIfEslintMissing, + canCreateSymLink, + getCallSite, + mustNotCall, + 
printSkipMessage, + skip, + nodeProcessAborted, + isAlive, + expectWarning, + expectsError, + skipIfInspectorDisabled, + skipIf32Bits, + getArrayBufferViews, + getBufferSources, + getTTYfd, + runWithInvalidFD, + createRequire +}; diff --git a/test/common/tmpdir.js b/test/common/tmpdir.js new file mode 100644 index 0000000000..0bafea1582 --- /dev/null +++ b/test/common/tmpdir.js @@ -0,0 +1,61 @@ +'use strict'; + +const fs = require('fs'); +const path = require('path'); +const { isMainThread } = require('worker_threads'); + +function rmSync(pathname) { + fs.rmSync(pathname, { maxRetries: 3, recursive: true, force: true }); +} + +const testRoot = process.env.NODE_TEST_DIR ? + fs.realpathSync(process.env.NODE_TEST_DIR) : path.resolve(__dirname, '..'); + +// Using a `.` prefixed name, which is the convention for "hidden" on POSIX, +// gets tools to ignore it by default or by simple rules, especially eslint. +const tmpdirName = '.tmp.' + + (process.env.TEST_SERIAL_ID || process.env.TEST_THREAD_ID || '0'); +const tmpPath = path.join(testRoot, tmpdirName); + +let firstRefresh = true; +function refresh() { + rmSync(this.path); + fs.mkdirSync(this.path); + + if (firstRefresh) { + firstRefresh = false; + // Clean only when a test uses refresh. This allows for child processes to + // use the tmpdir and only the parent will clean on exit. + process.on('exit', onexit); + } +} + +function onexit() { + // Change directory to avoid possible EBUSY + if (isMainThread) + process.chdir(testRoot); + + try { + rmSync(tmpPath); + } catch (e) { + console.error('Can\'t clean tmpdir:', tmpPath); + + const files = fs.readdirSync(tmpPath); + console.error('Files blocking:', files); + + if (files.some((f) => f.startsWith('.nfs'))) { + // Warn about NFS "silly rename" + console.error('Note: ".nfs*" might be files that were open and ' + + 'unlinked but not closed.'); + console.error('See http://nfs.sourceforge.net/#faq_d2 for details.'); + } + + console.error(); + throw e; + } +} + +module.exports = { + path: tmpPath, + refresh +}; diff --git a/test/fixtures/elipses.txt b/test/fixtures/elipses.txt new file mode 100644 index 0000000000..6105600505 --- /dev/null +++ b/test/fixtures/elipses.txt @@ -0,0 +1 @@ 
+……………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………
………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………
…………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………… \ No newline at end of file diff --git a/test/fixtures/empty-with-bom.txt b/test/fixtures/empty-with-bom.txt new file mode 100644 index 0000000000..5f282702bb --- /dev/null +++ b/test/fixtures/empty-with-bom.txt @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/test/fixtures/empty.txt b/test/fixtures/empty.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/fixtures/file-to-read-with-bom.txt b/test/fixtures/file-to-read-with-bom.txt new file mode 100644 index 0000000000..d46c8708d9 --- /dev/null +++ b/test/fixtures/file-to-read-with-bom.txt @@ -0,0 +1,3 @@ +abc +def +ghi diff --git a/test/fixtures/file-to-read-without-bom.txt 
b/test/fixtures/file-to-read-without-bom.txt new file mode 100644 index 0000000000..8edb37e36d --- /dev/null +++ b/test/fixtures/file-to-read-without-bom.txt @@ -0,0 +1,3 @@ +abc +def +ghi diff --git a/test/fixtures/outside.txt b/test/fixtures/outside.txt new file mode 100644 index 0000000000..044c4b9614 --- /dev/null +++ b/test/fixtures/outside.txt @@ -0,0 +1,2 @@ +this file is part of the WASI tests. it exists outside of the sandbox, and +should be inaccessible from the WASI tests. diff --git a/test/fixtures/readfile_pipe_test.txt b/test/fixtures/readfile_pipe_test.txt new file mode 100644 index 0000000000..65975655dc --- /dev/null +++ b/test/fixtures/readfile_pipe_test.txt @@ -0,0 +1,5 @@ +xxxx xxxx xxxx xxxx +xxxx xxxx xxxx xxxx +xxxx xxxx xxxx xxxx +xxxx xxxx xxxx xxxx +xxxx xxxx xxxx xxxx diff --git a/test/fixtures/tls-session-ticket.txt b/test/fixtures/tls-session-ticket.txt new file mode 100644 index 0000000000..bc0f6b58e1 --- /dev/null +++ b/test/fixtures/tls-session-ticket.txt @@ -0,0 +1,23 @@ +-----BEGIN SSL SESSION PARAMETERS----- +MIID2wIBAQICAwEEAgA1BCAMjLe+70uBSPGvybkTnPVUMwdbdtVbkMIXf8L5M8Kl +VAQwog+Afs00cnYUcgD1BQewJyxX1e561oRuDTpy7BHABC1hC7hxTaul+pwv+cBx +8D72oQYCBFFQF3OiBAICASyjggNhMIIDXTCCAkWgAwIBAgIJAMUSOvlaeyQHMA0G +CSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRl +MSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMTAxMTE2MDkz +MjQ5WhcNMTMxMTE1MDkzMjQ5WjBFMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29t +ZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+LXZOjcQCJq3+ZKUFabj71oo/ex +/XsBcFqtBThjjTw9CVEVwfPQQp4XwtPiB204vnYXwQ1/R2NdTQqCZu47l79LssL/ +u2a5Y9+0NEU3nQA5qdt+1FAE0c5oexPimXOrR3GWfKz7PmZ2O0117IeCUUXPG5U8 +umhDe/4mDF4ZNJiKc404WthquTqgS7rLQZHhZ6D0EnGnOkzlmxJMYPNHSOY1/6iv +dNUUcC87awNEA3lgfhy25IyBK3QJc+aYKNTbt70Lery3bu2wWLFGtmNiGlQTS4Js +xImRsECTI727ObS7/FWAQsqW+COL0Sa5BuMFrFIpjPrEe0ih7vRRbdmXRwIDAQAB +o1AwTjAdBgNVHQ4EFgQUDnV4d6mDtOnluLoCjkUHTX/n4agwHwYDVR0jBBgwFoAU +DnV4d6mDtOnluLoCjkUHTX/n4agwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUF +AAOCAQEAFwV4MQfTo+qMv9JMiynoIEiqfOz4RgtmBqRnXUffcjS2dhc7/z+FPZnM +79Kej8eLHoVfxCyWRHFlzm93vEdvwxOCrD13EDOi08OOZfxWyIlCa6Bg8cMAKqQz +d2OvQOWqlRWBTThBJIhWflU33izXQn5GdmYqhfpc+9ZHHGhvXNydtRQkdxVK2dZN +zLBvBlLlRmtoClU7xm3A+/5dddePAQHEPtyFlUw49VYtZ3ru6KqPms7MKvcRhYLs +y9rwSfuuniMlx4d0bDR7TOkw0QQSA0N8MGQRQpzl4mw4jLzyM5d5QtuGBh2P6hPG +a0YQxtI3RPT/p6ENzzBiAKXiSfzox6QCBAClAwIBEg== +-----END SSL SESSION PARAMETERS----- diff --git a/test/fixtures/x.txt b/test/fixtures/x.txt new file mode 100644 index 0000000000..cd470e6190 --- /dev/null +++ b/test/fixtures/x.txt @@ -0,0 +1 @@ +xyz diff --git a/test/fixtures/x1024.txt b/test/fixtures/x1024.txt new file mode 100644 index 0000000000..c6a9d2f1a5 --- /dev/null +++ b/test/fixtures/x1024.txt @@ -0,0 +1 @@ 
+xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx \ No newline at end of file diff --git a/test/ours/test-errors.js b/test/ours/test-errors.js new file mode 100644 index 0000000000..079e1017a3 --- /dev/null +++ b/test/ours/test-errors.js @@ -0,0 +1,132 @@ +'use strict' + +const t = require('tap') +const { codes: errors } = require('../../lib/internal/errors') + +function checkError(err, Base, name, code, message) { + t.ok(err instanceof Base) + t.equal(err.name, name) + t.equal(err.code, code) + t.equal(err.message, message) +} + +// Update this numbers based on the number of checkError below multiplied by the assertions within checkError +t.plan(17 * 4) + +checkError( + new errors.ERR_INVALID_ARG_VALUE('name', 0), + TypeError, + 'TypeError', + 'ERR_INVALID_ARG_VALUE', + "The argument 'name' is invalid. Received 0" +) + +checkError( + new errors.ERR_INVALID_ARG_VALUE('name', undefined), + TypeError, + 'TypeError', + 'ERR_INVALID_ARG_VALUE', + "The argument 'name' is invalid. Received undefined" +) + +checkError( + new errors.ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], 0), + TypeError, + 'TypeError', + 'ERR_INVALID_ARG_TYPE', + 'The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. Received type number (0)' +) + +checkError( + new errors.ERR_INVALID_ARG_TYPE('first argument', 'not string', 'foo'), + TypeError, + 'TypeError', + 'ERR_INVALID_ARG_TYPE', + "The first argument must be not string. Received type string ('foo')" +) + +checkError( + new errors.ERR_INVALID_ARG_TYPE('obj.prop', 'string', undefined), + TypeError, + 'TypeError', + 'ERR_INVALID_ARG_TYPE', + 'The "obj.prop" property must be of type string. 
Received undefined' +) + +checkError( + new errors.ERR_STREAM_PUSH_AFTER_EOF(), + Error, + 'Error', + 'ERR_STREAM_PUSH_AFTER_EOF', + 'stream.push() after EOF' +) + +checkError( + new errors.ERR_METHOD_NOT_IMPLEMENTED('_read()'), + Error, + 'Error', + 'ERR_METHOD_NOT_IMPLEMENTED', + 'The _read() method is not implemented' +) + +checkError( + new errors.ERR_METHOD_NOT_IMPLEMENTED('_write()'), + Error, + 'Error', + 'ERR_METHOD_NOT_IMPLEMENTED', + 'The _write() method is not implemented' +) + +checkError(new errors.ERR_STREAM_PREMATURE_CLOSE(), Error, 'Error', 'ERR_STREAM_PREMATURE_CLOSE', 'Premature close') + +checkError( + new errors.ERR_STREAM_DESTROYED('pipe'), + Error, + 'Error', + 'ERR_STREAM_DESTROYED', + 'Cannot call pipe after a stream was destroyed' +) + +checkError( + new errors.ERR_STREAM_DESTROYED('write'), + Error, + 'Error', + 'ERR_STREAM_DESTROYED', + 'Cannot call write after a stream was destroyed' +) + +checkError( + new errors.ERR_MULTIPLE_CALLBACK(), + Error, + 'Error', + 'ERR_MULTIPLE_CALLBACK', + 'Callback called multiple times' +) + +checkError(new errors.ERR_STREAM_CANNOT_PIPE(), Error, 'Error', 'ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable') + +checkError(new errors.ERR_STREAM_WRITE_AFTER_END(), Error, 'Error', 'ERR_STREAM_WRITE_AFTER_END', 'write after end') + +checkError( + new errors.ERR_STREAM_NULL_VALUES(), + TypeError, + 'TypeError', + 'ERR_STREAM_NULL_VALUES', + 'May not write null values to stream' +) + +checkError( + new errors.ERR_UNKNOWN_ENCODING('foo'), + TypeError, + 'TypeError', + 'ERR_UNKNOWN_ENCODING', + 'Unknown encoding: foo' +) + +checkError( + new errors.ERR_STREAM_UNSHIFT_AFTER_END_EVENT(), + Error, + 'Error', + 'ERR_STREAM_UNSHIFT_AFTER_END_EVENT', + 'stream.unshift() after end event' +) diff --git a/test/ours/test-fake-timers.js b/test/ours/test-fake-timers.js new file mode 100644 index 0000000000..ec16c10f4d --- /dev/null +++ b/test/ours/test-fake-timers.js @@ -0,0 +1,40 @@ +'use strict' + +require('../common') +const t = require('tap') +const util = require('util') +const fakeTimers = require('@sinonjs/fake-timers') +const Transform = require('../../lib').Transform + +t.plan(1) + +function MyTransform() { + Transform.call(this) +} + +util.inherits(MyTransform, Transform) + +const clock = fakeTimers.install({ toFake: ['setImmediate', 'nextTick'] }) +let stream2DataCalled = false + +const stream = new MyTransform() +stream.on('data', function () { + stream.on('end', function () { + const stream2 = new MyTransform() + stream2.on('data', function () { + stream2.on('end', function () { + stream2DataCalled = true + }) + setImmediate(function () { + stream2.end() + }) + }) + stream2.emit('data') + }) + stream.end() +}) +stream.emit('data') + +clock.runAll() +clock.uninstall() +t.ok(stream2DataCalled) diff --git a/test/ours/test-stream-sync-write.js b/test/ours/test-stream-sync-write.js new file mode 100644 index 0000000000..7a467ec2dd --- /dev/null +++ b/test/ours/test-stream-sync-write.js @@ -0,0 +1,44 @@ +'use strict' + +require('../common') +const t = require('tap') +const util = require('util') +const stream = require('../../lib') +const WritableStream = stream.Writable + +t.plan(1) + +const InternalStream = function () { + WritableStream.call(this) +} +util.inherits(InternalStream, WritableStream) + +let invocations = 0 +InternalStream.prototype._write = function (chunk, encoding, callback) { + callback() +} + +const internalStream = new InternalStream() + +const ExternalStream = function (writable) { + this._writable = writable + 
WritableStream.call(this) +} +util.inherits(ExternalStream, WritableStream) + +ExternalStream.prototype._write = function (chunk, encoding, callback) { + this._writable.write(chunk, encoding, callback) +} + +const externalStream = new ExternalStream(internalStream) + +for (let i = 0; i < 2000; i++) { + externalStream.write(i.toString(), () => { + invocations++ + }) +} + +externalStream.end() +externalStream.on('finish', () => { + t.equal(invocations, 2000) +}) diff --git a/test/parallel/test-readable-from-iterator-closing.js b/test/parallel/test-readable-from-iterator-closing.js new file mode 100644 index 0000000000..c46e3fb958 --- /dev/null +++ b/test/parallel/test-readable-from-iterator-closing.js @@ -0,0 +1,212 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const { mustCall, mustNotCall } = require('../common'); +const { Readable } = require('../../lib'); +const { strictEqual } = require('assert'); + +async function asyncSupport() { + const finallyMustCall = mustCall(); + const bodyMustCall = mustCall(); + + async function* infiniteGenerate() { + try { + while (true) yield 'a'; + } finally { + finallyMustCall(); + } + } + + const stream = Readable.from(infiniteGenerate()); + + for await (const chunk of stream) { + bodyMustCall(); + strictEqual(chunk, 'a'); + break; + } +} + +async function syncSupport() { + const finallyMustCall = mustCall(); + const bodyMustCall = mustCall(); + + function* infiniteGenerate() { + try { + while (true) yield 'a'; + } finally { + finallyMustCall(); + } + } + + const stream = Readable.from(infiniteGenerate()); + + for await (const chunk of stream) { + bodyMustCall(); + strictEqual(chunk, 'a'); + break; + } +} + +async function syncPromiseSupport() { + const returnMustBeAwaited = mustCall(); + const bodyMustCall = mustCall(); + + function* infiniteGenerate() { + try { + while (true) yield Promise.resolve('a'); + } finally { + // eslint-disable-next-line no-unsafe-finally + return { then(cb) { + returnMustBeAwaited(); + cb(); + } }; + } + } + + const stream = Readable.from(infiniteGenerate()); + + for await (const chunk of stream) { + bodyMustCall(); + strictEqual(chunk, 'a'); + break; + } +} + +async function syncRejectedSupport() { + const returnMustBeAwaited = mustCall(); + const bodyMustNotCall = mustNotCall(); + const catchMustCall = mustCall(); + const secondNextMustNotCall = mustNotCall(); + + function* generate() { + try { + yield Promise.reject('a'); + secondNextMustNotCall(); + } finally { + // eslint-disable-next-line no-unsafe-finally + return { then(cb) { + returnMustBeAwaited(); + cb(); + } }; + } + } + + const stream = Readable.from(generate()); + + try { + for await (const chunk of stream) { + bodyMustNotCall(chunk); + } + } catch { + catchMustCall(); + } +} + +async function noReturnAfterThrow() { + const returnMustNotCall = mustNotCall(); + const bodyMustNotCall = mustNotCall(); + const catchMustCall = mustCall(); + const nextMustCall = mustCall(); + + const stream = Readable.from({ + [Symbol.asyncIterator]() { return this; }, + async next() { + nextMustCall(); + throw new Error('a'); + }, + async return() { + returnMustNotCall(); + return { done: true }; + }, + }); + + try { + for await (const chunk of stream) { + bodyMustNotCall(chunk); + } + } catch { + catchMustCall(); + } +} + +async function closeStreamWhileNextIsPending() { + const finallyMustCall = mustCall(); + const dataMustCall = mustCall(); + + let resolveDestroy; + const destroyed = + new Promise((resolve) => { 
resolveDestroy = mustCall(resolve); }); + let resolveYielded; + const yielded = + new Promise((resolve) => { resolveYielded = mustCall(resolve); }); + + async function* infiniteGenerate() { + try { + while (true) { + yield 'a'; + resolveYielded(); + await destroyed; + } + } finally { + finallyMustCall(); + } + } + + const stream = Readable.from(infiniteGenerate()); + + stream.on('data', (data) => { + dataMustCall(); + strictEqual(data, 'a'); + }); + + yielded.then(() => { + stream.destroy(); + resolveDestroy(); + }); +} + +async function closeAfterNullYielded() { + const finallyMustCall = mustCall(); + const dataMustCall = mustCall(3); + + function* generate() { + try { + yield 'a'; + yield 'a'; + yield 'a'; + } finally { + finallyMustCall(); + } + } + + const stream = Readable.from(generate()); + + stream.on('data', (chunk) => { + dataMustCall(); + strictEqual(chunk, 'a'); + }); +} + +Promise.all([ + asyncSupport(), + syncSupport(), + syncPromiseSupport(), + syncRejectedSupport(), + noReturnAfterThrow(), + closeStreamWhileNextIsPending(), + closeAfterNullYielded(), +]).then(mustCall()); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-readable-from.js b/test/parallel/test-readable-from.js new file mode 100644 index 0000000000..b433ea52f4 --- /dev/null +++ b/test/parallel/test-readable-from.js @@ -0,0 +1,238 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const { mustCall } = require('../common'); +const { once } = require('events'); +const { Readable } = require('../../lib'); +const { strictEqual, throws } = require('assert'); +const common = require('../common'); + +{ + throws(() => { + Readable.from(null); + }, /ERR_INVALID_ARG_TYPE/); +} + +async function toReadableBasicSupport() { + async function* generate() { + yield 'a'; + yield 'b'; + yield 'c'; + } + + const stream = Readable.from(generate()); + + const expected = ['a', 'b', 'c']; + + for await (const chunk of stream) { + strictEqual(chunk, expected.shift()); + } +} + +async function toReadableSyncIterator() { + function* generate() { + yield 'a'; + yield 'b'; + yield 'c'; + } + + const stream = Readable.from(generate()); + + const expected = ['a', 'b', 'c']; + + for await (const chunk of stream) { + strictEqual(chunk, expected.shift()); + } +} + +async function toReadablePromises() { + const promises = [ + Promise.resolve('a'), + Promise.resolve('b'), + Promise.resolve('c'), + ]; + + const stream = Readable.from(promises); + + const expected = ['a', 'b', 'c']; + + for await (const chunk of stream) { + strictEqual(chunk, expected.shift()); + } +} + +async function toReadableString() { + const stream = Readable.from('abc'); + + const expected = ['abc']; + + for await (const chunk of stream) { + strictEqual(chunk, expected.shift()); + } +} + +async function toReadableBuffer() { + const stream = Readable.from(Buffer.from('abc')); + + const expected = ['abc']; + + for await (const chunk of stream) { + strictEqual(chunk.toString(), expected.shift()); + } +} + +async function toReadableOnData() { + async function* generate() { + yield 'a'; + yield 'b'; + yield 'c'; + } + + const stream = Readable.from(generate()); + + let iterations = 0; + const expected = ['a', 'b', 'c']; + + stream.on('data', (chunk) => { + iterations++; + strictEqual(chunk, expected.shift()); + }); + + await 
once(stream, 'end'); + + strictEqual(iterations, 3); +} + +async function toReadableOnDataNonObject() { + async function* generate() { + yield 'a'; + yield 'b'; + yield 'c'; + } + + const stream = Readable.from(generate(), { objectMode: false }); + + let iterations = 0; + const expected = ['a', 'b', 'c']; + + stream.on('data', (chunk) => { + iterations++; + strictEqual(chunk instanceof Buffer, true); + strictEqual(chunk.toString(), expected.shift()); + }); + + await once(stream, 'end'); + + strictEqual(iterations, 3); +} + +async function destroysTheStreamWhenThrowing() { + async function* generate() { // eslint-disable-line require-yield + throw new Error('kaboom'); + } + + const stream = Readable.from(generate()); + + stream.read(); + + const [err] = await once(stream, 'error'); + strictEqual(err.message, 'kaboom'); + strictEqual(stream.destroyed, true); + +} + +async function asTransformStream() { + async function* generate(stream) { + for await (const chunk of stream) { + yield chunk.toUpperCase(); + } + } + + const source = new Readable({ + objectMode: true, + read() { + this.push('a'); + this.push('b'); + this.push('c'); + this.push(null); + } + }); + + const stream = Readable.from(generate(source)); + + const expected = ['A', 'B', 'C']; + + for await (const chunk of stream) { + strictEqual(chunk, expected.shift()); + } +} + +async function endWithError() { + async function* generate() { + yield 1; + yield 2; + yield Promise.reject('Boum'); + } + + const stream = Readable.from(generate()); + + const expected = [1, 2]; + + try { + for await (const chunk of stream) { + strictEqual(chunk, expected.shift()); + } + throw new Error(); + } catch (err) { + strictEqual(expected.length, 0); + strictEqual(err, 'Boum'); + } +} + +async function destroyingStreamWithErrorThrowsInGenerator() { + const validateError = common.mustCall((e) => { + strictEqual(e, 'Boum'); + }); + async function* generate() { + try { + yield 1; + yield 2; + yield 3; + throw new Error(); + } catch (e) { + validateError(e); + } + } + const stream = Readable.from(generate()); + stream.read(); + stream.once('error', common.mustCall()); + stream.destroy('Boum'); +} + +Promise.all([ + toReadableBasicSupport(), + toReadableSyncIterator(), + toReadablePromises(), + toReadableString(), + toReadableBuffer(), + toReadableOnData(), + toReadableOnDataNonObject(), + destroysTheStreamWhenThrowing(), + asTransformStream(), + endWithError(), + destroyingStreamWithErrorThrowsInGenerator(), +]).then(mustCall()); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-readable-large-hwm.js b/test/parallel/test-readable-large-hwm.js new file mode 100644 index 0000000000..cda0d95e89 --- /dev/null +++ b/test/parallel/test-readable-large-hwm.js @@ -0,0 +1,42 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { Readable } = require('../../lib'); + +// Make sure that readable completes +// even when reading larger buffer. +const bufferSize = 10 * 1024 * 1024; +let n = 0; +const r = new Readable({ + read() { + // Try to fill readable buffer piece by piece. 
+ r.push(Buffer.alloc(bufferSize / 10)); + + if (n++ > 10) { + r.push(null); + } + } +}); + +r.on('readable', () => { + while (true) { + const ret = r.read(bufferSize); + if (ret === null) + break; + } +}); +r.on('end', common.mustCall()); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-readable-single-end.js b/test/parallel/test-readable-single-end.js new file mode 100644 index 0000000000..1d2c231784 --- /dev/null +++ b/test/parallel/test-readable-single-end.js @@ -0,0 +1,31 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Readable } = require('../../lib'); + +// This test ensures that there will not be an additional empty 'readable' +// event when stream has ended (only 1 event signalling about end) + +const r = new Readable({ + read: () => {}, +}); + +r.push(null); + +r.on('readable', common.mustCall()); +r.on('end', common.mustCall()); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-add-abort-signal.js b/test/parallel/test-stream-add-abort-signal.js new file mode 100644 index 0000000000..71af04d618 --- /dev/null +++ b/test/parallel/test-stream-add-abort-signal.js @@ -0,0 +1,42 @@ +// Flags: --expose-internals + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +require('../common'); +const assert = require('assert'); +const { addAbortSignal, Readable } = require('../../lib'); +const { + addAbortSignalNoValidate, +} = require('../../lib/internal/streams/add-abort-signal'); + +{ + assert.throws(() => { + addAbortSignal('INVALID_SIGNAL'); + }, /ERR_INVALID_ARG_TYPE/); + + const ac = new AbortController(); + assert.throws(() => { + addAbortSignal(ac.signal, 'INVALID_STREAM'); + }, /ERR_INVALID_ARG_TYPE/); +} + +{ + const r = new Readable({ + read: () => {}, + }); + assert.deepStrictEqual(r, addAbortSignalNoValidate('INVALID_SIGNAL', r)); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-aliases-legacy.js b/test/parallel/test-stream-aliases-legacy.js new file mode 100644 index 0000000000..629b6a3986 --- /dev/null +++ b/test/parallel/test-stream-aliases-legacy.js @@ -0,0 +1,29 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +require('../common'); + +const assert = require('assert'); +const stream = require('../../lib'); + +// Verify that all individual aliases are left in place. 
+ +assert.strictEqual(stream.Readable, require('../../lib/_stream_readable')); +assert.strictEqual(stream.Writable, require('../../lib/_stream_writable')); +assert.strictEqual(stream.Duplex, require('../../lib/_stream_duplex')); +assert.strictEqual(stream.Transform, require('../../lib/_stream_transform')); +assert.strictEqual(stream.PassThrough, require('../../lib/_stream_passthrough')); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-asIndexedPairs.mjs b/test/parallel/test-stream-asIndexedPairs.mjs new file mode 100644 index 0000000000..147eabef09 --- /dev/null +++ b/test/parallel/test-stream-asIndexedPairs.mjs @@ -0,0 +1,64 @@ +import '../common/index.mjs'; +import { Readable } from '../../lib/index.js'; +import { deepStrictEqual, rejects, throws } from 'assert'; +import tap from 'tap'; + +{ + // asIndexedPairs with a synchronous stream + const pairs = await Readable.from([1, 2, 3]).asIndexedPairs().toArray(); + deepStrictEqual(pairs, [[0, 1], [1, 2], [2, 3]]); + const empty = await Readable.from([]).asIndexedPairs().toArray(); + deepStrictEqual(empty, []); +} + +{ + // asIndexedPairs works on asynchronous streams + const asyncFrom = (...args) => Readable.from(...args).map(async (x) => x); + const pairs = await asyncFrom([1, 2, 3]).asIndexedPairs().toArray(); + deepStrictEqual(pairs, [[0, 1], [1, 2], [2, 3]]); + const empty = await asyncFrom([]).asIndexedPairs().toArray(); + deepStrictEqual(empty, []); +} + +{ + // Does not enumerate an infinite stream + const infinite = () => Readable.from(async function* () { + while (true) yield 1; + }()); + const pairs = await infinite().asIndexedPairs().take(3).toArray(); + deepStrictEqual(pairs, [[0, 1], [1, 1], [2, 1]]); + const empty = await infinite().asIndexedPairs().take(0).toArray(); + deepStrictEqual(empty, []); +} + +{ + // AbortSignal + await rejects(async () => { + const ac = new AbortController(); + const { signal } = ac; + const p = Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray(); + ac.abort(); + await p; + }, { name: 'AbortError' }); + + await rejects(async () => { + const signal = AbortSignal.abort(); + await Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray(); + }, /AbortError/); +} + +{ + // Error cases + throws(() => Readable.from([1]).asIndexedPairs(1), /ERR_INVALID_ARG_TYPE/); + throws(() => Readable.from([1]).asIndexedPairs({ signal: true }), /ERR_INVALID_ARG_TYPE/); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-auto-destroy.js b/test/parallel/test-stream-auto-destroy.js new file mode 100644 index 0000000000..eef741b30d --- /dev/null +++ b/test/parallel/test-stream-auto-destroy.js @@ -0,0 +1,127 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const stream = require('../../lib'); +const assert = require('assert'); + +{ + const r = new stream.Readable({ + autoDestroy: true, + read() { + this.push('hello'); + this.push('world'); + this.push(null); + }, + destroy: common.mustCall((err, cb) => cb()) + }); + + let ended = false; + + r.resume(); + + r.on('end', common.mustCall(() => { + ended = true; + })); + + 
r.on('close', common.mustCall(() => { + assert(ended); + })); +} + +{ + const w = new stream.Writable({ + autoDestroy: true, + write(data, enc, cb) { + cb(null); + }, + destroy: common.mustCall((err, cb) => cb()) + }); + + let finished = false; + + w.write('hello'); + w.write('world'); + w.end(); + + w.on('finish', common.mustCall(() => { + finished = true; + })); + + w.on('close', common.mustCall(() => { + assert(finished); + })); +} + +{ + const t = new stream.Transform({ + autoDestroy: true, + transform(data, enc, cb) { + cb(null, data); + }, + destroy: common.mustCall((err, cb) => cb()) + }); + + let ended = false; + let finished = false; + + t.write('hello'); + t.write('world'); + t.end(); + + t.resume(); + + t.on('end', common.mustCall(() => { + ended = true; + })); + + t.on('finish', common.mustCall(() => { + finished = true; + })); + + t.on('close', common.mustCall(() => { + assert(ended); + assert(finished); + })); +} + +{ + const r = new stream.Readable({ + read() { + r2.emit('error', new Error('fail')); + } + }); + const r2 = new stream.Readable({ + autoDestroy: true, + destroy: common.mustCall((err, cb) => cb()) + }); + + r.pipe(r2); +} + +{ + const r = new stream.Readable({ + read() { + w.emit('error', new Error('fail')); + } + }); + const w = new stream.Writable({ + autoDestroy: true, + destroy: common.mustCall((err, cb) => cb()) + }); + + r.pipe(w); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js b/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js new file mode 100644 index 0000000000..961aa1eafd --- /dev/null +++ b/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js @@ -0,0 +1,43 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { PassThrough } = require('../../lib'); + +const encode = new PassThrough({ + highWaterMark: 1 +}); + +const decode = new PassThrough({ + highWaterMark: 1 +}); + +const send = common.mustCall((buf) => { + encode.write(buf); +}, 4); + +let i = 0; +const onData = common.mustCall(() => { + if (++i === 2) { + send(Buffer.from([0x3])); + send(Buffer.from([0x4])); + } +}, 4); + +encode.pipe(decode).on('data', onData); + +send(Buffer.from([0x1])); +send(Buffer.from([0x2])); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-backpressure.js b/test/parallel/test-stream-backpressure.js new file mode 100644 index 0000000000..b9051924e1 --- /dev/null +++ b/test/parallel/test-stream-backpressure.js @@ -0,0 +1,54 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); + +let pushes = 0; +const total = 65500 + 40 * 1024; +const rs = new stream.Readable({ + read: common.mustCall(function() { + if (pushes++ === 10) { + this.push(null); + return; + } + + const length = this._readableState.length; + + // We are at most doing two full runs of _reads + // before stopping, because Readable is greedy + 
// to keep its buffer full + assert(length <= total); + + this.push(Buffer.alloc(65500)); + for (let i = 0; i < 40; i++) { + this.push(Buffer.alloc(1024)); + } + + // We will be over highWaterMark at this point + // but a new call to _read is scheduled anyway. + }, 11) +}); + +const ws = stream.Writable({ + write: common.mustCall(function(data, enc, cb) { + setImmediate(cb); + }, 41 * 10) +}); + +rs.pipe(ws); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-base-prototype-accessors-enumerability.js b/test/parallel/test-stream-base-prototype-accessors-enumerability.js new file mode 100644 index 0000000000..7f3c6917e7 --- /dev/null +++ b/test/parallel/test-stream-base-prototype-accessors-enumerability.js @@ -0,0 +1,36 @@ +// Flags: --expose-internals + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +require('../common'); + +// This tests that the prototype accessors added by StreamBase::AddMethods +// are not enumerable. They could be enumerated when inspecting the prototype +// with util.inspect or the inspector protocol. + +const assert = require('assert'); + +// Or anything that calls StreamBase::AddMethods when setting up its prototype +const internalBinding = process.binding +const TTY = internalBinding('tty_wrap').TTY; + +{ + const ttyIsEnumerable = Object.prototype.propertyIsEnumerable.bind(TTY); + assert.strictEqual(ttyIsEnumerable('bytesRead'), false); + assert.strictEqual(ttyIsEnumerable('fd'), false); + assert.strictEqual(ttyIsEnumerable('_externalStream'), false); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-base-typechecking.js b/test/parallel/test-stream-base-typechecking.js new file mode 100644 index 0000000000..73bcc8d0a5 --- /dev/null +++ b/test/parallel/test-stream-base-typechecking.js @@ -0,0 +1,33 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const net = require('net'); + +const server = net.createServer().listen(0, common.mustCall(() => { + const client = net.connect(server.address().port, common.mustCall(() => { + assert.throws(() => { + client.write('broken', 'buffer'); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_TYPE', + message: 'Second argument must be a buffer' + }); + client.destroy(); + server.close(); + })); +})); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-big-packet.js b/test/parallel/test-stream-big-packet.js new file mode 100644 index 0000000000..5b5b0a4bff --- /dev/null +++ b/test/parallel/test-stream-big-packet.js @@ -0,0 +1,80 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); + +let passed = false; + +class TestStream extends stream.Transform { + _transform(chunk, encoding, done) { + if (!passed) { + // Char 'a' only exists in the last write + passed = chunk.toString().includes('a'); + } + done(); + } +} + +const s1 = new stream.Transform({ + transform(chunk, encoding, cb) { + process.nextTick(cb, null, chunk); + } +}); +const s2 = new stream.PassThrough(); +const s3 = new TestStream(); +s1.pipe(s3); +// Don't let s2 auto close which may close s3 +s2.pipe(s3, { end: false }); + +// We must write a buffer larger than highWaterMark +const big = Buffer.alloc(s1.writableHighWaterMark + 1, 'x'); + +// Since big is larger than highWaterMark, it will be buffered internally. +assert(!s1.write(big)); +// 'tiny' is small enough to pass through internal buffer. +assert(s2.write('tiny')); + +// Write some small data in next IO loop, which will never be written to s3 +// Because 'drain' event is not emitted from s1 and s1 is still paused +setImmediate(s1.write.bind(s1), 'later'); + +// Assert after two IO loops when all operations have been done. +process.on('exit', function() { + assert(passed, 'Large buffer is not handled properly by Writable Stream'); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-big-push.js b/test/parallel/test-stream-big-push.js new file mode 100644 index 0000000000..b929e2abf1 --- /dev/null +++ b/test/parallel/test-stream-big-push.js @@ -0,0 +1,89 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); +const str = 'asdfasdfasdfasdfasdf'; + +const r = new stream.Readable({ + highWaterMark: 5, + encoding: 'utf8' +}); + +let reads = 0; + +function _read() { + if (reads === 0) { + setTimeout(() => { + r.push(str); + }, 1); + reads++; + } else if (reads === 1) { + const ret = r.push(str); + assert.strictEqual(ret, false); + reads++; + } else { + r.push(null); + } +} + +r._read = common.mustCall(_read, 3); + +r.on('end', common.mustCall()); + +// Push some data in to start. +// We've never gotten any read event at this point. +const ret = r.push(str); +// Should be false. > hwm +assert(!ret); +let chunk = r.read(); +assert.strictEqual(chunk, str); +chunk = r.read(); +assert.strictEqual(chunk, null); + +r.once('readable', () => { + // This time, we'll get *all* the remaining data, because + // it's been added synchronously, as the read WOULD take + // us below the hwm, and so it triggered a _read() again, + // which synchronously added more, which we then return. + chunk = r.read(); + assert.strictEqual(chunk, str + str); + + chunk = r.read(); + assert.strictEqual(chunk, null); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-buffer-list.js b/test/parallel/test-stream-buffer-list.js new file mode 100644 index 0000000000..2accace65f --- /dev/null +++ b/test/parallel/test-stream-buffer-list.js @@ -0,0 +1,99 @@ +// Flags: --expose-internals + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); +const BufferList = require('../../lib/internal/streams/buffer_list'); + +// Test empty buffer list. 
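+// BufferList (required above from lib/internal/streams/buffer_list) is the
+// internal list readable-stream uses to queue buffered chunks. On an empty
+// list, shift(), join() and concat() should be harmless no-ops returning
+// empty values, which is what the assertions below verify.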
+const emptyList = new BufferList(); + +emptyList.shift(); +assert.deepStrictEqual(emptyList, new BufferList()); + +assert.strictEqual(emptyList.join(','), ''); + +assert.deepStrictEqual(emptyList.concat(0), Buffer.alloc(0)); + +const buf = Buffer.from('foo'); + +function testIterator(list, count) { + // test iterator + let len = 0; + // eslint-disable-next-line no-unused-vars + for (const x of list) { + len++; + } + assert.strictEqual(len, count); +} + +// Test buffer list with one element. +const list = new BufferList(); +testIterator(list, 0); + +list.push(buf); +testIterator(list, 1); +for (const x of list) { + assert.strictEqual(x, buf); +} + +const copy = list.concat(3); +testIterator(copy, 3); + +assert.notStrictEqual(copy, buf); +assert.deepStrictEqual(copy, buf); + +assert.strictEqual(list.join(','), 'foo'); + +const shifted = list.shift(); +testIterator(list, 0); +assert.strictEqual(shifted, buf); +assert.deepStrictEqual(list, new BufferList()); + +{ + const list = new BufferList(); + list.push('foo'); + list.push('bar'); + list.push('foo'); + list.push('bar'); + assert.strictEqual(list.consume(6, true), 'foobar'); + assert.strictEqual(list.consume(6, true), 'foobar'); +} + +{ + const list = new BufferList(); + list.push('foo'); + list.push('bar'); + assert.strictEqual(list.consume(5, true), 'fooba'); +} + +{ + const list = new BufferList(); + list.push(buf); + list.push(buf); + list.push(buf); + list.push(buf); + assert.strictEqual(list.consume(6).toString(), 'foofoo'); + assert.strictEqual(list.consume(6).toString(), 'foofoo'); +} + +{ + const list = new BufferList(); + list.push(buf); + list.push(buf); + assert.strictEqual(list.consume(5).toString(), 'foofo'); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-catch-rejections.js b/test/parallel/test-stream-catch-rejections.js new file mode 100644 index 0000000000..82c76dd827 --- /dev/null +++ b/test/parallel/test-stream-catch-rejections.js @@ -0,0 +1,66 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const stream = require('../../lib'); +const assert = require('assert'); + +{ + const r = new stream.Readable({ + captureRejections: true, + read() { + } + }); + r.push('hello'); + r.push('world'); + + const err = new Error('kaboom'); + + r.on('error', common.mustCall((_err) => { + assert.strictEqual(err, _err); + assert.strictEqual(r.destroyed, true); + })); + + r.on('data', async () => { + throw err; + }); +} + +{ + const w = new stream.Writable({ + captureRejections: true, + highWaterMark: 1, + write(chunk, enc, cb) { + process.nextTick(cb); + } + }); + + const err = new Error('kaboom'); + + w.write('hello', () => { + w.write('world'); + }); + + w.on('error', common.mustCall((_err) => { + assert.strictEqual(err, _err); + assert.strictEqual(w.destroyed, true); + })); + + w.on('drain', common.mustCall(async () => { + throw err; + }, 2)); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-compose.js b/test/parallel/test-stream-compose.js new file mode 100644 index 0000000000..ae4db7ff34 --- /dev/null +++ 
b/test/parallel/test-stream-compose.js @@ -0,0 +1,440 @@ +// Flags: --expose-internals + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { + Readable, + Transform, + Writable, + finished, + PassThrough +} = require('../../lib'); +const compose = require('../../lib/internal/streams/compose'); +const assert = require('assert'); + +{ + let res = ''; + compose( + new Transform({ + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk + chunk); + }) + }), + new Transform({ + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk.toString().toUpperCase()); + }) + }) + ) + .end('asd') + .on('data', common.mustCall((buf) => { + res += buf; + })) + .on('end', common.mustCall(() => { + assert.strictEqual(res, 'ASDASD'); + })); +} + +{ + let res = ''; + compose( + async function*(source) { + for await (const chunk of source) { + yield chunk + chunk; + } + }, + async function*(source) { + for await (const chunk of source) { + yield chunk.toString().toUpperCase(); + } + } + ) + .end('asd') + .on('data', common.mustCall((buf) => { + res += buf; + })) + .on('end', common.mustCall(() => { + assert.strictEqual(res, 'ASDASD'); + })); +} + +{ + let res = ''; + compose( + async function*(source) { + for await (const chunk of source) { + yield chunk + chunk; + } + } + ) + .end('asd') + .on('data', common.mustCall((buf) => { + res += buf; + })) + .on('end', common.mustCall(() => { + assert.strictEqual(res, 'asdasd'); + })); +} + +{ + let res = ''; + compose( + Readable.from(['asd']), + new Transform({ + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk.toString().toUpperCase()); + }) + }) + ) + .on('data', common.mustCall((buf) => { + res += buf; + })) + .on('end', common.mustCall(() => { + assert.strictEqual(res, 'ASD'); + })); +} + +{ + let res = ''; + compose( + async function* () { + yield 'asd'; + }(), + new Transform({ + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk.toString().toUpperCase()); + }) + }) + ) + .on('data', common.mustCall((buf) => { + res += buf; + })) + .on('end', common.mustCall(() => { + assert.strictEqual(res, 'ASD'); + })); +} + +{ + let res = ''; + compose( + new Transform({ + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk.toString().toUpperCase()); + }) + }), + async function*(source) { + for await (const chunk of source) { + yield chunk; + } + }, + new Writable({ + write: common.mustCall((chunk, encoding, callback) => { + res += chunk; + callback(null); + }) + }) + ) + .end('asd') + .on('finish', common.mustCall(() => { + assert.strictEqual(res, 'ASD'); + })); +} + +{ + let res = ''; + compose( + new Transform({ + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk.toString().toUpperCase()); + }) + }), + async function*(source) { + for await (const chunk of source) { + yield chunk; + } + }, + async function(source) { + for await (const chunk of source) { + res += chunk; + } + } + ) + .end('asd') + .on('finish', common.mustCall(() => { + assert.strictEqual(res, 'ASD'); + })); +} + +{ + let res; + compose( + new Transform({ + objectMode: true, + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, { chunk }); + }) + }), + async function*(source) { + for await (const chunk of source) { + yield chunk; + } + }, + new Transform({ + objectMode: true, + transform: 
common.mustCall((chunk, encoding, callback) => { + callback(null, { chunk }); + }) + }) + ) + .end(true) + .on('data', common.mustCall((buf) => { + res = buf; + })) + .on('end', common.mustCall(() => { + assert.strictEqual(res.chunk.chunk, true); + })); +} + +{ + const _err = new Error('asd'); + compose( + new Transform({ + objectMode: true, + transform: common.mustCall((chunk, encoding, callback) => { + callback(_err); + }) + }), + async function*(source) { + for await (const chunk of source) { + yield chunk; + } + }, + new Transform({ + objectMode: true, + transform: common.mustNotCall((chunk, encoding, callback) => { + callback(null, { chunk }); + }) + }) + ) + .end(true) + .on('data', common.mustNotCall()) + .on('end', common.mustNotCall()) + .on('error', (err) => { + assert.strictEqual(err, _err); + }); +} + +{ + const _err = new Error('asd'); + compose( + new Transform({ + objectMode: true, + transform: common.mustCall((chunk, encoding, callback) => { + callback(null, chunk); + }) + }), + async function*(source) { // eslint-disable-line require-yield + let tmp = ''; + for await (const chunk of source) { + tmp += chunk; + throw _err; + } + return tmp; + }, + new Transform({ + objectMode: true, + transform: common.mustNotCall((chunk, encoding, callback) => { + callback(null, { chunk }); + }) + }) + ) + .end(true) + .on('data', common.mustNotCall()) + .on('end', common.mustNotCall()) + .on('error', (err) => { + assert.strictEqual(err, _err); + }); +} + +{ + let buf = ''; + + // Convert into readable Duplex. + const s1 = compose(async function* () { + yield 'Hello'; + yield 'World'; + }(), async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase(); + } + }, async function(source) { + for await (const chunk of source) { + buf += chunk; + } + }); + + assert.strictEqual(s1.writable, false); + assert.strictEqual(s1.readable, false); + + finished(s1.resume(), common.mustCall((err) => { + assert(!err); + assert.strictEqual(buf, 'HELLOWORLD'); + })); +} + +{ + let buf = ''; + // Convert into transform duplex. + const s2 = compose(async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase(); + } + }); + s2.end('helloworld'); + s2.resume(); + s2.on('data', (chunk) => { + buf += chunk; + }); + + finished(s2.resume(), common.mustCall((err) => { + assert(!err); + assert.strictEqual(buf, 'HELLOWORLD'); + })); +} + +{ + let buf = ''; + + // Convert into readable Duplex. + const s1 = compose(async function* () { + yield 'Hello'; + yield 'World'; + }()); + + // Convert into transform duplex. + const s2 = compose(async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase(); + } + }); + + // Convert into writable duplex. + const s3 = compose(async function(source) { + for await (const chunk of source) { + buf += chunk; + } + }); + + const s4 = compose(s1, s2, s3); + + finished(s4, common.mustCall((err) => { + assert(!err); + assert.strictEqual(buf, 'HELLOWORLD'); + })); +} + +{ + let buf = ''; + + // Convert into readable Duplex. 
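+  // (With a source generator, a transform generator and an async consumer,
+  //  the composed stream both produces and consumes its own data; the
+  //  finished() call below is what signals that everything has been
+  //  written into `buf`.)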
+ const s1 = compose(async function* () { + yield 'Hello'; + yield 'World'; + }(), async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase(); + } + }, async function(source) { + for await (const chunk of source) { + buf += chunk; + } + }); + + finished(s1, common.mustCall((err) => { + assert(!err); + assert.strictEqual(buf, 'HELLOWORLD'); + })); +} + +{ + try { + compose(); + } catch (err) { + assert.strictEqual(err.code, 'ERR_MISSING_ARGS'); + } +} + +{ + try { + compose(new Writable(), new PassThrough()); + } catch (err) { + assert.strictEqual(err.code, 'ERR_INVALID_ARG_VALUE'); + } +} + +{ + try { + compose(new PassThrough(), new Readable({ read() {} }), new PassThrough()); + } catch (err) { + assert.strictEqual(err.code, 'ERR_INVALID_ARG_VALUE'); + } +} + +{ + let buf = ''; + + // Convert into readable Duplex. + const s1 = compose(async function* () { + yield 'Hello'; + yield 'World'; + }(), async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase(); + } + }, async function(source) { + for await (const chunk of source) { + buf += chunk; + } + return buf; + }); + + finished(s1, common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_INVALID_RETURN_VALUE'); + })); +} + +{ + let buf = ''; + + // Convert into readable Duplex. + const s1 = compose('HelloWorld', async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase(); + } + }, async function(source) { + for await (const chunk of source) { + buf += chunk; + } + }); + + finished(s1, common.mustCall((err) => { + assert(!err); + assert.strictEqual(buf, 'HELLOWORLD'); + })); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-construct-async-error.js b/test/parallel/test-stream-construct-async-error.js new file mode 100644 index 0000000000..c62b033dec --- /dev/null +++ b/test/parallel/test-stream-construct-async-error.js @@ -0,0 +1,255 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { + Duplex, + Writable, + Transform, +} = require('../../lib'); + + const st = require('timers').setTimeout; + + function setTimeout(ms) { + return new Promise(resolve => { + st(resolve, ms); + }); + } + +const assert = require('assert'); + +{ + class Foo extends Duplex { + async _destroy(err, cb) { + await setTimeout(common.platformTimeout(1)); + throw new Error('boom'); + } + } + + const foo = new Foo(); + foo.destroy(); + foo.on('error', common.expectsError({ + message: 'boom' + })); + foo.on('close', common.mustCall(() => { + assert(foo.destroyed); + })); +} + +{ + class Foo extends Duplex { + async _destroy(err, cb) { + await setTimeout(common.platformTimeout(1)); + } + } + + const foo = new Foo(); + foo.destroy(); + foo.on('close', common.mustCall(() => { + assert(foo.destroyed); + })); +} + +{ + class Foo extends Duplex { + async _construct() { + await setTimeout(common.platformTimeout(1)); + } + + _write = common.mustCall((chunk, encoding, cb) => { + cb(); + }); + + _read() {} + } + + const foo = new Foo(); + foo.write('test', common.mustCall()); +} + +{ + class Foo extends Duplex { + async _construct(callback) { + await setTimeout(common.platformTimeout(1)); + callback(); + } + + _write = common.mustCall((chunk, encoding, cb) 
=> { + cb(); + }); + + _read() {} + } + + const foo = new Foo(); + foo.write('test', common.mustCall()); + foo.on('error', common.mustNotCall()); +} + +{ + class Foo extends Writable { + _write = common.mustCall((chunk, encoding, cb) => { + cb(); + }); + + async _final() { + await setTimeout(common.platformTimeout(1)); + } + } + + const foo = new Foo(); + foo.end('hello'); + foo.on('finish', common.mustCall()); +} + +{ + class Foo extends Writable { + _write = common.mustCall((chunk, encoding, cb) => { + cb(); + }); + + async _final(callback) { + await setTimeout(common.platformTimeout(1)); + callback(); + } + } + + const foo = new Foo(); + foo.end('hello'); + foo.on('finish', common.mustCall()); +} + +{ + class Foo extends Writable { + _write = common.mustCall((chunk, encoding, cb) => { + cb(); + }); + + async _final() { + await setTimeout(common.platformTimeout(1)); + throw new Error('boom'); + } + } + + const foo = new Foo(); + foo.end('hello'); + foo.on('error', common.expectsError({ + message: 'boom' + })); + foo.on('close', common.mustCall()); +} + +{ + const expected = ['hello', 'world']; + class Foo extends Transform { + async _flush() { + return 'world'; + } + + _transform(chunk, encoding, callback) { + callback(null, chunk); + } + } + + const foo = new Foo(); + foo.end('hello'); + foo.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk.toString(), expected.shift()); + }, 2)); +} + +{ + const expected = ['hello', 'world']; + class Foo extends Transform { + async _flush(callback) { + callback(null, 'world'); + } + + _transform(chunk, encoding, callback) { + callback(null, chunk); + } + } + + const foo = new Foo(); + foo.end('hello'); + foo.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk.toString(), expected.shift()); + }, 2)); +} + +{ + class Foo extends Transform { + async _flush(callback) { + throw new Error('boom'); + } + + _transform(chunk, encoding, callback) { + callback(null, chunk); + } + } + + const foo = new Foo(); + foo.end('hello'); + foo.on('data', common.mustCall()); + foo.on('error', common.expectsError({ + message: 'boom' + })); + foo.on('close', common.mustCall()); +} + +{ + class Foo extends Transform { + async _transform(chunk) { + return chunk.toString().toUpperCase(); + } + } + + const foo = new Foo(); + foo.end('hello'); + foo.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk.toString(), 'HELLO'); + })); +} + +{ + class Foo extends Transform { + async _transform(chunk, _, callback) { + callback(null, chunk.toString().toUpperCase()); + } + } + + const foo = new Foo(); + foo.end('hello'); + foo.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk.toString(), 'HELLO'); + })); +} + +{ + class Foo extends Transform { + async _transform() { + throw new Error('boom'); + } + } + + const foo = new Foo(); + foo.end('hello'); + foo.on('error', common.expectsError({ + message: 'boom' + })); + foo.on('close', common.mustCall()); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-construct.js b/test/parallel/test-stream-construct.js new file mode 100644 index 0000000000..2f25ce30dc --- /dev/null +++ b/test/parallel/test-stream-construct.js @@ -0,0 +1,295 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { 
Writable, Readable, Duplex } = require('../../lib'); +const assert = require('assert'); + +{ + // Multiple callback. + new Writable({ + construct: common.mustCall((callback) => { + callback(); + callback(); + }) + }).on('error', common.expectsError({ + name: 'Error', + code: 'ERR_MULTIPLE_CALLBACK' + })); +} + +{ + // Multiple callback. + new Readable({ + construct: common.mustCall((callback) => { + callback(); + callback(); + }) + }).on('error', common.expectsError({ + name: 'Error', + code: 'ERR_MULTIPLE_CALLBACK' + })); +} + +{ + // Synchronous error. + + new Writable({ + construct: common.mustCall((callback) => { + callback(new Error('test')); + }) + }).on('error', common.expectsError({ + name: 'Error', + message: 'test' + })); +} + +{ + // Synchronous error. + + new Readable({ + construct: common.mustCall((callback) => { + callback(new Error('test')); + }) + }).on('error', common.expectsError({ + name: 'Error', + message: 'test' + })); +} + +{ + // Asynchronous error. + + new Writable({ + construct: common.mustCall((callback) => { + process.nextTick(callback, new Error('test')); + }) + }).on('error', common.expectsError({ + name: 'Error', + message: 'test' + })); +} + +{ + // Asynchronous error. + + new Readable({ + construct: common.mustCall((callback) => { + process.nextTick(callback, new Error('test')); + }) + }).on('error', common.expectsError({ + name: 'Error', + message: 'test' + })); +} + +function testDestroy(factory) { + { + let constructed = false; + const s = factory({ + construct: common.mustCall((cb) => { + constructed = true; + process.nextTick(cb); + }) + }); + s.on('close', common.mustCall(() => { + assert.strictEqual(constructed, true); + })); + s.destroy(); + } + + { + let constructed = false; + const s = factory({ + construct: common.mustCall((cb) => { + constructed = true; + process.nextTick(cb); + }) + }); + s.on('close', common.mustCall(() => { + assert.strictEqual(constructed, true); + })); + s.destroy(null, () => { + assert.strictEqual(constructed, true); + }); + } + + { + let constructed = false; + const s = factory({ + construct: common.mustCall((cb) => { + constructed = true; + process.nextTick(cb); + }) + }); + s.on('close', common.mustCall(() => { + assert.strictEqual(constructed, true); + })); + s.destroy(); + } + + + { + let constructed = false; + const s = factory({ + construct: common.mustCall((cb) => { + constructed = true; + process.nextTick(cb); + }) + }); + s.on('close', common.mustCall(() => { + assert.strictEqual(constructed, true); + })); + s.on('error', common.mustCall((err) => { + assert.strictEqual(err.message, 'kaboom'); + })); + s.destroy(new Error('kaboom'), (err) => { + assert.strictEqual(err.message, 'kaboom'); + assert.strictEqual(constructed, true); + }); + } + + { + let constructed = false; + const s = factory({ + construct: common.mustCall((cb) => { + constructed = true; + process.nextTick(cb); + }) + }); + s.on('error', common.mustCall(() => { + assert.strictEqual(constructed, true); + })); + s.on('close', common.mustCall(() => { + assert.strictEqual(constructed, true); + })); + s.destroy(new Error()); + } +} +testDestroy((opts) => new Readable({ + read: common.mustNotCall(), + ...opts +})); +testDestroy((opts) => new Writable({ + write: common.mustNotCall(), + final: common.mustNotCall(), + ...opts +})); + +{ + let constructed = false; + const r = new Readable({ + autoDestroy: true, + construct: common.mustCall((cb) => { + constructed = true; + process.nextTick(cb); + }), + read: common.mustCall(() => { + 
assert.strictEqual(constructed, true); + r.push(null); + }) + }); + r.on('close', common.mustCall(() => { + assert.strictEqual(constructed, true); + })); + r.on('data', common.mustNotCall()); +} + +{ + let constructed = false; + const w = new Writable({ + autoDestroy: true, + construct: common.mustCall((cb) => { + constructed = true; + process.nextTick(cb); + }), + write: common.mustCall((chunk, encoding, cb) => { + assert.strictEqual(constructed, true); + process.nextTick(cb); + }), + final: common.mustCall((cb) => { + assert.strictEqual(constructed, true); + process.nextTick(cb); + }) + }); + w.on('close', common.mustCall(() => { + assert.strictEqual(constructed, true); + })); + w.end('data'); +} + +{ + let constructed = false; + const w = new Writable({ + autoDestroy: true, + construct: common.mustCall((cb) => { + constructed = true; + process.nextTick(cb); + }), + write: common.mustNotCall(), + final: common.mustCall((cb) => { + assert.strictEqual(constructed, true); + process.nextTick(cb); + }) + }); + w.on('close', common.mustCall(() => { + assert.strictEqual(constructed, true); + })); + w.end(); +} + +{ + new Duplex({ + construct: common.mustCall() + }); +} + +{ + // https://github.com/nodejs/node/issues/34448 + + let constructed = false; + const d = new Duplex({ + readable: false, + construct: common.mustCall((callback) => { + setImmediate(common.mustCall(() => { + constructed = true; + callback(); + })); + }), + write(chunk, encoding, callback) { + callback(); + }, + read() { + this.push(null); + } + }); + d.resume(); + d.end('foo'); + d.on('close', common.mustCall(() => { + assert.strictEqual(constructed, true); + })); +} + +{ + // Construct should not cause stream to read. + new Readable({ + construct: common.mustCall((callback) => { + callback(); + }), + read: common.mustNotCall() + }); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-decoder-objectmode.js b/test/parallel/test-stream-decoder-objectmode.js new file mode 100644 index 0000000000..8a7a09356c --- /dev/null +++ b/test/parallel/test-stream-decoder-objectmode.js @@ -0,0 +1,35 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +require('../common'); +const stream = require('../../lib'); +const assert = require('assert'); + +const readable = new stream.Readable({ + read: () => {}, + encoding: 'utf16le', + objectMode: true +}); + +readable.push(Buffer.from('abc', 'utf16le')); +readable.push(Buffer.from('def', 'utf16le')); +readable.push(null); + +// Without object mode, these would be concatenated into a single chunk. 
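+// Because objectMode is set, each decoded string stays a separate chunk, so
+// read() returns 'abc' and 'def' one at a time rather than a single 'abcdef'.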
+assert.strictEqual(readable.read(), 'abc'); +assert.strictEqual(readable.read(), 'def'); +assert.strictEqual(readable.read(), null); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-destroy-event-order.js b/test/parallel/test-stream-destroy-event-order.js new file mode 100644 index 0000000000..6c7d3023ed --- /dev/null +++ b/test/parallel/test-stream-destroy-event-order.js @@ -0,0 +1,39 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const { Readable } = require('../../lib'); + +const rs = new Readable({ + read() {} +}); + +let closed = false; +let errored = false; + +rs.on('close', common.mustCall(() => { + closed = true; + assert(errored); +})); + +rs.on('error', common.mustCall((err) => { + errored = true; + assert(!closed); +})); + +rs.destroy(new Error('kaboom')); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-drop-take.js b/test/parallel/test-stream-drop-take.js new file mode 100644 index 0000000000..aaaa3bde31 --- /dev/null +++ b/test/parallel/test-stream-drop-take.js @@ -0,0 +1,117 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { + Readable, +} = require('../../lib'); +const { deepStrictEqual, rejects, throws } = require('assert'); + +const { from } = Readable; + +const fromAsync = (...args) => from(...args).map(async (x) => x); + +const naturals = () => from(async function*() { + let i = 1; + while (true) { + yield i++; + } +}()); + +{ + // Synchronous streams + (async () => { + deepStrictEqual(await from([1, 2, 3]).drop(2).toArray(), [3]); + deepStrictEqual(await from([1, 2, 3]).take(1).toArray(), [1]); + deepStrictEqual(await from([]).drop(2).toArray(), []); + deepStrictEqual(await from([]).take(1).toArray(), []); + deepStrictEqual(await from([1, 2, 3]).drop(1).take(1).toArray(), [2]); + deepStrictEqual(await from([1, 2]).drop(0).toArray(), [1, 2]); + deepStrictEqual(await from([1, 2]).take(0).toArray(), []); + })().then(common.mustCall()); + // Asynchronous streams + (async () => { + deepStrictEqual(await fromAsync([1, 2, 3]).drop(2).toArray(), [3]); + deepStrictEqual(await fromAsync([1, 2, 3]).take(1).toArray(), [1]); + deepStrictEqual(await fromAsync([]).drop(2).toArray(), []); + deepStrictEqual(await fromAsync([]).take(1).toArray(), []); + deepStrictEqual(await fromAsync([1, 2, 3]).drop(1).take(1).toArray(), [2]); + deepStrictEqual(await fromAsync([1, 2]).drop(0).toArray(), [1, 2]); + deepStrictEqual(await fromAsync([1, 2]).take(0).toArray(), []); + })().then(common.mustCall()); + // Infinite streams + // Asynchronous streams + (async () => { + deepStrictEqual(await naturals().take(1).toArray(), [1]); + deepStrictEqual(await naturals().drop(1).take(1).toArray(), [2]); + const next10 = [11, 12, 13, 14, 15, 16, 17, 18, 19, 20]; + deepStrictEqual(await naturals().drop(10).take(10).toArray(), next10); + deepStrictEqual(await naturals().take(5).take(1).toArray(), [1]); + })().then(common.mustCall()); +} + +{ + // Coercion + (async () => { + // The 
spec made me do this ^^ + deepStrictEqual(await naturals().take('cat').toArray(), []); + deepStrictEqual(await naturals().take('2').toArray(), [1, 2]); + deepStrictEqual(await naturals().take(true).toArray(), [1]); + })().then(common.mustCall()); +} + +{ + // Support for AbortSignal + const ac = new AbortController(); + rejects( + Readable.from([1, 2, 3]).take(1, { signal: ac.signal }).toArray(), { + name: 'AbortError', + }).then(common.mustCall()); + rejects( + Readable.from([1, 2, 3]).drop(1, { signal: ac.signal }).toArray(), { + name: 'AbortError', + }).then(common.mustCall()); + ac.abort(); +} + +{ + // Support for AbortSignal, already aborted + const signal = AbortSignal.abort(); + rejects( + Readable.from([1, 2, 3]).take(1, { signal }).toArray(), { + name: 'AbortError', + }).then(common.mustCall()); +} + +{ + // Error cases + const invalidArgs = [ + -1, + -Infinity, + -40, + ]; + + for (const example of invalidArgs) { + throws(() => from([]).take(example).toArray(), /ERR_OUT_OF_RANGE/); + } + + throws(() => Readable.from([1]).drop(1, 1), /ERR_INVALID_ARG_TYPE/); + throws(() => Readable.from([1]).drop(1, { signal: true }), /ERR_INVALID_ARG_TYPE/); + + throws(() => Readable.from([1]).take(1, 1), /ERR_INVALID_ARG_TYPE/); + throws(() => Readable.from([1]).take(1, { signal: true }), /ERR_INVALID_ARG_TYPE/); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-duplex-destroy.js b/test/parallel/test-stream-duplex-destroy.js new file mode 100644 index 0000000000..f7b6a3d633 --- /dev/null +++ b/test/parallel/test-stream-duplex-destroy.js @@ -0,0 +1,272 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Duplex } = require('../../lib'); +const assert = require('assert'); + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {} + }); + + duplex.resume(); + + duplex.on('end', common.mustNotCall()); + duplex.on('finish', common.mustNotCall()); + duplex.on('close', common.mustCall()); + + duplex.destroy(); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {} + }); + duplex.resume(); + + const expected = new Error('kaboom'); + + duplex.on('end', common.mustNotCall()); + duplex.on('finish', common.mustNotCall()); + duplex.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + duplex.destroy(expected); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {} + }); + + duplex._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, expected); + cb(err); + }); + + const expected = new Error('kaboom'); + + duplex.on('finish', common.mustNotCall('no finish event')); + duplex.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + duplex.destroy(expected); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const expected = new Error('kaboom'); + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {}, + destroy: common.mustCall(function(err, cb) { + assert.strictEqual(err, expected); + cb(); + }) + }); + duplex.resume(); + + duplex.on('end', common.mustNotCall('no end event')); + duplex.on('finish', common.mustNotCall('no finish 
event')); + + // Error is swallowed by the custom _destroy + duplex.on('error', common.mustNotCall('no error event')); + duplex.on('close', common.mustCall()); + + duplex.destroy(expected); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {} + }); + + duplex._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(); + }); + + duplex.destroy(); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {} + }); + duplex.resume(); + + duplex._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + process.nextTick(() => { + this.push(null); + this.end(); + cb(); + }); + }); + + const fail = common.mustNotCall('no finish or end event'); + + duplex.on('finish', fail); + duplex.on('end', fail); + + duplex.destroy(); + + duplex.removeListener('end', fail); + duplex.removeListener('finish', fail); + duplex.on('end', common.mustNotCall()); + duplex.on('finish', common.mustCall()); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {} + }); + + const expected = new Error('kaboom'); + + duplex._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(expected); + }); + + duplex.on('finish', common.mustNotCall('no finish event')); + duplex.on('end', common.mustNotCall('no end event')); + duplex.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + duplex.destroy(); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {}, + allowHalfOpen: true + }); + duplex.resume(); + + duplex.on('finish', common.mustNotCall()); + duplex.on('end', common.mustNotCall()); + + duplex.destroy(); + assert.strictEqual(duplex.destroyed, true); +} + +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {}, + }); + + duplex.destroyed = true; + assert.strictEqual(duplex.destroyed, true); + + // The internal destroy() mechanism should not be triggered + duplex.on('finish', common.mustNotCall()); + duplex.on('end', common.mustNotCall()); + duplex.destroy(); +} + +{ + function MyDuplex() { + assert.strictEqual(this.destroyed, false); + this.destroyed = false; + Duplex.call(this); + } + + Object.setPrototypeOf(MyDuplex.prototype, Duplex.prototype); + Object.setPrototypeOf(MyDuplex, Duplex); + + new MyDuplex(); +} + +{ + const duplex = new Duplex({ + writable: false, + autoDestroy: true, + write(chunk, enc, cb) { cb(); }, + read() {}, + }); + duplex.push(null); + duplex.resume(); + duplex.on('close', common.mustCall()); +} + +{ + const duplex = new Duplex({ + readable: false, + autoDestroy: true, + write(chunk, enc, cb) { cb(); }, + read() {}, + }); + duplex.end(); + duplex.on('close', common.mustCall()); +} + +{ + const duplex = new Duplex({ + allowHalfOpen: false, + autoDestroy: true, + write(chunk, enc, cb) { cb(); }, + read() {}, + }); + duplex.push(null); + duplex.resume(); + const orgEnd = duplex.end; + duplex.end = common.mustNotCall(); + duplex.on('end', () => { + // Ensure end() is called in next tick to allow + // any pending writes to be invoked first. 
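+    // (With allowHalfOpen: false the stream itself calls end() once the
+    //  readable side finishes; restoring `end` only inside nextTick proves
+    //  that this happens asynchronously, after the 'end' event.)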
+ process.nextTick(() => { + duplex.end = common.mustCall(orgEnd); + }); + }); + duplex.on('close', common.mustCall()); +} +{ + // Check abort signal + const controller = new AbortController(); + const { signal } = controller; + const duplex = new Duplex({ + write(chunk, enc, cb) { cb(); }, + read() {}, + signal, + }); + let count = 0; + duplex.on('error', common.mustCall((e) => { + assert.strictEqual(count++, 0); // Ensure not called twice + assert.strictEqual(e.name, 'AbortError'); + })); + duplex.on('close', common.mustCall()); + controller.abort(); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-duplex-end.js b/test/parallel/test-stream-duplex-end.js new file mode 100644 index 0000000000..ee442fcc2c --- /dev/null +++ b/test/parallel/test-stream-duplex-end.js @@ -0,0 +1,56 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const Duplex = require('../../lib').Duplex; + +{ + const stream = new Duplex({ + read() {} + }); + assert.strictEqual(stream.allowHalfOpen, true); + stream.on('finish', common.mustNotCall()); + assert.strictEqual(stream.listenerCount('end'), 0); + stream.resume(); + stream.push(null); +} + +{ + const stream = new Duplex({ + read() {}, + allowHalfOpen: false + }); + assert.strictEqual(stream.allowHalfOpen, false); + stream.on('finish', common.mustCall()); + assert.strictEqual(stream.listenerCount('end'), 0); + stream.resume(); + stream.push(null); +} + +{ + const stream = new Duplex({ + read() {}, + allowHalfOpen: false + }); + assert.strictEqual(stream.allowHalfOpen, false); + stream._writableState.ended = true; + stream.on('finish', common.mustNotCall()); + assert.strictEqual(stream.listenerCount('end'), 0); + stream.resume(); + stream.push(null); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-duplex-from.js b/test/parallel/test-stream-duplex-from.js new file mode 100644 index 0000000000..de972fcc6a --- /dev/null +++ b/test/parallel/test-stream-duplex-from.js @@ -0,0 +1,295 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const { Duplex, Readable, Writable, pipeline } = require('../../lib'); +const { Blob } = require('buffer'); + +{ + const d = Duplex.from({ + readable: new Readable({ + read() { + this.push('asd'); + this.push(null); + } + }) + }); + assert.strictEqual(d.readable, true); + assert.strictEqual(d.writable, false); + d.once('readable', common.mustCall(function() { + assert.strictEqual(d.read().toString(), 'asd'); + })); + d.once('end', common.mustCall(function() { + assert.strictEqual(d.readable, false); + })); +} + +{ + const d = Duplex.from(new Readable({ + read() { + this.push('asd'); + this.push(null); + } + })); + assert.strictEqual(d.readable, true); + assert.strictEqual(d.writable, false); + d.once('readable', common.mustCall(function() { + assert.strictEqual(d.read().toString(), 'asd'); + })); + d.once('end', common.mustCall(function() { + assert.strictEqual(d.readable, false); + })); 
+} + +{ + let ret = ''; + const d = Duplex.from(new Writable({ + write(chunk, encoding, callback) { + ret += chunk; + callback(); + } + })); + assert.strictEqual(d.readable, false); + assert.strictEqual(d.writable, true); + d.end('asd'); + d.on('finish', common.mustCall(function() { + assert.strictEqual(d.writable, false); + assert.strictEqual(ret, 'asd'); + })); +} + +{ + let ret = ''; + const d = Duplex.from({ + writable: new Writable({ + write(chunk, encoding, callback) { + ret += chunk; + callback(); + } + }) + }); + assert.strictEqual(d.readable, false); + assert.strictEqual(d.writable, true); + d.end('asd'); + d.on('finish', common.mustCall(function() { + assert.strictEqual(d.writable, false); + assert.strictEqual(ret, 'asd'); + })); +} + +{ + let ret = ''; + const d = Duplex.from({ + readable: new Readable({ + read() { + this.push('asd'); + this.push(null); + } + }), + writable: new Writable({ + write(chunk, encoding, callback) { + ret += chunk; + callback(); + } + }) + }); + assert.strictEqual(d.readable, true); + assert.strictEqual(d.writable, true); + d.once('readable', common.mustCall(function() { + assert.strictEqual(d.read().toString(), 'asd'); + })); + d.once('end', common.mustCall(function() { + assert.strictEqual(d.readable, false); + })); + d.end('asd'); + d.once('finish', common.mustCall(function() { + assert.strictEqual(d.writable, false); + assert.strictEqual(ret, 'asd'); + })); +} + +{ + const d = Duplex.from(Promise.resolve('asd')); + assert.strictEqual(d.readable, true); + assert.strictEqual(d.writable, false); + d.once('readable', common.mustCall(function() { + assert.strictEqual(d.read().toString(), 'asd'); + })); + d.once('end', common.mustCall(function() { + assert.strictEqual(d.readable, false); + })); +} + +{ + // https://github.com/nodejs/node/issues/40497 + pipeline( + ['abc\ndef\nghi'], + Duplex.from(async function * (source) { + let rest = ''; + for await (const chunk of source) { + const lines = (rest + chunk.toString()).split('\n'); + rest = lines.pop(); + for (const line of lines) { + yield line; + } + } + yield rest; + }), + async function * (source) { // eslint-disable-line require-yield + let ret = ''; + for await (const x of source) { + ret += x; + } + assert.strictEqual(ret, 'abcdefghi'); + }, + common.mustCall(() => {}), + ); +} + +// Ensure that isDuplexNodeStream was called +{ + const duplex = new Duplex(); + assert.strictEqual(Duplex.from(duplex), duplex); +} + +// Ensure that Duplex.from works for blobs +{ + const blob = new Blob(['blob']); + const expectedByteLength = blob.size; + const duplex = Duplex.from(blob); + duplex.on('data', common.mustCall((arrayBuffer) => { + assert.strictEqual(arrayBuffer.byteLength, expectedByteLength); + })); +} + +// Ensure that given a promise rejection it emits an error +{ + const myErrorMessage = 'myCustomError'; + Duplex.from(Promise.reject(myErrorMessage)) + .on('error', common.mustCall((error) => { + assert.strictEqual(error, myErrorMessage); + })); +} + +// Ensure that given a promise rejection on an async function it emits an error +{ + const myErrorMessage = 'myCustomError'; + async function asyncFn() { + return Promise.reject(myErrorMessage); + } + + Duplex.from(asyncFn) + .on('error', common.mustCall((error) => { + assert.strictEqual(error, myErrorMessage); + })); +} + +// Ensure that Duplex.from throws an Invalid return value when function is void +{ + assert.throws(() => Duplex.from(() => {}), { + code: 'ERR_INVALID_RETURN_VALUE', + }); +} + +// Ensure data if a sub object has a readable stream it's 
duplexified +{ + const msg = Buffer.from('hello'); + const duplex = Duplex.from({ + readable: Readable({ + read() { + this.push(msg); + this.push(null); + } + }) + }).on('data', common.mustCall((data) => { + assert.strictEqual(data, msg); + })); + + assert.strictEqual(duplex.writable, false); +} + +// Ensure data if a sub object has a writable stream it's duplexified +{ + const msg = Buffer.from('hello'); + const duplex = Duplex.from({ + writable: Writable({ + write: common.mustCall((data) => { + assert.strictEqual(data, msg); + }) + }) + }); + + duplex.write(msg); + assert.strictEqual(duplex.readable, false); +} + +// Ensure data if a sub object has a writable and readable stream it's duplexified +{ + const msg = Buffer.from('hello'); + + const duplex = Duplex.from({ + readable: Readable({ + read() { + this.push(msg); + this.push(null); + } + }), + writable: Writable({ + write: common.mustCall((data) => { + assert.strictEqual(data, msg); + }) + }) + }); + + duplex.pipe(duplex) + .on('data', common.mustCall((data) => { + assert.strictEqual(data, msg); + assert.strictEqual(duplex.readable, true); + assert.strictEqual(duplex.writable, true); + })) + .on('end', common.mustCall()); +} + +// Ensure that given readable stream that throws an error it calls destroy +{ + const myErrorMessage = 'error!'; + const duplex = Duplex.from(Readable({ + read() { + throw new Error(myErrorMessage); + } + })); + duplex.on('error', common.mustCall((msg) => { + assert.strictEqual(msg.message, myErrorMessage); + })); +} + +// Ensure that given writable stream that throws an error it calls destroy +{ + const myErrorMessage = 'error!'; + const duplex = Duplex.from(Writable({ + write(chunk, enc, cb) { + cb(myErrorMessage); + } + })); + + duplex.on('error', common.mustCall((msg) => { + assert.strictEqual(msg, myErrorMessage); + })); + + duplex.write('test'); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-duplex-props.js b/test/parallel/test-stream-duplex-props.js new file mode 100644 index 0000000000..470bce950a --- /dev/null +++ b/test/parallel/test-stream-duplex-props.js @@ -0,0 +1,46 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +require('../common'); +const assert = require('assert'); +const { Duplex } = require('../../lib'); + +{ + const d = new Duplex({ + objectMode: true, + highWaterMark: 100 + }); + + assert.strictEqual(d.writableObjectMode, true); + assert.strictEqual(d.writableHighWaterMark, 100); + assert.strictEqual(d.readableObjectMode, true); + assert.strictEqual(d.readableHighWaterMark, 100); +} + +{ + const d = new Duplex({ + readableObjectMode: false, + readableHighWaterMark: 10, + writableObjectMode: true, + writableHighWaterMark: 100 + }); + + assert.strictEqual(d.writableObjectMode, true); + assert.strictEqual(d.writableHighWaterMark, 100); + assert.strictEqual(d.readableObjectMode, false); + assert.strictEqual(d.readableHighWaterMark, 10); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-duplex-readable-end.js b/test/parallel/test-stream-duplex-readable-end.js new file mode 100644 index 0000000000..f71506c903 --- /dev/null +++ 
b/test/parallel/test-stream-duplex-readable-end.js @@ -0,0 +1,44 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +// https://github.com/nodejs/node/issues/35926 +const common = require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); + +let loops = 5; + +const src = new stream.Readable({ + read() { + if (loops--) + this.push(Buffer.alloc(20000)); + } +}); + +const dst = new stream.Transform({ + transform(chunk, output, fn) { + this.push(null); + fn(); + } +}); + +src.pipe(dst); + +dst.on('data', () => { }); +dst.on('end', common.mustCall(() => { + assert.strictEqual(loops, 3); + assert.ok(src.isPaused()); +})); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-duplex-readable-writable.js b/test/parallel/test-stream-duplex-readable-writable.js new file mode 100644 index 0000000000..3599e006c2 --- /dev/null +++ b/test/parallel/test-stream-duplex-readable-writable.js @@ -0,0 +1,61 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Duplex } = require('../../lib'); +const assert = require('assert'); + +{ + const duplex = new Duplex({ + readable: false + }); + assert.strictEqual(duplex.readable, false); + duplex.push('asd'); + duplex.on('error', common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PUSH_AFTER_EOF'); + })); + duplex.on('data', common.mustNotCall()); + duplex.on('end', common.mustNotCall()); +} + +{ + const duplex = new Duplex({ + writable: false, + write: common.mustNotCall() + }); + assert.strictEqual(duplex.writable, false); + duplex.write('asd'); + duplex.on('error', common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); + })); + duplex.on('finish', common.mustNotCall()); +} + +{ + const duplex = new Duplex({ + readable: false + }); + assert.strictEqual(duplex.readable, false); + duplex.on('data', common.mustNotCall()); + duplex.on('end', common.mustNotCall()); + async function run() { + for await (const chunk of duplex) { + assert(false, chunk); + } + } + run().then(common.mustCall()); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-duplex-writable-finished.js b/test/parallel/test-stream-duplex-writable-finished.js new file mode 100644 index 0000000000..e1fe855181 --- /dev/null +++ b/test/parallel/test-stream-duplex-writable-finished.js @@ -0,0 +1,45 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Duplex } = require('../../lib'); +const assert = require('assert'); + +// basic +{ + // Find it on Duplex.prototype + assert(Reflect.has(Duplex.prototype, 'writableFinished')); +} + +// event +{ + const duplex = new Duplex(); + + duplex._write = (chunk, encoding, cb) => { + // The state finished should start in false. 
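+    // writableFinished only becomes true just before 'finish' is emitted,
+    // so it must still be false while _write is running.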
+ assert.strictEqual(duplex.writableFinished, false); + cb(); + }; + + duplex.on('finish', common.mustCall(() => { + assert.strictEqual(duplex.writableFinished, true); + })); + + duplex.end('testing finished state', common.mustCall(() => { + assert.strictEqual(duplex.writableFinished, true); + })); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-duplex.js b/test/parallel/test-stream-duplex.js new file mode 100644 index 0000000000..90eb91da76 --- /dev/null +++ b/test/parallel/test-stream-duplex.js @@ -0,0 +1,70 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +require('../common'); +const assert = require('assert'); +const Duplex = require('../../lib').Duplex; + +const stream = new Duplex({ objectMode: true }); + +assert(Duplex() instanceof Duplex); +assert(stream._readableState.objectMode); +assert(stream._writableState.objectMode); +assert(stream.allowHalfOpen); +assert.strictEqual(stream.listenerCount('end'), 0); + +let written; +let read; + +stream._write = (obj, _, cb) => { + written = obj; + cb(); +}; + +stream._read = () => {}; + +stream.on('data', (obj) => { + read = obj; +}); + +stream.push({ val: 1 }); +stream.end({ val: 2 }); + +process.on('exit', () => { + assert.strictEqual(read.val, 1); + assert.strictEqual(written.val, 2); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-end-paused.js b/test/parallel/test-stream-end-paused.js new file mode 100644 index 0000000000..5d02fff6e7 --- /dev/null +++ b/test/parallel/test-stream-end-paused.js @@ -0,0 +1,65 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +// Make sure we don't miss the end event for paused 0-length streams + +const Readable = require('../../lib').Readable; +const stream = new Readable(); +let calledRead = false; +stream._read = function() { + assert(!calledRead); + calledRead = true; + this.push(null); +}; + +stream.on('data', function() { + throw new Error('should not ever get data'); +}); +stream.pause(); + +setTimeout(common.mustCall(function() { + stream.on('end', common.mustCall()); + stream.resume(); +}), 1); + +process.on('exit', function() { + assert(calledRead); + silentConsole.log('ok'); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-error-once.js b/test/parallel/test-stream-error-once.js new file mode 100644 index 0000000000..b9a9796e47 --- /dev/null +++ b/test/parallel/test-stream-error-once.js @@ -0,0 +1,34 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { Writable, Readable } = require('../../lib'); + +{ + const writable = new Writable(); + writable.on('error', common.mustCall()); + writable.end(); + writable.write('h'); + writable.write('h'); +} + +{ + const readable = new Readable(); + readable.on('error', common.mustCall()); + readable.push(null); + readable.push('h'); + readable.push('h'); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-events-prepend.js b/test/parallel/test-stream-events-prepend.js new file mode 100644 index 0000000000..9564b2f828 --- /dev/null +++ b/test/parallel/test-stream-events-prepend.js @@ -0,0 +1,41 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const stream = require('../../lib'); + +class Writable extends stream.Writable { + constructor() { + super(); + this.prependListener = undefined; + } + + _write(chunk, end, cb) { + 
cb(); + } +} + +class Readable extends stream.Readable { + _read() { + this.push(null); + } +} + +const w = new Writable(); +w.on('pipe', common.mustCall()); + +const r = new Readable(); +r.pipe(w); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-filter.js b/test/parallel/test-stream-filter.js new file mode 100644 index 0000000000..e5dfddc0ce --- /dev/null +++ b/test/parallel/test-stream-filter.js @@ -0,0 +1,199 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { + Readable, +} = require('../../lib'); +const assert = require('assert'); +const { once } = require('events'); + + const st = require('timers').setTimeout; + + function setTimeout(ms) { + return new Promise(resolve => { + st(resolve, ms); + }); + } + + +{ + // Filter works on synchronous streams with a synchronous predicate + const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => x < 3); + const result = [1, 2]; + (async () => { + for await (const item of stream) { + assert.strictEqual(item, result.shift()); + } + })().then(common.mustCall()); +} + +{ + // Filter works on synchronous streams with an asynchronous predicate + const stream = Readable.from([1, 2, 3, 4, 5]).filter(async (x) => { + await Promise.resolve(); + return x > 3; + }); + const result = [4, 5]; + (async () => { + for await (const item of stream) { + assert.strictEqual(item, result.shift()); + } + })().then(common.mustCall()); +} + +{ + // Map works on asynchronous streams with a asynchronous mapper + const stream = Readable.from([1, 2, 3, 4, 5]).map(async (x) => { + await Promise.resolve(); + return x + x; + }).filter((x) => x > 5); + const result = [6, 8, 10]; + (async () => { + for await (const item of stream) { + assert.strictEqual(item, result.shift()); + } + })().then(common.mustCall()); +} + +{ + // Filter works on an infinite stream + const stream = Readable.from(async function* () { + while (true) yield 1; + }()).filter(common.mustCall(async (x) => { + return x < 3; + }, 5)); + (async () => { + let i = 1; + for await (const item of stream) { + assert.strictEqual(item, 1); + if (++i === 5) break; + } + })().then(common.mustCall()); +} + +{ + // Filter works on constructor created streams + let i = 0; + const stream = new Readable({ + read() { + if (i === 10) { + this.push(null); + return; + } + this.push(Uint8Array.from([i])); + i++; + }, + highWaterMark: 0, + }).filter(common.mustCall(async ([x]) => { + return x !== 5; + }, 10)); + (async () => { + const result = (await stream.toArray()).map((x) => x[0]); + const expected = [...Array(10).keys()].filter((x) => x !== 5); + assert.deepStrictEqual(result, expected); + })().then(common.mustCall()); +} + +{ + // Throwing an error during `filter` (sync) + const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => { + if (x === 3) { + throw new Error('boom'); + } + return true; + }); + assert.rejects( + stream.map((x) => x + x).toArray(), + /boom/, + ).then(common.mustCall()); +} + +{ + // Throwing an error during `filter` (async) + const stream = Readable.from([1, 2, 3, 4, 5]).filter(async (x) => { + if (x === 3) { + throw new Error('boom'); + } + return true; + }); + assert.rejects( + stream.filter(() => true).toArray(), + /boom/, + ).then(common.mustCall()); +} + +{ + // Concurrency + AbortSignal + const 
ac = new AbortController(); + let calls = 0; + const stream = Readable.from([1, 2, 3, 4]).filter(async (_, { signal }) => { + calls++; + await once(signal, 'abort'); + }, { signal: ac.signal, concurrency: 2 }); + // pump + assert.rejects(async () => { + for await (const item of stream) { + // nope + silentConsole.log(item); + } + }, { + name: 'AbortError', + }).then(common.mustCall()); + + setImmediate(() => { + ac.abort(); + assert.strictEqual(calls, 2); + }); +} + +{ + // Concurrency result order + const stream = Readable.from([1, 2]).filter(async (item, { signal }) => { + await setTimeout(10 - item, { signal }); + return true; + }, { concurrency: 2 }); + + (async () => { + const expected = [1, 2]; + for await (const item of stream) { + assert.strictEqual(item, expected.shift()); + } + })().then(common.mustCall()); +} + +{ + // Error cases + assert.throws(() => Readable.from([1]).filter(1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).filter((x) => x, { + concurrency: 'Foo' + }), /ERR_OUT_OF_RANGE/); + assert.throws(() => Readable.from([1]).filter((x) => x, 1), /ERR_INVALID_ARG_TYPE/); +} +{ + // Test result is a Readable + const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => true); + assert.strictEqual(stream.readable, true); +} +{ + const stream = Readable.from([1, 2, 3, 4, 5]); + Object.defineProperty(stream, 'map', { + value: common.mustNotCall(() => {}), + }); + // Check that map isn't getting called. + stream.filter(() => true); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-finished.js b/test/parallel/test-stream-finished.js new file mode 100644 index 0000000000..95524c9fe7 --- /dev/null +++ b/test/parallel/test-stream-finished.js @@ -0,0 +1,673 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { + Writable, + Readable, + Transform, + finished, + Duplex, + PassThrough, + Stream, +} = require('../../lib'); +const assert = require('assert'); +const EE = require('events'); +const fs = require('fs'); +const { promisify } = require('util'); +const http = require('http'); + +{ + const rs = new Readable({ + read() {} + }); + + finished(rs, common.mustSucceed()); + + rs.push(null); + rs.resume(); +} + +{ + const ws = new Writable({ + write(data, enc, cb) { + cb(); + } + }); + + finished(ws, common.mustSucceed()); + + ws.end(); +} + +{ + const tr = new Transform({ + transform(data, enc, cb) { + cb(); + } + }); + + let finish = false; + let ended = false; + + tr.on('end', () => { + ended = true; + }); + + tr.on('finish', () => { + finish = true; + }); + + finished(tr, common.mustSucceed(() => { + assert(finish); + assert(ended); + })); + + tr.end(); + tr.resume(); +} + +{ + const rs = fs.createReadStream(__filename); + + rs.resume(); + finished(rs, common.mustCall()); +} + +{ + const finishedPromise = promisify(finished); + + async function run() { + const rs = fs.createReadStream(__filename); + const done = common.mustCall(); + + let ended = false; + rs.resume(); + rs.on('end', () => { + ended = true; + }); + await finishedPromise(rs); + assert(ended); + done(); + } + + run(); +} + +{ + // Check pre-cancelled + const signal = new EventTarget(); + signal.aborted = true; + + const rs = Readable.from((function* () {})()); + finished(rs, { signal }, 
common.mustCall((err) => { + assert.strictEqual(err.name, 'AbortError'); + })); +} + +{ + // Check cancelled before the stream ends sync. + const ac = new AbortController(); + const { signal } = ac; + + const rs = Readable.from((function* () {})()); + finished(rs, { signal }, common.mustCall((err) => { + assert.strictEqual(err.name, 'AbortError'); + })); + + ac.abort(); +} + +{ + // Check cancelled before the stream ends async. + const ac = new AbortController(); + const { signal } = ac; + + const rs = Readable.from((function* () {})()); + setTimeout(() => ac.abort(), 1); + finished(rs, { signal }, common.mustCall((err) => { + assert.strictEqual(err.name, 'AbortError'); + })); +} + +{ + // Check cancelled after doesn't throw. + const ac = new AbortController(); + const { signal } = ac; + + const rs = Readable.from((function* () { + yield 5; + setImmediate(() => ac.abort()); + })()); + rs.resume(); + finished(rs, { signal }, common.mustSucceed()); +} + +{ + // Promisified abort works + const finishedPromise = promisify(finished); + async function run() { + const ac = new AbortController(); + const { signal } = ac; + const rs = Readable.from((function* () {})()); + setImmediate(() => ac.abort()); + await finishedPromise(rs, { signal }); + } + + assert.rejects(run, { name: 'AbortError' }).then(common.mustCall()); +} + +{ + // Promisified pre-aborted works + const finishedPromise = promisify(finished); + async function run() { + const signal = new EventTarget(); + signal.aborted = true; + const rs = Readable.from((function* () {})()); + await finishedPromise(rs, { signal }); + } + + assert.rejects(run, { name: 'AbortError' }).then(common.mustCall()); +} + + +{ + const rs = fs.createReadStream('file-does-not-exist'); + + finished(rs, common.expectsError({ + code: 'ENOENT' + })); +} + +{ + const rs = new Readable(); + + finished(rs, common.mustSucceed()); + + rs.push(null); + rs.emit('close'); // Should not trigger an error + rs.resume(); +} + +{ + const rs = new Readable(); + + finished(rs, common.mustCall((err) => { + assert(err, 'premature close error'); + })); + + rs.emit('close'); // Should trigger error + rs.push(null); + rs.resume(); +} + +// Test faulty input values and options. 
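+// The block below passes deliberately bad arguments: a string where the callback
+// belongs, a string where the options object belongs, and a string callback after
+// valid options; each case must throw ERR_INVALID_ARG_TYPE, while `null` options
+// are accepted and the callback still fires once the stream ends.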
+{ + const rs = new Readable({ + read() {} + }); + + assert.throws( + () => finished(rs, 'foo'), + { + code: 'ERR_INVALID_ARG_TYPE', + message: /callback/ + } + ); + assert.throws( + () => finished(rs, 'foo', () => {}), + { + code: 'ERR_INVALID_ARG_TYPE', + message: /options/ + } + ); + assert.throws( + () => finished(rs, {}, 'foo'), + { + code: 'ERR_INVALID_ARG_TYPE', + message: /callback/ + } + ); + + finished(rs, null, common.mustCall()); + + rs.push(null); + rs.resume(); +} + +// Test that calling returned function removes listeners +{ + const ws = new Writable({ + write(data, env, cb) { + cb(); + } + }); + const removeListener = finished(ws, common.mustNotCall()); + removeListener(); + ws.end(); +} + +{ + const rs = new Readable(); + const removeListeners = finished(rs, common.mustNotCall()); + removeListeners(); + + rs.emit('close'); + rs.push(null); + rs.resume(); +} + +{ + const streamLike = new EE(); + streamLike.readableEnded = true; + streamLike.readable = true; + finished(streamLike, common.mustCall()); + streamLike.emit('close'); +} + +{ + const writable = new Writable({ write() {} }); + writable.writable = false; + writable.destroy(); + finished(writable, common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); + })); +} + +{ + const readable = new Readable(); + readable.readable = false; + readable.destroy(); + finished(readable, common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); + })); +} + +{ + const w = new Writable({ + write(chunk, encoding, callback) { + setImmediate(callback); + } + }); + finished(w, common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); + })); + w.end('asd'); + w.destroy(); +} + +function testClosed(factory) { + { + // If already destroyed but finished is cancelled in same tick + // don't invoke the callback, + + const s = factory(); + s.destroy(); + const dispose = finished(s, common.mustNotCall()); + dispose(); + } + + { + // If already destroyed invoked callback. + + const s = factory(); + s.destroy(); + finished(s, common.mustCall()); + } + + { + // Don't invoke until destroy has completed. + + let destroyed = false; + const s = factory({ + destroy(err, cb) { + setImmediate(() => { + destroyed = true; + cb(); + }); + } + }); + s.destroy(); + finished(s, common.mustCall(() => { + assert.strictEqual(destroyed, true); + })); + } + + { + // Invoke callback even if close is inhibited. + + const s = factory({ + emitClose: false, + destroy(err, cb) { + cb(); + finished(s, common.mustCall()); + } + }); + s.destroy(); + } + + { + // Invoke with deep async. 
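+    // 'Deep async' means destroy() only completes inside setImmediate and
+    // finished() is attached in a further setImmediate after that; the
+    // callback below must still be invoked.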
+ + const s = factory({ + destroy(err, cb) { + setImmediate(() => { + cb(); + setImmediate(() => { + finished(s, common.mustCall()); + }); + }); + } + }); + s.destroy(); + } +} + +testClosed((opts) => new Readable({ ...opts })); +testClosed((opts) => new Writable({ write() {}, ...opts })); + +{ + const w = new Writable({ + write(chunk, encoding, cb) { + cb(); + }, + autoDestroy: false + }); + w.end('asd'); + process.nextTick(() => { + finished(w, common.mustCall()); + }); +} + +{ + const w = new Writable({ + write(chunk, encoding, cb) { + cb(new Error()); + }, + autoDestroy: false + }); + w.write('asd'); + w.on('error', common.mustCall(() => { + finished(w, common.mustCall()); + })); +} + +{ + const r = new Readable({ + autoDestroy: false + }); + r.push(null); + r.resume(); + r.on('end', common.mustCall(() => { + finished(r, common.mustCall()); + })); +} + +{ + const rs = fs.createReadStream(__filename, { autoClose: false }); + rs.resume(); + rs.on('close', common.mustNotCall()); + rs.on('end', common.mustCall(() => { + finished(rs, common.mustCall()); + })); +} + +{ + const d = new EE(); + d._writableState = {}; + d._writableState.finished = true; + finished(d, { readable: false, writable: true }, common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); + })); + d._writableState.errored = true; + d.emit('close'); +} + +{ + const r = new Readable(); + finished(r, common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); + })); + r.push('asd'); + r.push(null); + r.destroy(); +} + +{ + const d = new Duplex({ + final(cb) { }, // Never close writable side for test purpose + read() { + this.push(null); + } + }); + + d.on('end', common.mustCall()); + + finished(d, { readable: true, writable: false }, common.mustCall()); + + d.end(); + d.resume(); +} + +{ + const d = new Duplex({ + final(cb) { }, // Never close writable side for test purpose + read() { + this.push(null); + } + }); + + d.on('end', common.mustCall()); + + d.end(); + finished(d, { readable: true, writable: false }, common.mustCall()); + + d.resume(); +} + +{ + // Test for compat for e.g. fd-slicer which implements + // non standard destroy behavior which might not emit + // 'close'. 
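+  // The stream below is flagged destroyed by hand (r.destroyed = true) without
+  // emitting 'close'; finished() is still expected to call back.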
+ const r = new Readable(); + finished(r, common.mustCall()); + r.resume(); + r.push('asd'); + r.destroyed = true; + r.push(null); +} + +{ + // Regression https://github.com/nodejs/node/issues/33130 + const response = new PassThrough(); + + class HelloWorld extends Duplex { + constructor(response) { + super({ + autoDestroy: false + }); + + this.response = response; + this.readMore = false; + + response.once('end', () => { + this.push(null); + }); + + response.on('readable', () => { + if (this.readMore) { + this._read(); + } + }); + } + + _read() { + const { response } = this; + + this.readMore = true; + + if (response.readableLength) { + this.readMore = false; + } + + let data; + while ((data = response.read()) !== null) { + this.push(data); + } + } + } + + const instance = new HelloWorld(response); + instance.setEncoding('utf8'); + instance.end(); + + (async () => { + await EE.once(instance, 'finish'); + + setImmediate(() => { + response.write('chunk 1'); + response.write('chunk 2'); + response.write('chunk 3'); + response.end(); + }); + + let res = ''; + for await (const data of instance) { + res += data; + } + + assert.strictEqual(res, 'chunk 1chunk 2chunk 3'); + })().then(common.mustCall()); +} + +{ + const p = new PassThrough(); + p.end(); + finished(p, common.mustNotCall()); +} + +{ + const p = new PassThrough(); + p.end(); + p.on('finish', common.mustCall(() => { + finished(p, common.mustNotCall()); + })); +} + +{ + const server = http.createServer(common.mustCall((req, res) => { + res.on('finish', common.mustCall(() => { + finished(res, common.mustCall(() => { + server.close(); + })); + })); + res.end(); + })) + .listen(0, function() { + http.request({ + method: 'GET', + port: this.address().port + }).end() + .on('response', common.mustCall()); + }); +} + +{ + const server = http.createServer(common.mustCall((req, res) => { + req.on('close', common.mustCall(() => { + finished(req, common.mustCall(() => { + server.close(); + })); + })); + req.destroy(); + })).listen(0, function() { + http.request({ + method: 'GET', + port: this.address().port + }).end().on('error', common.mustCall()); + }); +} + +{ + const w = new Writable({ + write(chunk, encoding, callback) { + process.nextTick(callback); + } + }); + w.aborted = false; + w.end(); + let closed = false; + w.on('finish', () => { + assert.strictEqual(closed, false); + w.emit('aborted'); + }); + w.on('close', common.mustCall(() => { + closed = true; + })); + + finished(w, common.mustCall(() => { + assert.strictEqual(closed, true); + })); +} + +{ + const w = new Writable(); + const _err = new Error(); + w.destroy(_err); + finished(w, common.mustCall((err) => { + assert.strictEqual(_err, err); + finished(w, common.mustCall((err) => { + assert.strictEqual(_err, err); + })); + })); +} + +{ + const w = new Writable(); + w.destroy(); + finished(w, common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); + finished(w, common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); + })); + })); +} + +{ + // Legacy Streams do not inherit from Readable or Writable. + // We cannot really assume anything about them, so we cannot close them + // automatically. 
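+  // For a bare legacy Stream, finished() must therefore never invoke its callback.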
+ const s = new Stream(); + finished(s, common.mustNotCall()); +} + +{ + const server = http.createServer(common.mustCall(function(req, res) { + fs.createReadStream(__filename).pipe(res); + finished(res, common.mustCall(function(err) { + assert.strictEqual(err, undefined); + })); + })).listen(0, function() { + http.request( + { method: 'GET', port: this.address().port }, + common.mustCall(function(res) { + res.resume(); + server.close(); + }) + ).end(); + }); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-flatMap.js b/test/parallel/test-stream-flatMap.js new file mode 100644 index 0000000000..db00e554c1 --- /dev/null +++ b/test/parallel/test-stream-flatMap.js @@ -0,0 +1,154 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const fixtures = require('../common/fixtures'); +const { + Readable, +} = require('../../lib'); +const assert = require('assert'); + + const st = require('timers').setTimeout; + + function setTimeout(ms) { + return new Promise(resolve => { + st(resolve, ms); + }); + } + +const { createReadStream } = require('fs'); + +function oneTo5() { + return Readable.from([1, 2, 3, 4, 5]); +} + +{ + // flatMap works on synchronous streams with a synchronous mapper + (async () => { + assert.deepStrictEqual( + await oneTo5().flatMap((x) => [x + x]).toArray(), + [2, 4, 6, 8, 10] + ); + assert.deepStrictEqual( + await oneTo5().flatMap(() => []).toArray(), + [] + ); + assert.deepStrictEqual( + await oneTo5().flatMap((x) => [x, x]).toArray(), + [1, 1, 2, 2, 3, 3, 4, 4, 5, 5] + ); + })().then(common.mustCall()); +} + + +{ + // flatMap works on sync/async streams with an asynchronous mapper + (async () => { + assert.deepStrictEqual( + await oneTo5().flatMap(async (x) => [x, x]).toArray(), + [1, 1, 2, 2, 3, 3, 4, 4, 5, 5] + ); + const asyncOneTo5 = oneTo5().map(async (x) => x); + assert.deepStrictEqual( + await asyncOneTo5.flatMap(async (x) => [x, x]).toArray(), + [1, 1, 2, 2, 3, 3, 4, 4, 5, 5] + ); + })().then(common.mustCall()); +} +{ + // flatMap works on a stream where mapping returns a stream + (async () => { + const result = await oneTo5().flatMap(async (x) => { + return Readable.from([x, x]); + }).toArray(); + assert.deepStrictEqual(result, [1, 1, 2, 2, 3, 3, 4, 4, 5, 5]); + })().then(common.mustCall()); + // flatMap works on an objectMode stream where mappign returns a stream + (async () => { + const result = await oneTo5().flatMap(() => { + return createReadStream(fixtures.path('x.txt')); + }).toArray(); + // The resultant stream is in object mode so toArray shouldn't flatten + assert.strictEqual(result.length, 5); + assert.deepStrictEqual( + Buffer.concat(result).toString(), + (process.platform === 'win32' ? 
'xyz\r\n' : 'xyz\n').repeat(5) + ); + + })().then(common.mustCall()); + +} + +{ + // Concurrency + AbortSignal + const ac = new AbortController(); + const stream = oneTo5().flatMap(common.mustNotCall(async (_, { signal }) => { + await setTimeout(100, { signal }); + }), { signal: ac.signal, concurrency: 2 }); + // pump + assert.rejects(async () => { + for await (const item of stream) { + // nope + silentConsole.log(item); + } + }, { + name: 'AbortError', + }).then(common.mustCall()); + + queueMicrotask(() => { + ac.abort(); + }); +} + +{ + // Already aborted AbortSignal + const stream = oneTo5().flatMap(common.mustNotCall(async (_, { signal }) => { + await setTimeout(100, { signal }); + }), { signal: AbortSignal.abort() }); + // pump + assert.rejects(async () => { + for await (const item of stream) { + // nope + silentConsole.log(item); + } + }, { + name: 'AbortError', + }).then(common.mustCall()); +} + +{ + // Error cases + assert.throws(() => Readable.from([1]).flatMap(1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).flatMap((x) => x, { + concurrency: 'Foo' + }), /ERR_OUT_OF_RANGE/); + assert.throws(() => Readable.from([1]).flatMap((x) => x, 1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).flatMap((x) => x, { signal: true }), /ERR_INVALID_ARG_TYPE/); +} +{ + // Test result is a Readable + const stream = oneTo5().flatMap((x) => x); + assert.strictEqual(stream.readable, true); +} +{ + const stream = oneTo5(); + Object.defineProperty(stream, 'map', { + value: common.mustNotCall(() => {}), + }); + // Check that map isn't getting called. + stream.flatMap(() => true); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-forEach.js b/test/parallel/test-stream-forEach.js new file mode 100644 index 0000000000..a88c323619 --- /dev/null +++ b/test/parallel/test-stream-forEach.js @@ -0,0 +1,154 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { + Readable, +} = require('../../lib'); +const assert = require('assert'); +const { once } = require('events'); + +{ + // forEach works on synchronous streams with a synchronous predicate + const stream = Readable.from([1, 2, 3]); + const result = [1, 2, 3]; + (async () => { + await stream.forEach((value) => assert.strictEqual(value, result.shift())); + })().then(common.mustCall()); +} + +{ + // forEach works an asynchronous streams + const stream = Readable.from([1, 2, 3]).filter(async (x) => { + await Promise.resolve(); + return true; + }); + const result = [1, 2, 3]; + (async () => { + await stream.forEach((value) => assert.strictEqual(value, result.shift())); + })().then(common.mustCall()); +} + +{ + // forEach works on asynchronous streams with a asynchronous forEach fn + const stream = Readable.from([1, 2, 3]).filter(async (x) => { + await Promise.resolve(); + return true; + }); + const result = [1, 2, 3]; + (async () => { + await stream.forEach(async (value) => { + await Promise.resolve(); + assert.strictEqual(value, result.shift()); + }); + })().then(common.mustCall()); +} + +{ + // forEach works on an infinite stream + const ac = new AbortController(); + const { signal } = ac; + const stream = Readable.from(async function* () { + while (true) yield 1; + }(), { signal }); + let i = 0; + 
assert.rejects(stream.forEach(common.mustCall((x) => { + i++; + if (i === 10) ac.abort(); + assert.strictEqual(x, 1); + }, 10)), { name: 'AbortError' }).then(common.mustCall()); +} + +{ + // Emitting an error during `forEach` + const stream = Readable.from([1, 2, 3, 4, 5]); + assert.rejects(stream.forEach(async (x) => { + if (x === 3) { + stream.emit('error', new Error('boom')); + } + }), /boom/).then(common.mustCall()); +} + +{ + // Throwing an error during `forEach` (sync) + const stream = Readable.from([1, 2, 3, 4, 5]); + assert.rejects(stream.forEach((x) => { + if (x === 3) { + throw new Error('boom'); + } + }), /boom/).then(common.mustCall()); +} + +{ + // Throwing an error during `forEach` (async) + const stream = Readable.from([1, 2, 3, 4, 5]); + assert.rejects(stream.forEach(async (x) => { + if (x === 3) { + return Promise.reject(new Error('boom')); + } + }), /boom/).then(common.mustCall()); +} + +{ + // Concurrency + AbortSignal + const ac = new AbortController(); + let calls = 0; + const forEachPromise = + Readable.from([1, 2, 3, 4]).forEach(async (_, { signal }) => { + calls++; + await once(signal, 'abort'); + }, { signal: ac.signal, concurrency: 2 }); + // pump + assert.rejects(async () => { + await forEachPromise; + }, { + name: 'AbortError', + }).then(common.mustCall()); + + setImmediate(() => { + ac.abort(); + assert.strictEqual(calls, 2); + }); +} + +{ + // Error cases + assert.rejects(async () => { + await Readable.from([1]).forEach(1); + }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1]).forEach((x) => x, { + concurrency: 'Foo' + }); + }, /ERR_OUT_OF_RANGE/).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1]).forEach((x) => x, 1); + }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); +} +{ + // Test result is a Promise + const stream = Readable.from([1, 2, 3, 4, 5]).forEach((_) => true); + assert.strictEqual(typeof stream.then, 'function'); +} +{ + const stream = Readable.from([1, 2, 3, 4, 5]); + Object.defineProperty(stream, 'map', { + value: common.mustNotCall(() => {}), + }); + // Check that map isn't getting called. 
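+  // forEach must consume the stream directly instead of delegating to the
+  // instance's map(), which is stubbed with mustNotCall above.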
+ stream.forEach(() => true); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-inheritance.js b/test/parallel/test-stream-inheritance.js new file mode 100644 index 0000000000..1c0fbfb0fe --- /dev/null +++ b/test/parallel/test-stream-inheritance.js @@ -0,0 +1,78 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); +const { Readable, Writable, Duplex, Transform } = require('../../lib'); + +const readable = new Readable({ read() {} }); +const writable = new Writable({ write() {} }); +const duplex = new Duplex({ read() {}, write() {} }); +const transform = new Transform({ transform() {} }); + +assert.ok(readable instanceof Readable); +assert.ok(!(writable instanceof Readable)); +assert.ok(duplex instanceof Readable); +assert.ok(transform instanceof Readable); + +assert.ok(!(readable instanceof Writable)); +assert.ok(writable instanceof Writable); +assert.ok(duplex instanceof Writable); +assert.ok(transform instanceof Writable); + +assert.ok(!(readable instanceof Duplex)); +assert.ok(!(writable instanceof Duplex)); +assert.ok(duplex instanceof Duplex); +assert.ok(transform instanceof Duplex); + +assert.ok(!(readable instanceof Transform)); +assert.ok(!(writable instanceof Transform)); +assert.ok(!(duplex instanceof Transform)); +assert.ok(transform instanceof Transform); + +assert.ok(!(null instanceof Writable)); +assert.ok(!(undefined instanceof Writable)); + +// Simple inheritance check for `Writable` works fine in a subclass constructor. +function CustomWritable() { + assert.ok( + this instanceof CustomWritable, + `${this} does not inherit from CustomWritable` + ); + assert.ok( + this instanceof Writable, + `${this} does not inherit from Writable` + ); +} + +Object.setPrototypeOf(CustomWritable, Writable); +Object.setPrototypeOf(CustomWritable.prototype, Writable.prototype); + +new CustomWritable(); + +assert.throws( + CustomWritable, + { + code: 'ERR_ASSERTION', + constructor: assert.AssertionError, + message: 'undefined does not inherit from CustomWritable' + } +); + +class OtherCustomWritable extends Writable {} + +assert(!(new OtherCustomWritable() instanceof CustomWritable)); +assert(!(new CustomWritable() instanceof OtherCustomWritable)); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-ispaused.js b/test/parallel/test-stream-ispaused.js new file mode 100644 index 0000000000..e2efd498c3 --- /dev/null +++ b/test/parallel/test-stream-ispaused.js @@ -0,0 +1,59 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); + +const readable = new stream.Readable(); + +// _read is a noop, here. +readable._read = Function(); + +// Default state of a stream is not "paused" +assert.ok(!readable.isPaused()); + +// Make the stream start flowing... +readable.on('data', Function()); + +// still not paused. +assert.ok(!readable.isPaused()); + +readable.pause(); +assert.ok(readable.isPaused()); +readable.resume(); +assert.ok(!readable.isPaused()); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-iterator-helpers-test262-tests.mjs b/test/parallel/test-stream-iterator-helpers-test262-tests.mjs new file mode 100644 index 0000000000..44507cd244 --- /dev/null +++ b/test/parallel/test-stream-iterator-helpers-test262-tests.mjs @@ -0,0 +1,179 @@ +import { mustCall } from '../common/index.mjs'; +import { Readable }from '../../lib/index.js'; +import assert from 'assert'; +import tap from 'tap'; + +// These tests are manually ported from the draft PR for the test262 test suite +// Authored by Rick Waldron in https://github.com/tc39/test262/pull/2818/files + +// test262 license: +// The << Software identified by reference to the Ecma Standard* ("Software)">> +// is protected by copyright and is being made available under the +// "BSD License", included below. This Software may be subject to third party +// rights (rights from parties other than Ecma International), including patent +// rights, and no licenses under such third party rights are granted under this +// license even if the third party concerned is a member of Ecma International. +// SEE THE ECMA CODE OF CONDUCT IN PATENT MATTERS AVAILABLE AT +// http://www.ecma-international.org/memento/codeofconduct.htm FOR INFORMATION +// REGARDING THE LICENSING OF PATENT CLAIMS THAT ARE REQUIRED TO IMPLEMENT ECMA +// INTERNATIONAL STANDARDS* + +// Copyright (C) 2012-2013 Ecma International +// All rights reserved. + +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// 1. 
Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// 3. Neither the name of the authors nor Ecma International may be used to +// endorse or promote products derived from this software without specific +// prior written permission. + +// THIS SOFTWARE IS PROVIDED BY THE ECMA INTERNATIONAL "AS IS" AND ANY EXPRESS +// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN +// NO EVENT SHALL ECMA INTERNATIONAL BE LIABLE FOR ANY DIRECT, INDIRECT, +// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, +// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// * Ecma International Standards hereafter means Ecma International Standards +// as well as Ecma Technical Reports + + +// Note all the tests that check AsyncIterator's prototype itself and things +// that happen before stream conversion were not ported. +{ + // asIndexedPairs/is-function + assert.strictEqual(typeof Readable.prototype.asIndexedPairs, 'function'); + // asIndexedPairs/indexed-pairs.js + const iterator = Readable.from([0, 1]); + const indexedPairs = iterator.asIndexedPairs(); + + for await (const [i, v] of indexedPairs) { + assert.strictEqual(i, v); + } + // asIndexedPairs/length.js + assert.strictEqual(Readable.prototype.asIndexedPairs.length, 0); + // asIndexedPairs/name.js + assert.strictEqual(Readable.prototype.asIndexedPairs.name, 'asIndexedPairs'); + const descriptor = Object.getOwnPropertyDescriptor( + Readable.prototype, + 'asIndexedPairs' + ); + assert.strictEqual(descriptor.enumerable, false); + assert.strictEqual(descriptor.configurable, true); + // assert.strictEqual(descriptor.writable, true); +} +{ + // drop/length + assert.strictEqual(Readable.prototype.drop.length, 1); + const descriptor = Object.getOwnPropertyDescriptor( + Readable.prototype, + 'drop' + ); + assert.strictEqual(descriptor.enumerable, false); + assert.strictEqual(descriptor.configurable, true); + // assert.strictEqual(descriptor.writable, true); + // drop/limit-equals-total + const iterator = Readable.from([1, 2]).drop(2); + const result = await iterator[Symbol.asyncIterator]().next(); + assert.deepStrictEqual(result, { done: true, value: undefined }); + // drop/limit-greater-than-total.js + const iterator2 = Readable.from([1, 2]).drop(3); + const result2 = await iterator2[Symbol.asyncIterator]().next(); + assert.deepStrictEqual(result2, { done: true, value: undefined }); + // drop/limit-less-than-total.js + const iterator3 = Readable.from([1, 2]).drop(1); + const result3 = await iterator3[Symbol.asyncIterator]().next(); + assert.deepStrictEqual(result3, { done: false, value: 2 }); + // drop/limit-rangeerror + assert.throws(() => Readable.from([1]).drop(-1), RangeError); + assert.throws(() => { + Readable.from([1]).drop({ + valueOf() { + throw new Error('boom'); + } + }); + }, /boom/); + // drop/limit-tointeger + const 
two = await Readable.from([1, 2]).drop({ valueOf: () => 1 }).toArray(); + assert.deepStrictEqual(two, [2]); + // drop/name + assert.strictEqual(Readable.prototype.drop.name, 'drop'); + // drop/non-constructible + assert.throws(() => new Readable.prototype.drop(1), TypeError); + // drop/proto + const proto = Object.getPrototypeOf(Readable.prototype.drop); + assert.strictEqual(proto, Function.prototype); +} +{ + // every/abrupt-iterator-close + const stream = Readable.from([1, 2, 3]); + const e = new Error(); + await assert.rejects(stream.every(mustCall(() => { + throw e; + }, 1)), e); +} +{ + // every/callable-fn + await assert.rejects(Readable.from([1, 2]).every({}), TypeError); +} +{ + // every/callable + Readable.prototype.every.call(Readable.from([]), () => {}); + // eslint-disable-next-line array-callback-return + Readable.from([]).every(() => {}); + assert.throws(() => { + const r = Readable.from([]); + new r.every(() => {}); + }, TypeError); +} + +{ + // every/false + const iterator = Readable.from([1, 2, 3]); + const result = await iterator.every((v) => v === 1); + assert.strictEqual(result, false); +} +{ + // every/every + const iterator = Readable.from([1, 2, 3]); + const result = await iterator.every((v) => true); + assert.strictEqual(result, true); +} + +{ + // every/is-function + assert.strictEqual(typeof Readable.prototype.every, 'function'); +} +{ + // every/length + assert.strictEqual(Readable.prototype.every.length, 1); + // every/name + assert.strictEqual(Readable.prototype.every.name, 'every'); + // every/propdesc + const descriptor = Object.getOwnPropertyDescriptor( + Readable.prototype, + 'every' + ); + assert.strictEqual(descriptor.enumerable, false); + assert.strictEqual(descriptor.configurable, true); + assert.strictEqual(descriptor.writable, true); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-objectmode-undefined.js b/test/parallel/test-stream-objectmode-undefined.js new file mode 100644 index 0000000000..84e5038162 --- /dev/null +++ b/test/parallel/test-stream-objectmode-undefined.js @@ -0,0 +1,59 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { Readable, Writable, Transform } = require('../../lib'); + +{ + const stream = new Readable({ + objectMode: true, + read: common.mustCall(() => { + stream.push(undefined); + stream.push(null); + }) + }); + + stream.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk, undefined); + })); +} + +{ + const stream = new Writable({ + objectMode: true, + write: common.mustCall((chunk) => { + assert.strictEqual(chunk, undefined); + }) + }); + + stream.write(undefined); +} + +{ + const stream = new Transform({ + objectMode: true, + transform: common.mustCall((chunk) => { + stream.push(chunk); + }) + }); + + stream.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk, undefined); + })); + + stream.write(undefined); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-once-readable-pipe.js b/test/parallel/test-stream-once-readable-pipe.js new file mode 100644 index 
0000000000..19408686a0 --- /dev/null +++ b/test/parallel/test-stream-once-readable-pipe.js @@ -0,0 +1,76 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const { Readable, Writable } = require('../../lib'); + +// This test ensures that if have 'readable' listener +// on Readable instance it will not disrupt the pipe. + +{ + let receivedData = ''; + const w = new Writable({ + write: (chunk, env, callback) => { + receivedData += chunk; + callback(); + }, + }); + + const data = ['foo', 'bar', 'baz']; + const r = new Readable({ + read: () => {}, + }); + + r.once('readable', common.mustCall()); + + r.pipe(w); + r.push(data[0]); + r.push(data[1]); + r.push(data[2]); + r.push(null); + + w.on('finish', common.mustCall(() => { + assert.strictEqual(receivedData, data.join('')); + })); +} + +{ + let receivedData = ''; + const w = new Writable({ + write: (chunk, env, callback) => { + receivedData += chunk; + callback(); + }, + }); + + const data = ['foo', 'bar', 'baz']; + const r = new Readable({ + read: () => {}, + }); + + r.pipe(w); + r.push(data[0]); + r.push(data[1]); + r.push(data[2]); + r.push(null); + r.once('readable', common.mustCall()); + + w.on('finish', common.mustCall(() => { + assert.strictEqual(receivedData, data.join('')); + })); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-passthrough-drain.js b/test/parallel/test-stream-passthrough-drain.js new file mode 100644 index 0000000000..3506bc901e --- /dev/null +++ b/test/parallel/test-stream-passthrough-drain.js @@ -0,0 +1,23 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { PassThrough } = require('../../lib'); + +const pt = new PassThrough({ highWaterMark: 0 }); +pt.on('drain', common.mustCall()); +pt.write('hello'); +pt.read(); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-after-end.js b/test/parallel/test-stream-pipe-after-end.js new file mode 100644 index 0000000000..4a5b5a3237 --- /dev/null +++ b/test/parallel/test-stream-pipe-after-end.js @@ -0,0 +1,84 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { Readable, Writable } = require('../../lib'); + +class TestReadable extends Readable { + constructor(opt) { + super(opt); + this._ended = false; + } + + _read() { + if (this._ended) + this.emit('error', new Error('_read called twice')); + this._ended = true; + this.push(null); + } +} + +class TestWritable extends Writable { + constructor(opt) { + super(opt); + this._written = []; + } + + _write(chunk, encoding, cb) { + this._written.push(chunk); + cb(); + } +} + +// This one should not emit 'end' until we read() from it later. +const ender = new TestReadable(); + +// What happens when you pipe() a Readable that's already ended? +const piper = new TestReadable(); +// pushes EOF null, and length=0, so this will trigger 'end' +piper.read(); + +setTimeout(common.mustCall(function() { + ender.on('end', common.mustCall()); + const c = ender.read(); + assert.strictEqual(c, null); + + const w = new TestWritable(); + w.on('finish', common.mustCall()); + piper.pipe(w); +}), 1); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-await-drain-manual-resume.js b/test/parallel/test-stream-pipe-await-drain-manual-resume.js new file mode 100644 index 0000000000..8fbb9ba2b9 --- /dev/null +++ b/test/parallel/test-stream-pipe-await-drain-manual-resume.js @@ -0,0 +1,90 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const stream = require('../../lib'); +const assert = require('assert'); + +// A consumer stream with a very low highWaterMark, which starts in a state +// where it buffers the chunk it receives rather than indicating that they +// have been consumed. +const writable = new stream.Writable({ + highWaterMark: 5 +}); + +let isCurrentlyBufferingWrites = true; +const queue = []; + +writable._write = (chunk, encoding, cb) => { + if (isCurrentlyBufferingWrites) + queue.push({ chunk, cb }); + else + cb(); +}; + +const readable = new stream.Readable({ + read() {} +}); + +readable.pipe(writable); + +readable.once('pause', common.mustCall(() => { + assert.strictEqual( + readable._readableState.awaitDrainWriters, + writable, + 'Expected awaitDrainWriters to be a Writable but instead got ' + + `${readable._readableState.awaitDrainWriters}` + ); + // First pause, resume manually. The next write() to writable will still + // return false, because chunks are still being buffered, so it will increase + // the awaitDrain counter again. + + process.nextTick(common.mustCall(() => { + readable.resume(); + })); + + readable.once('pause', common.mustCall(() => { + assert.strictEqual( + readable._readableState.awaitDrainWriters, + writable, + '.resume() should not reset the awaitDrainWriters, but instead got ' + + `${readable._readableState.awaitDrainWriters}` + ); + // Second pause, handle all chunks from now on. 
Once all callbacks that + // are currently queued up are handled, the awaitDrain drain counter should + // fall back to 0 and all chunks that are pending on the readable side + // should be flushed. + isCurrentlyBufferingWrites = false; + for (const queued of queue) + queued.cb(); + })); +})); + +readable.push(Buffer.alloc(100)); // Fill the writable HWM, first 'pause'. +readable.push(Buffer.alloc(100)); // Second 'pause'. +readable.push(Buffer.alloc(100)); // Should get through to the writable. +readable.push(null); + +writable.on('finish', common.mustCall(() => { + assert.strictEqual( + readable._readableState.awaitDrainWriters, + null, + `awaitDrainWriters should be reset to null + after all chunks are written but instead got + ${readable._readableState.awaitDrainWriters}` + ); + // Everything okay, all chunks were written. +})); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-await-drain-push-while-write.js b/test/parallel/test-stream-pipe-await-drain-push-while-write.js new file mode 100644 index 0000000000..ac548dbb9a --- /dev/null +++ b/test/parallel/test-stream-pipe-await-drain-push-while-write.js @@ -0,0 +1,51 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const stream = require('../../lib'); +const assert = require('assert'); + +const writable = new stream.Writable({ + write: common.mustCall(function(chunk, encoding, cb) { + assert.strictEqual( + readable._readableState.awaitDrainWriters, + null, + ); + + if (chunk.length === 32 * 1024) { // first chunk + readable.push(Buffer.alloc(34 * 1024)); // above hwm + // We should check if awaitDrain counter is increased in the next + // tick, because awaitDrain is incremented after this method finished + process.nextTick(() => { + assert.strictEqual(readable._readableState.awaitDrainWriters, writable); + }); + } + + process.nextTick(cb); + }, 3) +}); + +// A readable stream which produces two buffers. +const bufs = [Buffer.alloc(32 * 1024), Buffer.alloc(33 * 1024)]; // above hwm +const readable = new stream.Readable({ + read: function() { + while (bufs.length > 0) { + this.push(bufs.shift()); + } + } +}); + +readable.pipe(writable); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-await-drain.js b/test/parallel/test-stream-pipe-await-drain.js new file mode 100644 index 0000000000..96360af761 --- /dev/null +++ b/test/parallel/test-stream-pipe-await-drain.js @@ -0,0 +1,82 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const stream = require('../../lib'); +const assert = require('assert'); + +// This is very similar to test-stream-pipe-cleanup-pause.js. 
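+// writer2 and writer3 never call their write callbacks, so each writer that
+// reports back-pressure stays tracked in the reader's awaitDrainWriters set,
+// which the assertions below expect to grow to 1 and then 2.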
+ +const reader = new stream.Readable(); +const writer1 = new stream.Writable(); +const writer2 = new stream.Writable(); +const writer3 = new stream.Writable(); + +// 560000 is chosen here because it is larger than the (default) highWaterMark +// and will cause `.write()` to return false +// See: https://github.com/nodejs/node/issues/5820 +const buffer = Buffer.allocUnsafe(560000); + +reader._read = () => {}; + +writer1._write = common.mustCall(function(chunk, encoding, cb) { + this.emit('chunk-received'); + process.nextTick(cb); +}, 1); + +writer1.once('chunk-received', () => { + assert.strictEqual( + reader._readableState.awaitDrainWriters.size, + 0, + 'awaitDrain initial value should be 0, actual is ' + + reader._readableState.awaitDrainWriters.size + ); + setImmediate(() => { + // This one should *not* get through to writer1 because writer2 is not + // "done" processing. + reader.push(buffer); + }); +}); + +// A "slow" consumer: +writer2._write = common.mustCall((chunk, encoding, cb) => { + assert.strictEqual( + reader._readableState.awaitDrainWriters.size, + 1, + 'awaitDrain should be 1 after first push, actual is ' + + reader._readableState.awaitDrainWriters.size + ); + // Not calling cb here to "simulate" slow stream. + // This should be called exactly once, since the first .write() call + // will return false. +}, 1); + +writer3._write = common.mustCall((chunk, encoding, cb) => { + assert.strictEqual( + reader._readableState.awaitDrainWriters.size, + 2, + 'awaitDrain should be 2 after second push, actual is ' + + reader._readableState.awaitDrainWriters.size + ); + // Not calling cb here to "simulate" slow stream. + // This should be called exactly once, since the first .write() call + // will return false. +}, 1); + +reader.pipe(writer1); +reader.pipe(writer2); +reader.pipe(writer3); +reader.push(buffer); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-cleanup-pause.js b/test/parallel/test-stream-pipe-cleanup-pause.js new file mode 100644 index 0000000000..9490d8944a --- /dev/null +++ b/test/parallel/test-stream-pipe-cleanup-pause.js @@ -0,0 +1,52 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const stream = require('../../lib'); + +const reader = new stream.Readable(); +const writer1 = new stream.Writable(); +const writer2 = new stream.Writable(); + +// 560000 is chosen here because it is larger than the (default) highWaterMark +// and will cause `.write()` to return false +// See: https://github.com/nodejs/node/issues/2323 +const buffer = Buffer.allocUnsafe(560000); + +reader._read = () => {}; + +writer1._write = common.mustCall(function(chunk, encoding, cb) { + this.emit('chunk-received'); + cb(); +}, 1); +writer1.once('chunk-received', function() { + reader.unpipe(writer1); + reader.pipe(writer2); + reader.push(buffer); + setImmediate(function() { + reader.push(buffer); + setImmediate(function() { + reader.push(buffer); + }); + }); +}); + +writer2._write = common.mustCall(function(chunk, encoding, cb) { + cb(); +}, 3); + +reader.pipe(writer1); +reader.push(buffer); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff 
--git a/test/parallel/test-stream-pipe-cleanup.js b/test/parallel/test-stream-pipe-cleanup.js new file mode 100644 index 0000000000..7e3b13689f --- /dev/null +++ b/test/parallel/test-stream-pipe-cleanup.js @@ -0,0 +1,140 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +// This test asserts that Stream.prototype.pipe does not leave listeners +// hanging on the source or dest. +require('../common'); +const stream = require('../../lib'); +const assert = require('assert'); + +function Writable() { + this.writable = true; + this.endCalls = 0; + stream.Stream.call(this); +} +Object.setPrototypeOf(Writable.prototype, stream.Stream.prototype); +Object.setPrototypeOf(Writable, stream.Stream); +Writable.prototype.end = function() { + this.endCalls++; +}; + +Writable.prototype.destroy = function() { + this.endCalls++; +}; + +function Readable() { + this.readable = true; + stream.Stream.call(this); +} +Object.setPrototypeOf(Readable.prototype, stream.Stream.prototype); +Object.setPrototypeOf(Readable, stream.Stream); + +function Duplex() { + this.readable = true; + Writable.call(this); +} +Object.setPrototypeOf(Duplex.prototype, Writable.prototype); +Object.setPrototypeOf(Duplex, Writable); + +let i = 0; +const limit = 100; + +let w = new Writable(); + +let r; + +for (i = 0; i < limit; i++) { + r = new Readable(); + r.pipe(w); + r.emit('end'); +} +assert.strictEqual(r.listeners('end').length, 0); +assert.strictEqual(w.endCalls, limit); + +w.endCalls = 0; + +for (i = 0; i < limit; i++) { + r = new Readable(); + r.pipe(w); + r.emit('close'); +} +assert.strictEqual(r.listeners('close').length, 0); +assert.strictEqual(w.endCalls, limit); + +w.endCalls = 0; + +r = new Readable(); + +for (i = 0; i < limit; i++) { + w = new Writable(); + r.pipe(w); + w.emit('close'); +} +assert.strictEqual(w.listeners('close').length, 0); + +r = new Readable(); +w = new Writable(); +const d = new Duplex(); +r.pipe(d); // pipeline A +d.pipe(w); // pipeline B +assert.strictEqual(r.listeners('end').length, 2); // A.onend, A.cleanup +assert.strictEqual(r.listeners('close').length, 2); // A.onclose, A.cleanup +assert.strictEqual(d.listeners('end').length, 2); // B.onend, B.cleanup +// A.cleanup, B.onclose, B.cleanup +assert.strictEqual(d.listeners('close').length, 3); +assert.strictEqual(w.listeners('end').length, 0); 
+assert.strictEqual(w.listeners('close').length, 1); // B.cleanup + +r.emit('end'); +assert.strictEqual(d.endCalls, 1); +assert.strictEqual(w.endCalls, 0); +assert.strictEqual(r.listeners('end').length, 0); +assert.strictEqual(r.listeners('close').length, 0); +assert.strictEqual(d.listeners('end').length, 2); // B.onend, B.cleanup +assert.strictEqual(d.listeners('close').length, 2); // B.onclose, B.cleanup +assert.strictEqual(w.listeners('end').length, 0); +assert.strictEqual(w.listeners('close').length, 1); // B.cleanup + +d.emit('end'); +assert.strictEqual(d.endCalls, 1); +assert.strictEqual(w.endCalls, 1); +assert.strictEqual(r.listeners('end').length, 0); +assert.strictEqual(r.listeners('close').length, 0); +assert.strictEqual(d.listeners('end').length, 0); +assert.strictEqual(d.listeners('close').length, 0); +assert.strictEqual(w.listeners('end').length, 0); +assert.strictEqual(w.listeners('close').length, 0); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-error-handling.js b/test/parallel/test-stream-pipe-error-handling.js new file mode 100644 index 0000000000..531e979fd5 --- /dev/null +++ b/test/parallel/test-stream-pipe-error-handling.js @@ -0,0 +1,139 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { Stream, PassThrough } = require('../../lib'); + +{ + const source = new Stream(); + const dest = new Stream(); + + source.pipe(dest); + + let gotErr = null; + source.on('error', function(err) { + gotErr = err; + }); + + const err = new Error('This stream turned into bacon.'); + source.emit('error', err); + assert.strictEqual(gotErr, err); +} + +{ + const source = new Stream(); + const dest = new Stream(); + + source.pipe(dest); + + const err = new Error('This stream turned into bacon.'); + + let gotErr = null; + try { + source.emit('error', err); + } catch (e) { + gotErr = e; + } + + assert.strictEqual(gotErr, err); +} + +{ + const R = Stream.Readable; + const W = Stream.Writable; + + const r = new R({ autoDestroy: false }); + const w = new W({ autoDestroy: false }); + let removed = false; + + r._read = common.mustCall(function() { + setTimeout(common.mustCall(function() { + assert(removed); + assert.throws(function() { + w.emit('error', new Error('fail')); + }, /^Error: fail$/); + }), 1); + }); + + w.on('error', myOnError); + r.pipe(w); + w.removeListener('error', myOnError); + removed = true; + + function myOnError() { + throw new Error('this should not happen'); + } +} + +{ + const R = Stream.Readable; + const W = Stream.Writable; + + const r = new R(); + const w = new W(); + let removed = false; + + r._read = common.mustCall(function() { + setTimeout(common.mustCall(function() { + assert(removed); + w.emit('error', new Error('fail')); + }), 1); + }); + + w.on('error', common.mustCall()); + w._write = () => {}; + + r.pipe(w); + // Removing some OTHER random listener should not do anything + w.removeListener('error', () => {}); + removed = true; +} + +{ + const _err = new Error('this should be handled'); + const destination = new PassThrough(); + destination.once('error', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + + const stream = new Stream(); + stream + .pipe(destination); + + destination.destroy(_err); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-error-unhandled.js b/test/parallel/test-stream-pipe-error-unhandled.js new file mode 100644 index 0000000000..43b0daeac6 --- /dev/null +++ b/test/parallel/test-stream-pipe-error-unhandled.js @@ -0,0 +1,36 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { Readable, Writable } = require('../../lib'); + +process.on('uncaughtException', common.mustCall((err) => { + assert.strictEqual(err.message, 'asd'); +})); + +const r = new Readable({ + read() { + this.push('asd'); + } +}); +const w = new Writable({ + autoDestroy: true, + write() {} +}); + +r.pipe(w); +w.destroy(new Error('asd')); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-event.js b/test/parallel/test-stream-pipe-event.js new file mode 100644 index 0000000000..d7f991a298 --- /dev/null +++ b/test/parallel/test-stream-pipe-event.js @@ 
-0,0 +1,66 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const stream = require('../../lib'); +const assert = require('assert'); + +function Writable() { + this.writable = true; + stream.Stream.call(this); +} +Object.setPrototypeOf(Writable.prototype, stream.Stream.prototype); +Object.setPrototypeOf(Writable, stream.Stream); + +function Readable() { + this.readable = true; + stream.Stream.call(this); +} +Object.setPrototypeOf(Readable.prototype, stream.Stream.prototype); +Object.setPrototypeOf(Readable, stream.Stream); + +let passed = false; + +const w = new Writable(); +w.on('pipe', function(src) { + passed = true; +}); + +const r = new Readable(); +r.pipe(w); + +assert.ok(passed); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-flow-after-unpipe.js b/test/parallel/test-stream-pipe-flow-after-unpipe.js new file mode 100644 index 0000000000..0cf392be68 --- /dev/null +++ b/test/parallel/test-stream-pipe-flow-after-unpipe.js @@ -0,0 +1,44 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { Readable, Writable } = require('../../lib'); + +// Tests that calling .unpipe() un-blocks a stream that is paused because +// it is waiting on the writable side to finish a write(). + +const rs = new Readable({ + highWaterMark: 1, + // That this gets called at least 20 times is the real test here. + read: common.mustCallAtLeast(() => rs.push('foo'), 20) +}); + +const ws = new Writable({ + highWaterMark: 1, + write: common.mustCall(() => { + // Ignore the callback, this write() simply never finishes. + setImmediate(() => rs.unpipe(ws)); + }) +}); + +let chunks = 0; +rs.on('data', common.mustCallAtLeast(() => { + chunks++; + if (chunks >= 20) + rs.pause(); // Finish this test. 
+})); + +rs.pipe(ws); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-flow.js b/test/parallel/test-stream-pipe-flow.js new file mode 100644 index 0000000000..f3f0908c67 --- /dev/null +++ b/test/parallel/test-stream-pipe-flow.js @@ -0,0 +1,105 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { Readable, Writable, PassThrough } = require('../../lib'); + +{ + let ticks = 17; + + const rs = new Readable({ + objectMode: true, + read: () => { + if (ticks-- > 0) + return process.nextTick(() => rs.push({})); + rs.push({}); + rs.push(null); + } + }); + + const ws = new Writable({ + highWaterMark: 0, + objectMode: true, + write: (data, end, cb) => setImmediate(cb) + }); + + rs.on('end', common.mustCall()); + ws.on('finish', common.mustCall()); + rs.pipe(ws); +} + +{ + let missing = 8; + + const rs = new Readable({ + objectMode: true, + read: () => { + if (missing--) rs.push({}); + else rs.push(null); + } + }); + + const pt = rs + .pipe(new PassThrough({ objectMode: true, highWaterMark: 2 })) + .pipe(new PassThrough({ objectMode: true, highWaterMark: 2 })); + + pt.on('end', () => { + wrapper.push(null); + }); + + const wrapper = new Readable({ + objectMode: true, + read: () => { + process.nextTick(() => { + let data = pt.read(); + if (data === null) { + pt.once('readable', () => { + data = pt.read(); + if (data !== null) wrapper.push(data); + }); + } else { + wrapper.push(data); + } + }); + } + }); + + wrapper.resume(); + wrapper.on('end', common.mustCall()); +} + +{ + // Only register drain if there is backpressure. + const rs = new Readable({ read() {} }); + + const pt = rs + .pipe(new PassThrough({ objectMode: true, highWaterMark: 2 })); + assert.strictEqual(pt.listenerCount('drain'), 0); + pt.on('finish', () => { + assert.strictEqual(pt.listenerCount('drain'), 0); + }); + + rs.push('asd'); + assert.strictEqual(pt.listenerCount('drain'), 0); + + process.nextTick(() => { + rs.push('asd'); + assert.strictEqual(pt.listenerCount('drain'), 0); + rs.push(null); + assert.strictEqual(pt.listenerCount('drain'), 0); + }); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-manual-resume.js b/test/parallel/test-stream-pipe-manual-resume.js new file mode 100644 index 0000000000..cbf024f00f --- /dev/null +++ b/test/parallel/test-stream-pipe-manual-resume.js @@ -0,0 +1,50 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const stream = require('../../lib'); + +function test(throwCodeInbetween) { + // Check that a pipe does not stall if .read() is called unexpectedly + // (i.e. the stream is not resumed by the pipe). 
+ + const n = 1000; + let counter = n; + const rs = stream.Readable({ + objectMode: true, + read: common.mustCallAtLeast(() => { + if (--counter >= 0) + rs.push({ counter }); + else + rs.push(null); + }, n) + }); + + const ws = stream.Writable({ + objectMode: true, + write: common.mustCall((data, enc, cb) => { + setImmediate(cb); + }, n) + }); + + setImmediate(() => throwCodeInbetween(rs, ws)); + + rs.pipe(ws); +} + +test((rs) => rs.read()); +test((rs) => rs.resume()); +test(() => 0); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-multiple-pipes.js b/test/parallel/test-stream-pipe-multiple-pipes.js new file mode 100644 index 0000000000..7e6d83d43d --- /dev/null +++ b/test/parallel/test-stream-pipe-multiple-pipes.js @@ -0,0 +1,66 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const stream = require('../../lib'); +const assert = require('assert'); + +const readable = new stream.Readable({ + read: () => {} +}); + +const writables = []; + +for (let i = 0; i < 5; i++) { + const target = new stream.Writable({ + write: common.mustCall((chunk, encoding, callback) => { + target.output.push(chunk); + callback(); + }, 1) + }); + target.output = []; + + target.on('pipe', common.mustCall()); + readable.pipe(target); + + + writables.push(target); +} + +const input = Buffer.from([1, 2, 3, 4, 5]); + +readable.push(input); + +// The pipe() calls will postpone emission of the 'resume' event using nextTick, +// so no data will be available to the writable streams until then. +process.nextTick(common.mustCall(() => { + for (const target of writables) { + assert.deepStrictEqual(target.output, [input]); + + target.on('unpipe', common.mustCall()); + readable.unpipe(target); + } + + readable.push('something else'); // This does not get through. + readable.push(null); + readable.resume(); // Make sure the 'end' event gets emitted. +})); + +readable.on('end', common.mustCall(() => { + for (const target of writables) { + assert.deepStrictEqual(target.output, [input]); + } +})); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-needDrain.js b/test/parallel/test-stream-pipe-needDrain.js new file mode 100644 index 0000000000..e4abd8a7fa --- /dev/null +++ b/test/parallel/test-stream-pipe-needDrain.js @@ -0,0 +1,46 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const { Readable, Writable } = require('../../lib'); + +// Pipe should pause temporarily if writable needs drain. 
+{ + const w = new Writable({ + write(buf, encoding, callback) { + process.nextTick(callback); + }, + highWaterMark: 1 + }); + + while (w.write('asd')); + + assert.strictEqual(w.writableNeedDrain, true); + + const r = new Readable({ + read() { + this.push('asd'); + this.push(null); + } + }); + + r.on('pause', common.mustCall(2)); + r.on('end', common.mustCall()); + + r.pipe(w); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-same-destination-twice.js b/test/parallel/test-stream-pipe-same-destination-twice.js new file mode 100644 index 0000000000..5df30fc899 --- /dev/null +++ b/test/parallel/test-stream-pipe-same-destination-twice.js @@ -0,0 +1,93 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); + +// Regression test for https://github.com/nodejs/node/issues/12718. +// Tests that piping a source stream twice to the same destination stream +// works, and that a subsequent unpipe() call only removes the pipe *once*. +const assert = require('assert'); +const { PassThrough, Writable } = require('../../lib'); + +{ + const passThrough = new PassThrough(); + const dest = new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + assert.strictEqual(`${chunk}`, 'foobar'); + cb(); + }) + }); + + passThrough.pipe(dest); + passThrough.pipe(dest); + + assert.strictEqual(passThrough._events.data.length, 2); + assert.strictEqual(passThrough._readableState.pipes.length, 2); + assert.strictEqual(passThrough._readableState.pipes[0], dest); + assert.strictEqual(passThrough._readableState.pipes[1], dest); + + passThrough.unpipe(dest); + + assert.strictEqual(passThrough._events.data.length, 1); + assert.strictEqual(passThrough._readableState.pipes.length, 1); + assert.deepStrictEqual(passThrough._readableState.pipes, [dest]); + + passThrough.write('foobar'); + passThrough.pipe(dest); +} + +{ + const passThrough = new PassThrough(); + const dest = new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + assert.strictEqual(`${chunk}`, 'foobar'); + cb(); + }, 2) + }); + + passThrough.pipe(dest); + passThrough.pipe(dest); + + assert.strictEqual(passThrough._events.data.length, 2); + assert.strictEqual(passThrough._readableState.pipes.length, 2); + assert.strictEqual(passThrough._readableState.pipes[0], dest); + assert.strictEqual(passThrough._readableState.pipes[1], dest); + + passThrough.write('foobar'); +} + +{ + const passThrough = new PassThrough(); + const dest = new Writable({ + write: common.mustNotCall() + }); + + passThrough.pipe(dest); + passThrough.pipe(dest); + + assert.strictEqual(passThrough._events.data.length, 2); + assert.strictEqual(passThrough._readableState.pipes.length, 2); + assert.strictEqual(passThrough._readableState.pipes[0], dest); + assert.strictEqual(passThrough._readableState.pipes[1], dest); + + passThrough.unpipe(dest); + passThrough.unpipe(dest); + + assert.strictEqual(passThrough._events.data, undefined); + assert.strictEqual(passThrough._readableState.pipes.length, 0); + + passThrough.write('foobar'); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git 
a/test/parallel/test-stream-pipe-unpipe-streams.js b/test/parallel/test-stream-pipe-unpipe-streams.js new file mode 100644 index 0000000000..c31f4fab10 --- /dev/null +++ b/test/parallel/test-stream-pipe-unpipe-streams.js @@ -0,0 +1,111 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const { Readable, Writable } = require('../../lib'); + +const source = Readable({ read: () => {} }); +const dest1 = Writable({ write: () => {} }); +const dest2 = Writable({ write: () => {} }); + +source.pipe(dest1); +source.pipe(dest2); + +dest1.on('unpipe', common.mustCall()); +dest2.on('unpipe', common.mustCall()); + +assert.strictEqual(source._readableState.pipes[0], dest1); +assert.strictEqual(source._readableState.pipes[1], dest2); +assert.strictEqual(source._readableState.pipes.length, 2); + +// Should be able to unpipe them in the reverse order that they were piped. + +source.unpipe(dest2); + +assert.deepStrictEqual(source._readableState.pipes, [dest1]); +assert.notStrictEqual(source._readableState.pipes, dest2); + +dest2.on('unpipe', common.mustNotCall()); +source.unpipe(dest2); + +source.unpipe(dest1); + +assert.strictEqual(source._readableState.pipes.length, 0); + +{ + // Test `cleanup()` if we unpipe all streams. + const source = Readable({ read: () => {} }); + const dest1 = Writable({ write: () => {} }); + const dest2 = Writable({ write: () => {} }); + + let destCount = 0; + const srcCheckEventNames = ['end', 'data']; + const destCheckEventNames = ['close', 'finish', 'drain', 'error', 'unpipe']; + + const checkSrcCleanup = common.mustCall(() => { + assert.strictEqual(source._readableState.pipes.length, 0); + assert.strictEqual(source._readableState.flowing, false); + + srcCheckEventNames.forEach((eventName) => { + assert.strictEqual( + source.listenerCount(eventName), 0, + `source's '${eventName}' event listeners not removed` + ); + }); + }); + + function checkDestCleanup(dest) { + const currentDestId = ++destCount; + source.pipe(dest); + + const unpipeChecker = common.mustCall(() => { + assert.deepStrictEqual( + dest.listeners('unpipe'), [unpipeChecker], + `destination{${currentDestId}} should have a 'unpipe' event ` + + 'listener which is `unpipeChecker`' + ); + dest.removeListener('unpipe', unpipeChecker); + destCheckEventNames.forEach((eventName) => { + assert.strictEqual( + dest.listenerCount(eventName), 0, + `destination{${currentDestId}}'s '${eventName}' event ` + + 'listeners not removed' + ); + }); + + if (--destCount === 0) + checkSrcCleanup(); + }); + + dest.on('unpipe', unpipeChecker); + } + + checkDestCleanup(dest1); + checkDestCleanup(dest2); + source.unpipe(); +} + +{ + const src = Readable({ read: () => {} }); + const dst = Writable({ write: () => {} }); + src.pipe(dst); + src.on('resume', common.mustCall(() => { + src.on('pause', common.mustCall()); + src.unpipe(dst); + })); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipe-without-listenerCount.js b/test/parallel/test-stream-pipe-without-listenerCount.js new file mode 100644 index 0000000000..8576d18eb0 --- /dev/null +++ b/test/parallel/test-stream-pipe-without-listenerCount.js @@ -0,0 +1,32 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; 
+const common = require('../common'); +const stream = require('../../lib'); + +const r = new stream.Stream(); +r.listenerCount = undefined; + +const w = new stream.Stream(); +w.listenerCount = undefined; + +w.on('pipe', function() { + r.emit('error', new Error('Readable Error')); + w.emit('error', new Error('Writable Error')); +}); +r.on('error', common.mustCall()); +w.on('error', common.mustCall()); +r.pipe(w); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipeline-async-iterator.js b/test/parallel/test-stream-pipeline-async-iterator.js new file mode 100644 index 0000000000..241ab9b1c1 --- /dev/null +++ b/test/parallel/test-stream-pipeline-async-iterator.js @@ -0,0 +1,46 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Readable, PassThrough, pipeline } = require('../../lib'); +const assert = require('assert'); + +const _err = new Error('kaboom'); + +async function run() { + const source = new Readable({ + read() { + } + }); + source.push('hello'); + source.push('world'); + + setImmediate(() => { source.destroy(_err); }); + + const iterator = pipeline( + source, + new PassThrough(), + () => {}); + + iterator.setEncoding('utf8'); + + for await (const k of iterator) { + assert.strictEqual(k, 'helloworld'); + } +} + +run().catch(common.mustCall((err) => assert.strictEqual(err, _err))); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipeline-http2.js b/test/parallel/test-stream-pipeline-http2.js new file mode 100644 index 0000000000..c0b02fc98c --- /dev/null +++ b/test/parallel/test-stream-pipeline-http2.js @@ -0,0 +1,51 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); +const { Readable, pipeline } = require('../../lib'); +const http2 = require('http2'); + +{ + const server = http2.createServer((req, res) => { + pipeline(req, res, common.mustCall()); + }); + + server.listen(0, () => { + const url = `http://localhost:${server.address().port}`; + const client = http2.connect(url); + const req = client.request({ ':method': 'POST' }); + + const rs = new Readable({ + read() { + rs.push('hello'); + } + }); + + pipeline(rs, req, common.mustCall((err) => { + server.close(); + client.close(); + })); + + let cnt = 10; + req.on('data', (data) => { + cnt--; + if (cnt === 0) rs.destroy(); + }); + }); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipeline-process.js b/test/parallel/test-stream-pipeline-process.js new file mode 100644 index 0000000000..da75c59153 --- /dev/null +++ b/test/parallel/test-stream-pipeline-process.js @@ -0,0 +1,41 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const os = require('os'); + +if 
(process.argv[2] === 'child') { + const { pipeline } = require('../../lib'); + pipeline( + process.stdin, + process.stdout, + common.mustSucceed() + ); +} else { + const cp = require('child_process'); + cp.exec([ + 'echo', + 'hello', + '|', + `"${process.execPath}"`, + `"${__filename}"`, + 'child', + ].join(' '), common.mustSucceed((stdout) => { + assert.strictEqual(stdout.split(os.EOL).shift().trim(), 'hello'); + })); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipeline-queued-end-in-destroy.js b/test/parallel/test-stream-pipeline-queued-end-in-destroy.js new file mode 100644 index 0000000000..91089ef329 --- /dev/null +++ b/test/parallel/test-stream-pipeline-queued-end-in-destroy.js @@ -0,0 +1,54 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { Readable, Duplex, pipeline } = require('../../lib'); + +// Test that the callback for pipeline() is called even when the ._destroy() +// method of the stream places an .end() request to itself that does not +// get processed before the destruction of the stream (i.e. the 'close' event). +// Refs: https://github.com/nodejs/node/issues/24456 + +const readable = new Readable({ + read: common.mustCall(() => {}) +}); + +const duplex = new Duplex({ + write(chunk, enc, cb) { + // Simulate messages queueing up. + }, + read() {}, + destroy(err, cb) { + // Call end() from inside the destroy() method, like HTTP/2 streams + // do at the time of writing. + this.end(); + cb(err); + } +}); + +duplex.on('finished', common.mustNotCall()); + +pipeline(readable, duplex, common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); +})); + +// Write one chunk of data, and destroy the stream later. +// That should trigger the pipeline destruction. +readable.push('foo'); +setImmediate(() => { + readable.destroy(); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipeline-uncaught.js b/test/parallel/test-stream-pipeline-uncaught.js new file mode 100644 index 0000000000..b83ef016d4 --- /dev/null +++ b/test/parallel/test-stream-pipeline-uncaught.js @@ -0,0 +1,37 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { + pipeline, + PassThrough +} = require('../../lib'); +const assert = require('assert'); + +process.on('uncaughtException', common.mustCall((err) => { + assert.strictEqual(err.message, 'error'); +})); + +// Ensure that pipeline that ends with Promise +// still propagates error to uncaughtException. 
+const s = new PassThrough(); +s.end('data'); +pipeline(s, async function(source) { + for await (const chunk of source) { } // eslint-disable-line no-unused-vars, no-empty +}, common.mustSucceed(() => { + throw new Error('error'); +})); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipeline-with-empty-string.js b/test/parallel/test-stream-pipeline-with-empty-string.js new file mode 100644 index 0000000000..15f70919ad --- /dev/null +++ b/test/parallel/test-stream-pipeline-with-empty-string.js @@ -0,0 +1,33 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { + pipeline, + PassThrough +} = require('../../lib'); + + +async function runTest() { + await pipeline( + '', + new PassThrough({ objectMode: true }), + common.mustCall(() => { }) + ); +} + +runTest().then(common.mustCall(() => {})); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-preprocess.js b/test/parallel/test-stream-preprocess.js new file mode 100644 index 0000000000..b55cd5780d --- /dev/null +++ b/test/parallel/test-stream-preprocess.js @@ -0,0 +1,75 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const fs = require('fs'); +const rl = require('readline'); +const fixtures = require('../common/fixtures'); + +const BOM = '\uFEFF'; + +// Get the data using a non-stream way to compare with the streamed data. +const modelData = fixtures.readSync('file-to-read-without-bom.txt', 'utf8'); +const modelDataFirstCharacter = modelData[0]; + +// Detect the number of forthcoming 'line' events for mustCall() 'expected' arg. +const lineCount = modelData.match(/\n/g).length; + +// Ensure both without-bom and with-bom test files are textwise equal. +assert.strictEqual(fixtures.readSync('file-to-read-with-bom.txt', 'utf8'), + `${BOM}${modelData}` +); + +// An unjustified BOM stripping with a non-BOM character unshifted to a stream. +const inputWithoutBOM = + fs.createReadStream(fixtures.path('file-to-read-without-bom.txt'), 'utf8'); + +inputWithoutBOM.once('readable', common.mustCall(() => { + const maybeBOM = inputWithoutBOM.read(1); + assert.strictEqual(maybeBOM, modelDataFirstCharacter); + assert.notStrictEqual(maybeBOM, BOM); + + inputWithoutBOM.unshift(maybeBOM); + + let streamedData = ''; + rl.createInterface({ + input: inputWithoutBOM, + }).on('line', common.mustCall((line) => { + streamedData += `${line}\n`; + }, lineCount)).on('close', common.mustCall(() => { + assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\r\n/g, '\n') : modelData); + })); +})); + +// A justified BOM stripping. 
+const inputWithBOM = + fs.createReadStream(fixtures.path('file-to-read-with-bom.txt'), 'utf8'); + +inputWithBOM.once('readable', common.mustCall(() => { + const maybeBOM = inputWithBOM.read(1); + assert.strictEqual(maybeBOM, BOM); + + let streamedData = ''; + rl.createInterface({ + input: inputWithBOM, + }).on('line', common.mustCall((line) => { + streamedData += `${line}\n`; + }, lineCount)).on('close', common.mustCall(() => { + assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\r\n/g, '\n') : modelData); + })); +})); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-promises.js b/test/parallel/test-stream-promises.js new file mode 100644 index 0000000000..51e86db5ad --- /dev/null +++ b/test/parallel/test-stream-promises.js @@ -0,0 +1,118 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const stream = require('../../lib'); +const { + Readable, + Writable, + promises, +} = stream; +const { + finished, + pipeline, +} = require('../../lib/stream/promises'); +const fs = require('fs'); +const assert = require('assert'); +const { promisify } = require('util'); + +assert.strictEqual(promises.pipeline, pipeline); +assert.strictEqual(promises.finished, finished); +assert.strictEqual(pipeline, promisify(stream.pipeline)); +assert.strictEqual(finished, promisify(stream.finished)); + +// pipeline success +{ + let finished = false; + const processed = []; + const expected = [ + Buffer.from('a'), + Buffer.from('b'), + Buffer.from('c'), + ]; + + const read = new Readable({ + read() { } + }); + + const write = new Writable({ + write(data, enc, cb) { + processed.push(data); + cb(); + } + }); + + write.on('finish', () => { + finished = true; + }); + + for (let i = 0; i < expected.length; i++) { + read.push(expected[i]); + } + read.push(null); + + pipeline(read, write).then(common.mustCall((value) => { + assert.ok(finished); + assert.deepStrictEqual(processed, expected); + })); +} + +// pipeline error +{ + const read = new Readable({ + read() { } + }); + + const write = new Writable({ + write(data, enc, cb) { + cb(); + } + }); + + read.push('data'); + setImmediate(() => read.destroy()); + + pipeline(read, write).catch(common.mustCall((err) => { + assert.ok(err, 'should have an error'); + })); +} + +// finished success +{ + async function run() { + const rs = fs.createReadStream(__filename); + + let ended = false; + rs.resume(); + rs.on('end', () => { + ended = true; + }); + await finished(rs); + assert(ended); + } + + run().then(common.mustCall()); +} + +// finished error +{ + const rs = fs.createReadStream('file-does-not-exist'); + + assert.rejects(finished(rs), { + code: 'ENOENT' + }).then(common.mustCall()); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-push-order.js b/test/parallel/test-stream-push-order.js new file mode 100644 index 0000000000..96eec48dbb --- /dev/null +++ b/test/parallel/test-stream-push-order.js @@ -0,0 +1,67 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const Readable = require('../../lib').Readable; +const assert = require('assert'); + +const s = new Readable({ + highWaterMark: 20, + encoding: 'ascii' +}); + +const list = ['1', '2', '3', '4', '5', '6']; + +s._read = function(n) { + const one = list.shift(); + if (!one) { + s.push(null); + } else { + const two = list.shift(); + s.push(one); + s.push(two); + } +}; + +s.read(0); + +// ACTUALLY [1, 3, 5, 6, 4, 2] + +process.on('exit', function() { + assert.strictEqual(s.readableBuffer.join(','), '1,2,3,4,5,6'); + silentConsole.log('ok'); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-push-strings.js b/test/parallel/test-stream-push-strings.js new file mode 100644 index 0000000000..e7269e6379 --- /dev/null +++ b/test/parallel/test-stream-push-strings.js @@ -0,0 +1,82 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); + +const Readable = require('../../lib').Readable; + +class MyStream extends Readable { + constructor(options) { + super(options); + this._chunks = 3; + } + + _read(n) { + switch (this._chunks--) { + case 0: + return this.push(null); + case 1: + return setTimeout(() => { + this.push('last chunk'); + }, 100); + case 2: + return this.push('second to last chunk'); + case 3: + return process.nextTick(() => { + this.push('first chunk'); + }); + default: + throw new Error('?'); + } + } +} + +const ms = new MyStream(); +const results = []; +ms.on('readable', function() { + let chunk; + while (null !== (chunk = ms.read())) + results.push(String(chunk)); +}); + +const expect = [ 'first chunksecond to last chunk', 'last chunk' ]; +process.on('exit', function() { + assert.strictEqual(ms._chunks, -1); + assert.deepStrictEqual(results, expect); + silentConsole.log('ok'); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-aborted.js b/test/parallel/test-stream-readable-aborted.js new file mode 100644 index 0000000000..2d0569129f --- /dev/null +++ b/test/parallel/test-stream-readable-aborted.js @@ -0,0 +1,81 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const { Readable, Duplex } = require('../../lib'); + +{ + const readable = new Readable({ + read() { + } + }); + assert.strictEqual(readable.readableAborted, false); + readable.destroy(); + assert.strictEqual(readable.readableAborted, true); +} + +{ + const readable = new Readable({ + read() { + } + }); + assert.strictEqual(readable.readableAborted, false); + readable.push(null); + readable.destroy(); + assert.strictEqual(readable.readableAborted, true); +} + +{ + const readable = new Readable({ + read() { + } + }); + assert.strictEqual(readable.readableAborted, false); + readable.push('asd'); + readable.destroy(); + assert.strictEqual(readable.readableAborted, true); +} + +{ + const readable = new Readable({ + read() { + } + }); + assert.strictEqual(readable.readableAborted, false); + readable.push('asd'); + readable.push(null); + assert.strictEqual(readable.readableAborted, false); + readable.on('end', common.mustCall(() => { + assert.strictEqual(readable.readableAborted, false); + readable.destroy(); + assert.strictEqual(readable.readableAborted, false); + queueMicrotask(() => { + assert.strictEqual(readable.readableAborted, false); + }); + })); + readable.resume(); +} + +{ + const duplex = new Duplex({ + readable: false, + write() {} + }); + duplex.destroy(); + assert.strictEqual(duplex.readableAborted, false); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-add-chunk-during-data.js b/test/parallel/test-stream-readable-add-chunk-during-data.js new file mode 100644 index 0000000000..d0d7d2c8c4 --- /dev/null +++ b/test/parallel/test-stream-readable-add-chunk-during-data.js @@ -0,0 +1,36 @@ + + 'use strict' + + const tap = require('tap'); + 
const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { Readable } = require('../../lib'); + +// Verify that .push() and .unshift() can be called from 'data' listeners. + +for (const method of ['push', 'unshift']) { + const r = new Readable({ read() {} }); + r.once('data', common.mustCall((chunk) => { + assert.strictEqual(r.readableLength, 0); + r[method](chunk); + assert.strictEqual(r.readableLength, chunk.length); + + r.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk.toString(), 'Hello, world'); + })); + })); + + r.push('Hello, world'); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-constructor-set-methods.js b/test/parallel/test-stream-readable-constructor-set-methods.js new file mode 100644 index 0000000000..fb4a314b34 --- /dev/null +++ b/test/parallel/test-stream-readable-constructor-set-methods.js @@ -0,0 +1,26 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); + +const Readable = require('../../lib').Readable; + +const _read = common.mustCall(function _read(n) { + this.push(null); +}); + +const r = new Readable({ read: _read }); +r.resume(); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-data.js b/test/parallel/test-stream-readable-data.js new file mode 100644 index 0000000000..2589f96326 --- /dev/null +++ b/test/parallel/test-stream-readable-data.js @@ -0,0 +1,34 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); + +const { Readable } = require('../../lib'); + +const readable = new Readable({ + read() {} +}); + +function read() {} + +readable.setEncoding('utf8'); +readable.on('readable', read); +readable.removeListener('readable', read); + +process.nextTick(function() { + readable.on('data', common.mustCall()); + readable.push('hello'); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-destroy.js b/test/parallel/test-stream-readable-destroy.js new file mode 100644 index 0000000000..56e7fe5944 --- /dev/null +++ b/test/parallel/test-stream-readable-destroy.js @@ -0,0 +1,418 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Readable, addAbortSignal } = require('../../lib'); +const assert = require('assert'); + +{ + const read = new Readable({ + read() {} + }); + read.resume(); + + read.on('close', common.mustCall()); + + read.destroy(); + assert.strictEqual(read.destroyed, true); +} + +{ + const read = new Readable({ + read() {} + }); + read.resume(); + + const expected = new Error('kaboom'); + + read.on('end', common.mustNotCall('no end event')); + read.on('close', common.mustCall()); + read.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + 
read.destroy(expected); + assert.strictEqual(read.destroyed, true); +} + +{ + const read = new Readable({ + read() {} + }); + + read._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, expected); + cb(err); + }); + + const expected = new Error('kaboom'); + + read.on('end', common.mustNotCall('no end event')); + read.on('close', common.mustCall()); + read.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + read.destroy(expected); + assert.strictEqual(read.destroyed, true); +} + +{ + const read = new Readable({ + read() {}, + destroy: common.mustCall(function(err, cb) { + assert.strictEqual(err, expected); + cb(); + }) + }); + + const expected = new Error('kaboom'); + + read.on('end', common.mustNotCall('no end event')); + + // Error is swallowed by the custom _destroy + read.on('error', common.mustNotCall('no error event')); + read.on('close', common.mustCall()); + + read.destroy(expected); + assert.strictEqual(read.destroyed, true); +} + +{ + const read = new Readable({ + read() {} + }); + + read._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(); + }); + + read.destroy(); + assert.strictEqual(read.destroyed, true); +} + +{ + const read = new Readable({ + read() {} + }); + read.resume(); + + read._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + process.nextTick(() => { + this.push(null); + cb(); + }); + }); + + const fail = common.mustNotCall('no end event'); + + read.on('end', fail); + read.on('close', common.mustCall()); + + read.destroy(); + + read.removeListener('end', fail); + read.on('end', common.mustNotCall()); + assert.strictEqual(read.destroyed, true); +} + +{ + const read = new Readable({ + read() {} + }); + + const expected = new Error('kaboom'); + + read._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(expected); + }); + + let ticked = false; + read.on('end', common.mustNotCall('no end event')); + read.on('error', common.mustCall((err) => { + assert.strictEqual(ticked, true); + assert.strictEqual(read._readableState.errorEmitted, true); + assert.strictEqual(read._readableState.errored, expected); + assert.strictEqual(err, expected); + })); + + read.destroy(); + assert.strictEqual(read._readableState.errorEmitted, false); + assert.strictEqual(read._readableState.errored, expected); + assert.strictEqual(read.destroyed, true); + ticked = true; +} + +{ + const read = new Readable({ + read() {} + }); + read.resume(); + + read.destroyed = true; + assert.strictEqual(read.destroyed, true); + + // The internal destroy() mechanism should not be triggered + read.on('end', common.mustNotCall()); + read.destroy(); +} + +{ + function MyReadable() { + assert.strictEqual(this.destroyed, false); + this.destroyed = false; + Readable.call(this); + } + + Object.setPrototypeOf(MyReadable.prototype, Readable.prototype); + Object.setPrototypeOf(MyReadable, Readable); + + new MyReadable(); +} + +{ + // Destroy and destroy callback + const read = new Readable({ + read() {} + }); + read.resume(); + + const expected = new Error('kaboom'); + + let ticked = false; + read.on('close', common.mustCall(() => { + assert.strictEqual(read._readableState.errorEmitted, true); + assert.strictEqual(ticked, true); + })); + read.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + assert.strictEqual(read._readableState.errored, null); + assert.strictEqual(read._readableState.errorEmitted, false); + + read.destroy(expected, 
common.mustCall(function(err) { + assert.strictEqual(read._readableState.errored, expected); + assert.strictEqual(err, expected); + })); + assert.strictEqual(read._readableState.errorEmitted, false); + assert.strictEqual(read._readableState.errored, expected); + ticked = true; +} + +{ + const readable = new Readable({ + destroy: common.mustCall(function(err, cb) { + process.nextTick(cb, new Error('kaboom 1')); + }), + read() {} + }); + + let ticked = false; + readable.on('close', common.mustCall(() => { + assert.strictEqual(ticked, true); + assert.strictEqual(readable._readableState.errorEmitted, true); + })); + readable.on('error', common.mustCall((err) => { + assert.strictEqual(ticked, true); + assert.strictEqual(err.message, 'kaboom 1'); + assert.strictEqual(readable._readableState.errorEmitted, true); + })); + + readable.destroy(); + assert.strictEqual(readable.destroyed, true); + assert.strictEqual(readable._readableState.errored, null); + assert.strictEqual(readable._readableState.errorEmitted, false); + + // Test case where `readable.destroy()` is called again with an error before + // the `_destroy()` callback is called. + readable.destroy(new Error('kaboom 2')); + assert.strictEqual(readable._readableState.errorEmitted, false); + assert.strictEqual(readable._readableState.errored, null); + + ticked = true; +} + +{ + const read = new Readable({ + read() {} + }); + + read.destroy(); + read.push('hi'); + read.on('data', common.mustNotCall()); +} + +{ + const read = new Readable({ + read: common.mustNotCall(function() {}) + }); + read.destroy(); + assert.strictEqual(read.destroyed, true); + read.read(); +} + +{ + const read = new Readable({ + autoDestroy: false, + read() { + this.push(null); + this.push('asd'); + } + }); + + read.on('error', common.mustCall(() => { + assert(read._readableState.errored); + })); + read.resume(); +} + +{ + const controller = new AbortController(); + const read = addAbortSignal(controller.signal, new Readable({ + read() { + this.push('asd'); + }, + })); + + read.on('error', common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError'); + })); + controller.abort(); + read.on('data', common.mustNotCall()); +} + +{ + const controller = new AbortController(); + const read = new Readable({ + signal: controller.signal, + read() { + this.push('asd'); + }, + }); + + read.on('error', common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError'); + })); + controller.abort(); + read.on('data', common.mustNotCall()); +} + +{ + const controller = new AbortController(); + const read = addAbortSignal(controller.signal, new Readable({ + objectMode: true, + read() { + return false; + } + })); + read.push('asd'); + + read.on('error', common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError'); + })); + assert.rejects((async () => { + // eslint-disable-next-line no-unused-vars, no-empty + for await (const chunk of read) { } + })(), /AbortError/); + setTimeout(() => controller.abort(), 0); +} + +{ + const read = new Readable({ + read() { + }, + }); + + read.on('data', common.mustNotCall()); + read.on('error', common.mustCall((e) => { + read.push('asd'); + read.read(); + })); + read.on('close', common.mustCall((e) => { + read.push('asd'); + read.read(); + })); + read.destroy(new Error('asd')); +} + +{ + const read = new Readable({ + read() { + }, + }); + + read.on('data', common.mustNotCall()); + read.on('close', common.mustCall((e) => { + read.push('asd'); + read.read(); + })); + read.destroy(); +} + +{ + const read = new Readable({ + read() { + }, + }); 
+ + read.on('data', common.mustNotCall()); + read.on('close', common.mustCall((e) => { + read.push('asd'); + read.unshift('asd'); + })); + read.destroy(); +} + +{ + const read = new Readable({ + read() { + }, + }); + + read.on('data', common.mustNotCall()); + read.destroy(); + read.unshift('asd'); +} + +{ + const read = new Readable({ + read() { + }, + }); + + read.resume(); + read.on('data', common.mustNotCall()); + read.on('close', common.mustCall((e) => { + read.push('asd'); + })); + read.destroy(); +} + +{ + const read = new Readable({ + read() { + }, + }); + + read.on('data', common.mustNotCall()); + read.destroy(); + read.push('asd'); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-didRead.js b/test/parallel/test-stream-readable-didRead.js new file mode 100644 index 0000000000..d6a093415a --- /dev/null +++ b/test/parallel/test-stream-readable-didRead.js @@ -0,0 +1,126 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { isDisturbed, isErrored, Readable } = require('../../lib'); + +function noop() {} + +function check(readable, data, fn) { + assert.strictEqual(readable.readableDidRead, false); + assert.strictEqual(isDisturbed(readable), false); + assert.strictEqual(isErrored(readable), false); + if (data === -1) { + readable.on('error', common.mustCall(() => { + assert.strictEqual(isErrored(readable), true); + })); + readable.on('data', common.mustNotCall()); + readable.on('end', common.mustNotCall()); + } else { + readable.on('error', common.mustNotCall()); + if (data === -2) { + readable.on('end', common.mustNotCall()); + } else { + readable.on('end', common.mustCall()); + } + if (data > 0) { + readable.on('data', common.mustCallAtLeast(data)); + } else { + readable.on('data', common.mustNotCall()); + } + } + readable.on('close', common.mustCall()); + fn(); + setImmediate(() => { + assert.strictEqual(readable.readableDidRead, data > 0); + if (data > 0) { + assert.strictEqual(isDisturbed(readable), true); + } + }); +} + +{ + const readable = new Readable({ + read() { + this.push(null); + } + }); + check(readable, 0, () => { + readable.read(); + }); +} + +{ + const readable = new Readable({ + read() { + this.push(null); + } + }); + check(readable, 0, () => { + readable.resume(); + }); +} + +{ + const readable = new Readable({ + read() { + this.push(null); + } + }); + check(readable, -2, () => { + readable.destroy(); + }); +} + +{ + const readable = new Readable({ + read() { + this.push(null); + } + }); + + check(readable, -1, () => { + readable.destroy(new Error()); + }); +} + +{ + const readable = new Readable({ + read() { + this.push('data'); + this.push(null); + } + }); + + check(readable, 1, () => { + readable.on('data', noop); + }); +} + +{ + const readable = new Readable({ + read() { + this.push('data'); + this.push(null); + } + }); + + check(readable, 1, () => { + readable.on('data', noop); + readable.off('data', noop); + }); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-emit-readable-short-stream.js 
b/test/parallel/test-stream-readable-emit-readable-short-stream.js new file mode 100644 index 0000000000..4caadc96f0 --- /dev/null +++ b/test/parallel/test-stream-readable-emit-readable-short-stream.js @@ -0,0 +1,161 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const stream = require('../../lib'); +const assert = require('assert'); + +{ + const r = new stream.Readable({ + read: common.mustCall(function() { + this.push('content'); + this.push(null); + }) + }); + + const t = new stream.Transform({ + transform: common.mustCall(function(chunk, encoding, callback) { + this.push(chunk); + return callback(); + }), + flush: common.mustCall(function(callback) { + return callback(); + }) + }); + + r.pipe(t); + t.on('readable', common.mustCall(function() { + while (true) { + const chunk = t.read(); + if (!chunk) + break; + + assert.strictEqual(chunk.toString(), 'content'); + } + }, 2)); +} + +{ + const t = new stream.Transform({ + transform: common.mustCall(function(chunk, encoding, callback) { + this.push(chunk); + return callback(); + }), + flush: common.mustCall(function(callback) { + return callback(); + }) + }); + + t.end('content'); + + t.on('readable', common.mustCall(function() { + while (true) { + const chunk = t.read(); + if (!chunk) + break; + assert.strictEqual(chunk.toString(), 'content'); + } + })); +} + +{ + const t = new stream.Transform({ + transform: common.mustCall(function(chunk, encoding, callback) { + this.push(chunk); + return callback(); + }), + flush: common.mustCall(function(callback) { + return callback(); + }) + }); + + t.write('content'); + t.end(); + + t.on('readable', common.mustCall(function() { + while (true) { + const chunk = t.read(); + if (!chunk) + break; + assert.strictEqual(chunk.toString(), 'content'); + } + })); +} + +{ + const t = new stream.Readable({ + read() { + } + }); + + t.on('readable', common.mustCall(function() { + while (true) { + const chunk = t.read(); + if (!chunk) + break; + assert.strictEqual(chunk.toString(), 'content'); + } + })); + + t.push('content'); + t.push(null); +} + +{ + const t = new stream.Readable({ + read() { + } + }); + + t.on('readable', common.mustCall(function() { + while (true) { + const chunk = t.read(); + if (!chunk) + break; + assert.strictEqual(chunk.toString(), 'content'); + } + }, 2)); + + process.nextTick(() => { + t.push('content'); + t.push(null); + }); +} + +{ + const t = new stream.Transform({ + transform: common.mustCall(function(chunk, encoding, callback) { + this.push(chunk); + return callback(); + }), + flush: common.mustCall(function(callback) { + return callback(); + }) + }); + + t.on('readable', common.mustCall(function() { + while (true) { + const chunk = t.read(); + if (!chunk) + break; + assert.strictEqual(chunk.toString(), 'content'); + } + }, 2)); + + t.write('content'); + t.end(); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-emittedReadable.js b/test/parallel/test-stream-readable-emittedReadable.js new file mode 100644 index 0000000000..ccb71ab1c5 --- /dev/null +++ b/test/parallel/test-stream-readable-emittedReadable.js @@ -0,0 +1,88 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = 
require('assert'); +const Readable = require('../../lib').Readable; + +const readable = new Readable({ + read: () => {} +}); + +// Initialized to false. +assert.strictEqual(readable._readableState.emittedReadable, false); + +const expected = [Buffer.from('foobar'), Buffer.from('quo'), null]; +readable.on('readable', common.mustCall(() => { + // emittedReadable should be true when the readable event is emitted + assert.strictEqual(readable._readableState.emittedReadable, true); + assert.deepStrictEqual(readable.read(), expected.shift()); + // emittedReadable is reset to false during read() + assert.strictEqual(readable._readableState.emittedReadable, false); +}, 3)); + +// When the first readable listener is just attached, +// emittedReadable should be false +assert.strictEqual(readable._readableState.emittedReadable, false); + +// These trigger a single 'readable', as things are batched up +process.nextTick(common.mustCall(() => { + readable.push('foo'); +})); +process.nextTick(common.mustCall(() => { + readable.push('bar'); +})); + +// These triggers two readable events +setImmediate(common.mustCall(() => { + readable.push('quo'); + process.nextTick(common.mustCall(() => { + readable.push(null); + })); +})); + +const noRead = new Readable({ + read: () => {} +}); + +noRead.on('readable', common.mustCall(() => { + // emittedReadable should be true when the readable event is emitted + assert.strictEqual(noRead._readableState.emittedReadable, true); + noRead.read(0); + // emittedReadable is not reset during read(0) + assert.strictEqual(noRead._readableState.emittedReadable, true); +})); + +noRead.push('foo'); +noRead.push(null); + +const flowing = new Readable({ + read: () => {} +}); + +flowing.on('data', common.mustCall(() => { + // When in flowing mode, emittedReadable is always false. + assert.strictEqual(flowing._readableState.emittedReadable, false); + flowing.read(); + assert.strictEqual(flowing._readableState.emittedReadable, false); +}, 3)); + +flowing.push('foooo'); +flowing.push('bar'); +flowing.push('quo'); +process.nextTick(common.mustCall(() => { + flowing.push(null); +})); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-end-destroyed.js b/test/parallel/test-stream-readable-end-destroyed.js new file mode 100644 index 0000000000..4bf5929e0b --- /dev/null +++ b/test/parallel/test-stream-readable-end-destroyed.js @@ -0,0 +1,32 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Readable } = require('../../lib'); + +{ + // Don't emit 'end' after 'close'. 
+ + const r = new Readable(); + + r.on('end', common.mustNotCall()); + r.resume(); + r.destroy(); + r.on('close', common.mustCall(() => { + r.push(null); + })); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-ended.js b/test/parallel/test-stream-readable-ended.js new file mode 100644 index 0000000000..7ebc6878b7 --- /dev/null +++ b/test/parallel/test-stream-readable-ended.js @@ -0,0 +1,61 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Readable } = require('../../lib'); +const assert = require('assert'); + +// basic +{ + // Find it on Readable.prototype + assert(Reflect.has(Readable.prototype, 'readableEnded')); +} + +// event +{ + const readable = new Readable(); + + readable._read = () => { + // The state ended should start in false. + assert.strictEqual(readable.readableEnded, false); + readable.push('asd'); + assert.strictEqual(readable.readableEnded, false); + readable.push(null); + assert.strictEqual(readable.readableEnded, false); + }; + + readable.on('end', common.mustCall(() => { + assert.strictEqual(readable.readableEnded, true); + })); + + readable.on('data', common.mustCall(() => { + assert.strictEqual(readable.readableEnded, false); + })); +} + +// Verifies no `error` triggered on multiple .push(null) invocations +{ + const readable = new Readable(); + + readable.on('readable', () => { readable.read(); }); + readable.on('error', common.mustNotCall()); + readable.on('end', common.mustCall()); + + readable.push('a'); + readable.push(null); + readable.push(null); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-error-end.js b/test/parallel/test-stream-readable-error-end.js new file mode 100644 index 0000000000..15dafa970d --- /dev/null +++ b/test/parallel/test-stream-readable-error-end.js @@ -0,0 +1,30 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Readable } = require('../../lib'); + +{ + const r = new Readable({ read() {} }); + + r.on('end', common.mustNotCall()); + r.on('data', common.mustCall()); + r.on('error', common.mustCall()); + r.push('asd'); + r.push(null); + r.destroy(new Error('kaboom')); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-event.js b/test/parallel/test-stream-readable-event.js new file mode 100644 index 0000000000..7823c68082 --- /dev/null +++ b/test/parallel/test-stream-readable-event.js @@ -0,0 +1,143 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const Readable = require('../../lib').Readable; + +{ + // First test, not reading when the readable is added. + // make sure that on('readable', ...) triggers a readable event. + const r = new Readable({ + highWaterMark: 3 + }); + + r._read = common.mustNotCall(); + + // This triggers a 'readable' event, which is lost. + r.push(Buffer.from('blerg')); + + setTimeout(function() { + // We're testing what we think we are + assert(!r._readableState.reading); + r.on('readable', common.mustCall()); + }, 1); +} + +{ + // Second test, make sure that readable is re-emitted if there's + // already a length, while it IS reading. + + const r = new Readable({ + highWaterMark: 3 + }); + + r._read = common.mustCall(); + + // This triggers a 'readable' event, which is lost. + r.push(Buffer.from('bl')); + + setTimeout(function() { + // Assert we're testing what we think we are + assert(r._readableState.reading); + r.on('readable', common.mustCall()); + }, 1); +} + +{ + // Third test, not reading when the stream has not passed + // the highWaterMark but *has* reached EOF. + const r = new Readable({ + highWaterMark: 30 + }); + + r._read = common.mustNotCall(); + + // This triggers a 'readable' event, which is lost. + r.push(Buffer.from('blerg')); + r.push(null); + + setTimeout(function() { + // Assert we're testing what we think we are + assert(!r._readableState.reading); + r.on('readable', common.mustCall()); + }, 1); +} + +{ + // Pushing an empty string in non-objectMode should + // trigger next `read()`. 
+ const underlyingData = ['', 'x', 'y', '', 'z']; + const expected = underlyingData.filter((data) => data); + const result = []; + + const r = new Readable({ + encoding: 'utf8', + }); + r._read = function() { + process.nextTick(() => { + if (!underlyingData.length) { + this.push(null); + } else { + this.push(underlyingData.shift()); + } + }); + }; + + r.on('readable', () => { + const data = r.read(); + if (data !== null) result.push(data); + }); + + r.on('end', common.mustCall(() => { + assert.deepStrictEqual(result, expected); + })); +} + +{ + // #20923 + const r = new Readable(); + r._read = function() { + // Actually doing thing here + }; + r.on('data', function() {}); + + r.removeAllListeners(); + + assert.strictEqual(r.eventNames().length, 0); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-flow-recursion.js b/test/parallel/test-stream-readable-flow-recursion.js new file mode 100644 index 0000000000..9db6e29da1 --- /dev/null +++ b/test/parallel/test-stream-readable-flow-recursion.js @@ -0,0 +1,92 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); + +// This test verifies that passing a huge number to read(size) +// will push up the highWaterMark, and cause the stream to read +// more data continuously, but without triggering a nextTick +// warning or RangeError. + +const Readable = require('../../lib').Readable; + +// Throw an error if we trigger a nextTick warning. 
+process.throwDeprecation = true; + +const stream = new Readable({ highWaterMark: 2 }); +let reads = 0; +let total = 5000; +stream._read = function(size) { + reads++; + size = Math.min(size, total); + total -= size; + if (size === 0) + stream.push(null); + else + stream.push(Buffer.allocUnsafe(size)); +}; + +let depth = 0; + +function flow(stream, size, callback) { + depth += 1; + const chunk = stream.read(size); + + if (!chunk) + stream.once('readable', flow.bind(null, stream, size, callback)); + else + callback(chunk); + + depth -= 1; + silentConsole.log(`flow(${depth}): exit`); +} + +flow(stream, 5000, function() { + silentConsole.log(`complete (${depth})`); +}); + +process.on('exit', function(code) { + assert.strictEqual(reads, 2); + // We pushed up the high water mark + assert.strictEqual(stream.readableHighWaterMark, 8192); + // Length is 0 right now, because we pulled it all out. + assert.strictEqual(stream.readableLength, 0); + assert(!code); + assert.strictEqual(depth, 0); + silentConsole.log('ok'); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-hwm-0-async.js b/test/parallel/test-stream-readable-hwm-0-async.js new file mode 100644 index 0000000000..f09d93c6ab --- /dev/null +++ b/test/parallel/test-stream-readable-hwm-0-async.js @@ -0,0 +1,42 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); + +// This test ensures that Readable stream will continue to call _read +// for streams with highWaterMark === 0 once the stream returns data +// by calling push() asynchronously. + +const { Readable } = require('../../lib'); + +let count = 5; + +const r = new Readable({ + // Called 6 times: First 5 return data, last one signals end of stream. + read: common.mustCall(() => { + process.nextTick(common.mustCall(() => { + if (count--) + r.push('a'); + else + r.push(null); + })); + }, 6), + highWaterMark: 0, +}); + +r.on('end', common.mustCall()); +r.on('data', common.mustCall(5)); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-hwm-0-no-flow-data.js b/test/parallel/test-stream-readable-hwm-0-no-flow-data.js new file mode 100644 index 0000000000..15bf64ec56 --- /dev/null +++ b/test/parallel/test-stream-readable-hwm-0-no-flow-data.js @@ -0,0 +1,119 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); + +// Ensure that subscribing the 'data' event will not make the stream flow. +// The 'data' event will require calling read() by hand. +// +// The test is written for the (somewhat rare) highWaterMark: 0 streams to +// specifically catch any regressions that might occur with these streams. + +const assert = require('assert'); +const { Readable } = require('../../lib'); + +const streamData = [ 'a', null ]; + +// Track the calls so we can assert their order later. 
+const calls = []; +const r = new Readable({ + read: common.mustCall(() => { + calls.push('_read:' + streamData[0]); + process.nextTick(() => { + calls.push('push:' + streamData[0]); + r.push(streamData.shift()); + }); + }, streamData.length), + highWaterMark: 0, + + // Object mode is used here just for testing convenience. It really + // shouldn't affect the order of events. Just the data and its format. + objectMode: true, +}); + +assert.strictEqual(r.readableFlowing, null); +r.on('readable', common.mustCall(() => { + calls.push('readable'); +}, 2)); +assert.strictEqual(r.readableFlowing, false); +r.on('data', common.mustCall((data) => { + calls.push('data:' + data); +}, 1)); +r.on('end', common.mustCall(() => { + calls.push('end'); +})); +assert.strictEqual(r.readableFlowing, false); + +// The stream emits the events asynchronously but that's not guaranteed to +// happen on the next tick (especially since the _read implementation above +// uses process.nextTick). +// +// We use setImmediate here to give the stream enough time to emit all the +// events it's about to emit. +setImmediate(() => { + + // Only the _read, push, readable calls have happened. No data must be + // emitted yet. + assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable']); + + // Calling 'r.read()' should trigger the data event. + assert.strictEqual(r.read(), 'a'); + assert.deepStrictEqual( + calls, + ['_read:a', 'push:a', 'readable', 'data:a']); + + // The next 'read()' will return null because hwm: 0 does not buffer any + // data and the _read implementation above does the push() asynchronously. + // + // Note: This 'null' signals "no data available". It isn't the end-of-stream + // null value as the stream doesn't know yet that it is about to reach the + // end. + // + // Using setImmediate again to give the stream enough time to emit all the + // events it wants to emit. + assert.strictEqual(r.read(), null); + setImmediate(() => { + + // There's a new 'readable' event after the data has been pushed. + // The 'end' event will be emitted only after a 'read()'. + // + // This is somewhat special for the case where the '_read' implementation + // calls 'push' asynchronously. If 'push' was synchronous, the 'end' event + // would be emitted here _before_ we call read(). + assert.deepStrictEqual( + calls, + ['_read:a', 'push:a', 'readable', 'data:a', + '_read:null', 'push:null', 'readable']); + + assert.strictEqual(r.read(), null); + + // While it isn't really specified whether the 'end' event should happen + // synchronously with read() or not, we'll assert the current behavior + // ('end' event happening on the next tick after read()) so any changes + // to it are noted and acknowledged in the future. 
+ assert.deepStrictEqual( + calls, + ['_read:a', 'push:a', 'readable', 'data:a', + '_read:null', 'push:null', 'readable']); + process.nextTick(() => { + assert.deepStrictEqual( + calls, + ['_read:a', 'push:a', 'readable', 'data:a', + '_read:null', 'push:null', 'readable', 'end']); + }); + }); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-hwm-0.js b/test/parallel/test-stream-readable-hwm-0.js new file mode 100644 index 0000000000..352fa1eb58 --- /dev/null +++ b/test/parallel/test-stream-readable-hwm-0.js @@ -0,0 +1,45 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); + +// This test ensures that Readable stream will call _read() for streams +// with highWaterMark === 0 upon .read(0) instead of just trying to +// emit 'readable' event. + +const assert = require('assert'); +const { Readable } = require('../../lib'); + +const r = new Readable({ + // Must be called only once upon setting 'readable' listener + read: common.mustCall(), + highWaterMark: 0, +}); + +let pushedNull = false; +// This will trigger read(0) but must only be called after push(null) +// because the we haven't pushed any data +r.on('readable', common.mustCall(() => { + assert.strictEqual(r.read(), null); + assert.strictEqual(pushedNull, true); +})); +r.on('end', common.mustCall()); +process.nextTick(() => { + assert.strictEqual(r.read(), null); + pushedNull = true; + r.push(null); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-infinite-read.js b/test/parallel/test-stream-readable-infinite-read.js new file mode 100644 index 0000000000..a4cf0a625e --- /dev/null +++ b/test/parallel/test-stream-readable-infinite-read.js @@ -0,0 +1,47 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const { Readable } = require('../../lib'); + +const buf = Buffer.alloc(8192); + +const readable = new Readable({ + read: common.mustCall(function() { + this.push(buf); + }, 31) +}); + +let i = 0; + +readable.on('readable', common.mustCall(function() { + if (i++ === 10) { + // We will just terminate now. + process.removeAllListeners('readable'); + return; + } + + const data = readable.read(); + // TODO(mcollina): there is something odd in the highWaterMark logic + // investigate. 
+ if (i === 1) { + assert.strictEqual(data.length, 8192 * 2); + } else { + assert.strictEqual(data.length, 8192 * 3); + } +}, 11)); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-invalid-chunk.js b/test/parallel/test-stream-readable-invalid-chunk.js new file mode 100644 index 0000000000..0b6069d9e8 --- /dev/null +++ b/test/parallel/test-stream-readable-invalid-chunk.js @@ -0,0 +1,49 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const stream = require('../../lib'); + +function testPushArg(val) { + const readable = new stream.Readable({ + read: () => {} + }); + readable.on('error', common.expectsError({ + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError' + })); + readable.push(val); +} + +testPushArg([]); +testPushArg({}); +testPushArg(0); + +function testUnshiftArg(val) { + const readable = new stream.Readable({ + read: () => {} + }); + readable.on('error', common.expectsError({ + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError' + })); + readable.unshift(val); +} + +testUnshiftArg([]); +testUnshiftArg({}); +testUnshiftArg(0); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-needReadable.js b/test/parallel/test-stream-readable-needReadable.js new file mode 100644 index 0000000000..45028dc027 --- /dev/null +++ b/test/parallel/test-stream-readable-needReadable.js @@ -0,0 +1,114 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const Readable = require('../../lib').Readable; + +const readable = new Readable({ + read: () => {} +}); + +// Initialized to false. +assert.strictEqual(readable._readableState.needReadable, false); + +readable.on('readable', common.mustCall(() => { + // When the readable event fires, needReadable is reset. + assert.strictEqual(readable._readableState.needReadable, false); + readable.read(); +})); + +// If a readable listener is attached, then a readable event is needed. +assert.strictEqual(readable._readableState.needReadable, true); + +readable.push('foo'); +readable.push(null); + +readable.on('end', common.mustCall(() => { + // No need to emit readable anymore when the stream ends. + assert.strictEqual(readable._readableState.needReadable, false); +})); + +const asyncReadable = new Readable({ + read: () => {} +}); + +asyncReadable.on('readable', common.mustCall(() => { + if (asyncReadable.read() !== null) { + // After each read(), the buffer is empty. + // If the stream doesn't end now, + // then we need to notify the reader on future changes. + assert.strictEqual(asyncReadable._readableState.needReadable, true); + } +}, 2)); + +process.nextTick(common.mustCall(() => { + asyncReadable.push('foooo'); +})); +process.nextTick(common.mustCall(() => { + asyncReadable.push('bar'); +})); +setImmediate(common.mustCall(() => { + asyncReadable.push(null); + assert.strictEqual(asyncReadable._readableState.needReadable, false); +})); + +const flowing = new Readable({ + read: () => {} +}); + +// Notice this must be above the on('data') call. 
+flowing.push('foooo'); +flowing.push('bar'); +flowing.push('quo'); +process.nextTick(common.mustCall(() => { + flowing.push(null); +})); + +// When the buffer already has enough data, and the stream is +// in flowing mode, there is no need for the readable event. +flowing.on('data', common.mustCall(function(data) { + assert.strictEqual(flowing._readableState.needReadable, false); +}, 3)); + +const slowProducer = new Readable({ + read: () => {} +}); + +slowProducer.on('readable', common.mustCall(() => { + const chunk = slowProducer.read(8); + const state = slowProducer._readableState; + if (chunk === null) { + // The buffer doesn't have enough data, and the stream is not need, + // we need to notify the reader when data arrives. + assert.strictEqual(state.needReadable, true); + } else { + assert.strictEqual(state.needReadable, false); + } +}, 4)); + +process.nextTick(common.mustCall(() => { + slowProducer.push('foo'); + process.nextTick(common.mustCall(() => { + slowProducer.push('foo'); + process.nextTick(common.mustCall(() => { + slowProducer.push('foo'); + process.nextTick(common.mustCall(() => { + slowProducer.push(null); + })); + })); + })); +})); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-next-no-null.js b/test/parallel/test-stream-readable-next-no-null.js new file mode 100644 index 0000000000..7fa8c8776d --- /dev/null +++ b/test/parallel/test-stream-readable-next-no-null.js @@ -0,0 +1,34 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const { mustNotCall, expectsError } = require('../common'); +const { Readable } = require('../../lib'); + +async function* generate() { + yield null; +} + +const stream = Readable.from(generate()); + +stream.on('error', expectsError({ + code: 'ERR_STREAM_NULL_VALUES', + name: 'TypeError', + message: 'May not write null values to stream' +})); + +stream.on('data', mustNotCall((chunk) => {})); + +stream.on('end', mustNotCall()); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-no-unneeded-readable.js b/test/parallel/test-stream-readable-no-unneeded-readable.js new file mode 100644 index 0000000000..36dc08f064 --- /dev/null +++ b/test/parallel/test-stream-readable-no-unneeded-readable.js @@ -0,0 +1,77 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { Readable, PassThrough } = require('../../lib'); + +function test(r) { + const wrapper = new Readable({ + read: () => { + let data = r.read(); + + if (data) { + wrapper.push(data); + return; + } + + r.once('readable', function() { + data = r.read(); + if (data) { + wrapper.push(data); + } + // else: the end event should fire + }); + }, + }); + + r.once('end', function() { + wrapper.push(null); + }); + + wrapper.resume(); + wrapper.once('end', common.mustCall()); +} + +{ + const source = new Readable({ + read: () => {} + }); + source.push('foo'); + source.push('bar'); + source.push(null); + + const pt = source.pipe(new PassThrough()); + test(pt); +} + +{ + // This is the underlying cause of the above test case. 
+ const pushChunks = ['foo', 'bar']; + const r = new Readable({ + read: () => { + const chunk = pushChunks.shift(); + if (chunk) { + // synchronous call + r.push(chunk); + } else { + // asynchronous call + process.nextTick(() => r.push(null)); + } + }, + }); + + test(r); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-object-multi-push-async.js b/test/parallel/test-stream-readable-object-multi-push-async.js new file mode 100644 index 0000000000..66fc2b2354 --- /dev/null +++ b/test/parallel/test-stream-readable-object-multi-push-async.js @@ -0,0 +1,198 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const { Readable } = require('../../lib'); + +const MAX = 42; +const BATCH = 10; + +{ + const readable = new Readable({ + objectMode: true, + read: common.mustCall(function() { + silentConsole.log('>> READ'); + fetchData((err, data) => { + if (err) { + this.destroy(err); + return; + } + + if (data.length === 0) { + silentConsole.log('pushing null'); + this.push(null); + return; + } + + silentConsole.log('pushing'); + data.forEach((d) => this.push(d)); + }); + }, Math.floor(MAX / BATCH) + 2) + }); + + let i = 0; + function fetchData(cb) { + if (i > MAX) { + setTimeout(cb, 10, null, []); + } else { + const array = []; + const max = i + BATCH; + for (; i < max; i++) { + array.push(i); + } + setTimeout(cb, 10, null, array); + } + } + + readable.on('readable', () => { + let data; + silentConsole.log('readable emitted'); + while ((data = readable.read()) !== null) { + silentConsole.log(data); + } + }); + + readable.on('end', common.mustCall(() => { + assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH); + })); +} + +{ + const readable = new Readable({ + objectMode: true, + read: common.mustCall(function() { + silentConsole.log('>> READ'); + fetchData((err, data) => { + if (err) { + this.destroy(err); + return; + } + + if (data.length === 0) { + silentConsole.log('pushing null'); + this.push(null); + return; + } + + silentConsole.log('pushing'); + data.forEach((d) => this.push(d)); + }); + }, Math.floor(MAX / BATCH) + 2) + }); + + let i = 0; + function fetchData(cb) { + if (i > MAX) { + setTimeout(cb, 10, null, []); + } else { + const array = []; + const max = i + BATCH; + for (; i < max; i++) { + array.push(i); + } + setTimeout(cb, 10, null, array); + } + } + + readable.on('data', (data) => { + silentConsole.log('data emitted', data); + }); + + readable.on('end', common.mustCall(() => { + assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH); + })); +} + +{ + const readable = new Readable({ + objectMode: true, + read: common.mustCall(function() { + silentConsole.log('>> READ'); + fetchData((err, data) => { + if (err) { + this.destroy(err); + return; + } + + silentConsole.log('pushing'); + data.forEach((d) => this.push(d)); + + if (data[BATCH - 1] >= MAX) { + silentConsole.log('pushing null'); + this.push(null); + } + }); + }, Math.floor(MAX / BATCH) + 1) + }); + + let i = 0; + function fetchData(cb) { + const array = []; + const max = i + BATCH; + for (; i < max; i++) { + array.push(i); + } + setTimeout(cb, 10, null, array); + } + + readable.on('data', (data) => { + silentConsole.log('data emitted', data); + }); + + readable.on('end', 
common.mustCall(() => { + assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH); + })); +} + +{ + const readable = new Readable({ + objectMode: true, + read: common.mustNotCall() + }); + + readable.on('data', common.mustNotCall()); + + readable.push(null); + + let nextTickPassed = false; + process.nextTick(() => { + nextTickPassed = true; + }); + + readable.on('end', common.mustCall(() => { + assert.strictEqual(nextTickPassed, true); + })); +} + +{ + const readable = new Readable({ + objectMode: true, + read: common.mustCall() + }); + + readable.on('data', (data) => { + silentConsole.log('data emitted', data); + }); + + readable.on('end', common.mustCall()); + + setImmediate(() => { + readable.push('aaa'); + readable.push(null); + }); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-pause-and-resume.js b/test/parallel/test-stream-readable-pause-and-resume.js new file mode 100644 index 0000000000..005b799528 --- /dev/null +++ b/test/parallel/test-stream-readable-pause-and-resume.js @@ -0,0 +1,89 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const { Readable } = require('../../lib'); + +let ticks = 18; +let expectedData = 19; + +const rs = new Readable({ + objectMode: true, + read: () => { + if (ticks-- > 0) + return process.nextTick(() => rs.push({})); + rs.push({}); + rs.push(null); + } +}); + +rs.on('end', common.mustCall()); +readAndPause(); + +function readAndPause() { + // Does a on(data) -> pause -> wait -> resume -> on(data) ... loop. + // Expects on(data) to never fire if the stream is paused. 
+ const ondata = common.mustCall((data) => { + rs.pause(); + + expectedData--; + if (expectedData <= 0) + return; + + setImmediate(function() { + rs.removeListener('data', ondata); + readAndPause(); + rs.resume(); + }); + }, 1); // Only call ondata once + + rs.on('data', ondata); +} + +{ + const readable = new Readable({ + read() {} + }); + + function read() {} + + readable.setEncoding('utf8'); + readable.on('readable', read); + readable.removeListener('readable', read); + readable.pause(); + + process.nextTick(function() { + assert(readable.isPaused()); + }); +} + +{ + const { PassThrough } = require('../../lib'); + + const source3 = new PassThrough(); + const target3 = new PassThrough(); + + const chunk = Buffer.allocUnsafe(1000); + while (target3.write(chunk)); + + source3.pipe(target3); + target3.on('drain', common.mustCall(() => { + assert(!source3.isPaused()); + })); + target3.on('data', () => {}); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-readable-then-resume.js b/test/parallel/test-stream-readable-readable-then-resume.js new file mode 100644 index 0000000000..c4f8206d63 --- /dev/null +++ b/test/parallel/test-stream-readable-readable-then-resume.js @@ -0,0 +1,46 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Readable } = require('../../lib'); +const assert = require('assert'); + +// This test verifies that a stream could be resumed after +// removing the readable event in the same tick + +check(new Readable({ + objectMode: true, + highWaterMark: 1, + read() { + if (!this.first) { + this.push('hello'); + this.first = true; + return; + } + + this.push(null); + } +})); + +function check(s) { + const readableListener = common.mustNotCall(); + s.on('readable', readableListener); + s.on('end', common.mustCall()); + assert.strictEqual(s.removeListener, s.off); + s.removeListener('readable', readableListener); + s.resume(); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-readable.js b/test/parallel/test-stream-readable-readable.js new file mode 100644 index 0000000000..42b7ab5c7f --- /dev/null +++ b/test/parallel/test-stream-readable-readable.js @@ -0,0 +1,60 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const { Readable } = require('../../lib'); + +{ + const r = new Readable({ + read() {} + }); + assert.strictEqual(r.readable, true); + r.destroy(); + assert.strictEqual(r.readable, false); +} + +{ + const mustNotCall = common.mustNotCall(); + const r = new Readable({ + read() {} + }); + assert.strictEqual(r.readable, true); + r.on('end', mustNotCall); + r.resume(); + r.push(null); + assert.strictEqual(r.readable, true); + r.off('end', mustNotCall); + r.on('end', common.mustCall(() => { + assert.strictEqual(r.readable, false); + })); +} + +{ + const r = new Readable({ + read: common.mustCall(() => { + process.nextTick(() => { + r.destroy(new Error()); + assert.strictEqual(r.readable, false); + }); + }) + }); + r.resume(); + 
r.on('error', common.mustCall(() => { + assert.strictEqual(r.readable, false); + })); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-reading-readingMore.js b/test/parallel/test-stream-readable-reading-readingMore.js new file mode 100644 index 0000000000..447bbbf9b5 --- /dev/null +++ b/test/parallel/test-stream-readable-reading-readingMore.js @@ -0,0 +1,186 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const Readable = require('../../lib').Readable; + +{ + const readable = new Readable({ + read(size) {} + }); + + const state = readable._readableState; + + // Starting off with false initially. + assert.strictEqual(state.reading, false); + assert.strictEqual(state.readingMore, false); + + readable.on('data', common.mustCall((data) => { + // While in a flowing state with a 'readable' listener + // we should not be reading more + if (readable.readableFlowing) + assert.strictEqual(state.readingMore, true); + + // Reading as long as we've not ended + assert.strictEqual(state.reading, !state.ended); + }, 2)); + + function onStreamEnd() { + // End of stream; state.reading is false + // And so should be readingMore. + assert.strictEqual(state.readingMore, false); + assert.strictEqual(state.reading, false); + } + + const expectedReadingMore = [true, true, false]; + readable.on('readable', common.mustCall(() => { + // There is only one readingMore scheduled from on('data'), + // after which everything is governed by the .read() call + assert.strictEqual(state.readingMore, expectedReadingMore.shift()); + + // If the stream has ended, we shouldn't be reading + assert.strictEqual(state.ended, !state.reading); + + // Consume all the data + while (readable.read() !== null); + + if (expectedReadingMore.length === 0) // Reached end of stream + process.nextTick(common.mustCall(onStreamEnd, 1)); + }, 3)); + + readable.on('end', common.mustCall(onStreamEnd)); + readable.push('pushed'); + + readable.read(6); + + // reading + assert.strictEqual(state.reading, true); + assert.strictEqual(state.readingMore, true); + + // add chunk to front + readable.unshift('unshifted'); + + // end + readable.push(null); +} + +{ + const readable = new Readable({ + read(size) {} + }); + + const state = readable._readableState; + + // Starting off with false initially. + assert.strictEqual(state.reading, false); + assert.strictEqual(state.readingMore, false); + + readable.on('data', common.mustCall((data) => { + // While in a flowing state without a 'readable' listener + // we should be reading more + if (readable.readableFlowing) + assert.strictEqual(state.readingMore, true); + + // Reading as long as we've not ended + assert.strictEqual(state.reading, !state.ended); + }, 2)); + + function onStreamEnd() { + // End of stream; state.reading is false + // And so should be readingMore. 
+ assert.strictEqual(state.readingMore, false); + assert.strictEqual(state.reading, false); + } + + readable.on('end', common.mustCall(onStreamEnd)); + readable.push('pushed'); + + // Stop emitting 'data' events + assert.strictEqual(state.flowing, true); + readable.pause(); + + // paused + assert.strictEqual(state.reading, false); + assert.strictEqual(state.flowing, false); + + readable.resume(); + assert.strictEqual(state.reading, false); + assert.strictEqual(state.flowing, true); + + // add chunk to front + readable.unshift('unshifted'); + + // end + readable.push(null); +} + +{ + const readable = new Readable({ + read(size) {} + }); + + const state = readable._readableState; + + // Starting off with false initially. + assert.strictEqual(state.reading, false); + assert.strictEqual(state.readingMore, false); + + const onReadable = common.mustNotCall; + + readable.on('readable', onReadable); + + readable.on('data', common.mustCall((data) => { + // Reading as long as we've not ended + assert.strictEqual(state.reading, !state.ended); + }, 2)); + + readable.removeListener('readable', onReadable); + + function onStreamEnd() { + // End of stream; state.reading is false + // And so should be readingMore. + assert.strictEqual(state.readingMore, false); + assert.strictEqual(state.reading, false); + } + + readable.on('end', common.mustCall(onStreamEnd)); + readable.push('pushed'); + + // We are still not flowing, we will be resuming in the next tick + assert.strictEqual(state.flowing, false); + + // Wait for nextTick, so the readableListener flag resets + process.nextTick(function() { + readable.resume(); + + // Stop emitting 'data' events + assert.strictEqual(state.flowing, true); + readable.pause(); + + // paused + assert.strictEqual(state.flowing, false); + + readable.resume(); + assert.strictEqual(state.flowing, true); + + // add chunk to front + readable.unshift('unshifted'); + + // end + readable.push(null); + }); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-resume-hwm.js b/test/parallel/test-stream-readable-resume-hwm.js new file mode 100644 index 0000000000..c32f6487c4 --- /dev/null +++ b/test/parallel/test-stream-readable-resume-hwm.js @@ -0,0 +1,36 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { Readable } = require('../../lib'); + +// readable.resume() should not lead to a ._read() call being scheduled +// when we exceed the high water mark already. + +const readable = new Readable({ + read: common.mustNotCall(), + highWaterMark: 100 +}); + +// Fill up the internal buffer so that we definitely exceed the HWM: +for (let i = 0; i < 10; i++) + readable.push('a'.repeat(200)); + +// Call resume, and pause after one chunk. +// The .pause() is just so that we don’t empty the buffer fully, which would +// be a valid reason to call ._read(). 
+readable.resume(); +readable.once('data', common.mustCall(() => readable.pause())); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-resumeScheduled.js b/test/parallel/test-stream-readable-resumeScheduled.js new file mode 100644 index 0000000000..9c0da6384f --- /dev/null +++ b/test/parallel/test-stream-readable-resumeScheduled.js @@ -0,0 +1,80 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); + +// Testing Readable Stream resumeScheduled state + +const assert = require('assert'); +const { Readable, Writable } = require('../../lib'); + +{ + // pipe() test case + const r = new Readable({ read() {} }); + const w = new Writable(); + + // resumeScheduled should start = `false`. + assert.strictEqual(r._readableState.resumeScheduled, false); + + // Calling pipe() should change the state value = true. + r.pipe(w); + assert.strictEqual(r._readableState.resumeScheduled, true); + + process.nextTick(common.mustCall(() => { + assert.strictEqual(r._readableState.resumeScheduled, false); + })); +} + +{ + // 'data' listener test case + const r = new Readable({ read() {} }); + + // resumeScheduled should start = `false`. + assert.strictEqual(r._readableState.resumeScheduled, false); + + r.push(Buffer.from([1, 2, 3])); + + // Adding 'data' listener should change the state value + r.on('data', common.mustCall(() => { + assert.strictEqual(r._readableState.resumeScheduled, false); + })); + assert.strictEqual(r._readableState.resumeScheduled, true); + + process.nextTick(common.mustCall(() => { + assert.strictEqual(r._readableState.resumeScheduled, false); + })); +} + +{ + // resume() test case + const r = new Readable({ read() {} }); + + // resumeScheduled should start = `false`. + assert.strictEqual(r._readableState.resumeScheduled, false); + + // Calling resume() should change the state value. + r.resume(); + assert.strictEqual(r._readableState.resumeScheduled, true); + + r.on('resume', common.mustCall(() => { + // The state value should be `false` again + assert.strictEqual(r._readableState.resumeScheduled, false); + })); + + process.nextTick(common.mustCall(() => { + assert.strictEqual(r._readableState.resumeScheduled, false); + })); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-setEncoding-existing-buffers.js b/test/parallel/test-stream-readable-setEncoding-existing-buffers.js new file mode 100644 index 0000000000..2bc8536716 --- /dev/null +++ b/test/parallel/test-stream-readable-setEncoding-existing-buffers.js @@ -0,0 +1,75 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const { Readable } = require('../../lib'); +const assert = require('assert'); + +{ + // Call .setEncoding() while there are bytes already in the buffer. 
+ const r = new Readable({ read() {} }); + + r.push(Buffer.from('a')); + r.push(Buffer.from('b')); + + r.setEncoding('utf8'); + const chunks = []; + r.on('data', (chunk) => chunks.push(chunk)); + + process.nextTick(() => { + assert.deepStrictEqual(chunks, ['ab']); + }); +} + +{ + // Call .setEncoding() while the buffer contains a complete, + // but chunked character. + const r = new Readable({ read() {} }); + + r.push(Buffer.from([0xf0])); + r.push(Buffer.from([0x9f])); + r.push(Buffer.from([0x8e])); + r.push(Buffer.from([0x89])); + + r.setEncoding('utf8'); + const chunks = []; + r.on('data', (chunk) => chunks.push(chunk)); + + process.nextTick(() => { + assert.deepStrictEqual(chunks, ['🎉']); + }); +} + +{ + // Call .setEncoding() while the buffer contains an incomplete character, + // and finish the character later. + const r = new Readable({ read() {} }); + + r.push(Buffer.from([0xf0])); + r.push(Buffer.from([0x9f])); + + r.setEncoding('utf8'); + + r.push(Buffer.from([0x8e])); + r.push(Buffer.from([0x89])); + + const chunks = []; + r.on('data', (chunk) => chunks.push(chunk)); + + process.nextTick(() => { + assert.deepStrictEqual(chunks, ['🎉']); + }); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-setEncoding-null.js b/test/parallel/test-stream-readable-setEncoding-null.js new file mode 100644 index 0000000000..3810288be0 --- /dev/null +++ b/test/parallel/test-stream-readable-setEncoding-null.js @@ -0,0 +1,30 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +require('../common'); +const assert = require('assert'); +const { Readable } = require('../../lib'); + + +{ + const readable = new Readable({ encoding: 'hex' }); + assert.strictEqual(readable._readableState.encoding, 'hex'); + + readable.setEncoding(null); + + assert.strictEqual(readable._readableState.encoding, 'utf8'); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-unpipe-resume.js b/test/parallel/test-stream-readable-unpipe-resume.js new file mode 100644 index 0000000000..8559e8a743 --- /dev/null +++ b/test/parallel/test-stream-readable-unpipe-resume.js @@ -0,0 +1,35 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const stream = require('../../lib'); +const fs = require('fs'); + +const readStream = fs.createReadStream(process.execPath); + +const transformStream = new stream.Transform({ + transform: common.mustCall(() => { + readStream.unpipe(); + readStream.resume(); + }) +}); + +readStream.on('end', common.mustCall()); + +readStream + .pipe(transformStream) + .resume(); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-unshift.js b/test/parallel/test-stream-readable-unshift.js new file mode 100644 index 0000000000..0e77bec1f4 --- /dev/null +++ b/test/parallel/test-stream-readable-unshift.js @@ -0,0 +1,185 @@ + + 'use strict' + + const tap = 
require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const { Readable } = require('../../lib'); + +{ + // Check that strings are saved as Buffer + const readable = new Readable({ read() {} }); + + const string = 'abc'; + + readable.on('data', common.mustCall((chunk) => { + assert(Buffer.isBuffer(chunk)); + assert.strictEqual(chunk.toString('utf8'), string); + }, 1)); + + readable.unshift(string); + +} + +{ + // Check that data goes at the beginning + const readable = new Readable({ read() {} }); + const unshift = 'front'; + const push = 'back'; + + const expected = [unshift, push]; + readable.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk.toString('utf8'), expected.shift()); + }, 2)); + + + readable.push(push); + readable.unshift(unshift); +} + +{ + // Check that buffer is saved with correct encoding + const readable = new Readable({ read() {} }); + + const encoding = 'base64'; + const string = Buffer.from('abc').toString(encoding); + + readable.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk.toString(encoding), string); + }, 1)); + + readable.unshift(string, encoding); + +} + +{ + + const streamEncoding = 'base64'; + + function checkEncoding(readable) { + + // chunk encodings + const encodings = ['utf8', 'binary', 'hex', 'base64']; + const expected = []; + + readable.on('data', common.mustCall((chunk) => { + const { encoding, string } = expected.pop(); + assert.strictEqual(chunk.toString(encoding), string); + }, encodings.length)); + + for (const encoding of encodings) { + const string = 'abc'; + + // If encoding is the same as the state.encoding the string is + // saved as is + const expect = encoding !== streamEncoding ? 
+ Buffer.from(string, encoding).toString(streamEncoding) : string; + + expected.push({ encoding, string: expect }); + + readable.unshift(string, encoding); + } + } + + const r1 = new Readable({ read() {} }); + r1.setEncoding(streamEncoding); + checkEncoding(r1); + + const r2 = new Readable({ read() {}, encoding: streamEncoding }); + checkEncoding(r2); + +} + +{ + // Both .push & .unshift should have the same behaviour + // When setting an encoding, each chunk should be emitted with that encoding + const encoding = 'base64'; + + function checkEncoding(readable) { + const string = 'abc'; + readable.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk, Buffer.from(string).toString(encoding)); + }, 2)); + + readable.push(string); + readable.unshift(string); + } + + const r1 = new Readable({ read() {} }); + r1.setEncoding(encoding); + checkEncoding(r1); + + const r2 = new Readable({ read() {}, encoding }); + checkEncoding(r2); + +} + +{ + // Check that ObjectMode works + const readable = new Readable({ objectMode: true, read() {} }); + + const chunks = ['a', 1, {}, []]; + + readable.on('data', common.mustCall((chunk) => { + assert.strictEqual(chunk, chunks.pop()); + }, chunks.length)); + + for (const chunk of chunks) { + readable.unshift(chunk); + } +} + +{ + + // Should not throw: https://github.com/nodejs/node/issues/27192 + const highWaterMark = 50; + class ArrayReader extends Readable { + constructor(opt) { + super({ highWaterMark }); + // The error happened only when pushing above hwm + this.buffer = new Array(highWaterMark * 2).fill(0).map(String); + } + _read(size) { + while (this.buffer.length) { + const chunk = this.buffer.shift(); + if (!this.buffer.length) { + this.push(chunk); + this.push(null); + return true; + } + if (!this.push(chunk)) + return; + } + } + } + + function onRead() { + while (null !== (stream.read())) { + // Remove the 'readable' listener before unshifting + stream.removeListener('readable', onRead); + stream.unshift('a'); + stream.on('data', (chunk) => { + silentConsole.log(chunk.length); + }); + break; + } + } + + const stream = new ArrayReader(); + stream.once('readable', common.mustCall(onRead)); + stream.on('end', common.mustCall(() => {})); + +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readable-with-unimplemented-_read.js b/test/parallel/test-stream-readable-with-unimplemented-_read.js new file mode 100644 index 0000000000..d244bc9ac1 --- /dev/null +++ b/test/parallel/test-stream-readable-with-unimplemented-_read.js @@ -0,0 +1,28 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { Readable } = require('../../lib'); + +const readable = new Readable(); + +readable.read(); +readable.on('error', common.expectsError({ + code: 'ERR_METHOD_NOT_IMPLEMENTED', + name: 'Error', + message: 'The _read() method is not implemented' +})); +readable.on('close', common.mustCall()); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-readableListening-state.js b/test/parallel/test-stream-readableListening-state.js new file mode 100644 index 0000000000..6738463ade --- 
/dev/null +++ b/test/parallel/test-stream-readableListening-state.js @@ -0,0 +1,49 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); + +const r = new stream.Readable({ + read: () => {} +}); + +// readableListening state should start in `false`. +assert.strictEqual(r._readableState.readableListening, false); + +r.on('readable', common.mustCall(() => { + // Inside the readable event this state should be true. + assert.strictEqual(r._readableState.readableListening, true); +})); + +r.push(Buffer.from('Testing readableListening state')); + +const r2 = new stream.Readable({ + read: () => {} +}); + +// readableListening state should start in `false`. +assert.strictEqual(r2._readableState.readableListening, false); + +r2.on('data', common.mustCall((chunk) => { + // readableListening should be false because we don't have + // a `readable` listener + assert.strictEqual(r2._readableState.readableListening, false); +})); + +r2.push(Buffer.from('Testing readableListening state')); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-reduce.js b/test/parallel/test-stream-reduce.js new file mode 100644 index 0000000000..58c7c2db92 --- /dev/null +++ b/test/parallel/test-stream-reduce.js @@ -0,0 +1,147 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { + Readable, +} = require('../../lib'); +const assert = require('assert'); + +function sum(p, c) { + return p + c; +} + +{ + // Does the same thing as `(await stream.toArray()).reduce(...)` + (async () => { + const tests = [ + [[], sum, 0], + [[1], sum, 0], + [[1, 2, 3, 4, 5], sum, 0], + [[...Array(100).keys()], sum, 0], + [['a', 'b', 'c'], sum, ''], + [[1, 2], sum], + [[1, 2, 3], (x, y) => y], + ]; + for (const [values, fn, initial] of tests) { + const streamReduce = await Readable.from(values) + .reduce(fn, initial); + const arrayReduce = values.reduce(fn, initial); + assert.deepStrictEqual(streamReduce, arrayReduce); + } + // Does the same thing as `(await stream.toArray()).reduce(...)` with an + // asynchronous reducer + for (const [values, fn, initial] of tests) { + const streamReduce = await Readable.from(values) + .map(async (x) => x) + .reduce(fn, initial); + const arrayReduce = values.reduce(fn, initial); + assert.deepStrictEqual(streamReduce, arrayReduce); + } + })().then(common.mustCall()); +} +{ + // Works with an async reducer, with or without initial value + (async () => { + const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c, 0); + assert.strictEqual(six, 6); + })().then(common.mustCall()); + (async () => { + const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c); + assert.strictEqual(six, 6); + })().then(common.mustCall()); +} +{ + // Works lazily + assert.rejects(Readable.from([1, 2, 3, 4, 5, 6]) + .map(common.mustCall((x) => { + return x; + }, 3)) // Two consumed and one buffered by `map` due to default concurrency + .reduce(async (p, c) => { + if (p === 1) { + throw new Error('boom'); + } + return c; + }, 0) + , /boom/).then(common.mustCall()); +} + +{ + // Support for AbortSignal + const ac = new AbortController(); + assert.rejects(async () 
=> { + await Readable.from([1, 2, 3]).reduce(async (p, c) => { + if (c === 3) { + await new Promise(() => {}); // Explicitly do not pass signal here + } + return Promise.resolve(); + }, 0, { signal: ac.signal }); + }, { + name: 'AbortError', + }).then(common.mustCall()); + ac.abort(); +} + + +{ + // Support for AbortSignal - pre aborted + const stream = Readable.from([1, 2, 3]); + assert.rejects(async () => { + await stream.reduce(async (p, c) => { + if (c === 3) { + await new Promise(() => {}); // Explicitly do not pass signal here + } + return Promise.resolve(); + }, 0, { signal: AbortSignal.abort() }); + }, { + name: 'AbortError', + }).then(common.mustCall(() => { + assert.strictEqual(stream.destroyed, true); + })); +} + +{ + // Support for AbortSignal - deep + const stream = Readable.from([1, 2, 3]); + assert.rejects(async () => { + await stream.reduce(async (p, c, { signal }) => { + signal.addEventListener('abort', common.mustCall(), { once: true }); + if (c === 3) { + await new Promise(() => {}); // Explicitly do not pass signal here + } + return Promise.resolve(); + }, 0, { signal: AbortSignal.abort() }); + }, { + name: 'AbortError', + }).then(common.mustCall(() => { + assert.strictEqual(stream.destroyed, true); + })); +} + +{ + // Error cases + assert.rejects(() => Readable.from([]).reduce(1), /TypeError/); + assert.rejects(() => Readable.from([]).reduce('5'), /TypeError/); + assert.rejects(() => Readable.from([]).reduce((x, y) => x + y, 0, 1), /ERR_INVALID_ARG_TYPE/); + assert.rejects(() => Readable.from([]).reduce((x, y) => x + y, 0, { signal: true }), /ERR_INVALID_ARG_TYPE/); +} + +{ + // Test result is a Promise + const result = Readable.from([1, 2, 3, 4, 5]).reduce(sum, 0); + assert.ok(result instanceof Promise); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-some-find-every.mjs b/test/parallel/test-stream-some-find-every.mjs new file mode 100644 index 0000000000..4dfd9a8461 --- /dev/null +++ b/test/parallel/test-stream-some-find-every.mjs @@ -0,0 +1,183 @@ +import * as common from '../common/index.mjs'; +import { setTimeout } from 'timers/promises'; +import { Readable }from '../../lib/index.js'; +import assert from 'assert'; +import tap from 'tap'; + + +function oneTo5() { + return Readable.from([1, 2, 3, 4, 5]); +} + +function oneTo5Async() { + return oneTo5().map(async (x) => { + await Promise.resolve(); + return x; + }); +} +{ + // Some, find, and every work with a synchronous stream and predicate + assert.strictEqual(await oneTo5().some((x) => x > 3), true); + assert.strictEqual(await oneTo5().every((x) => x > 3), false); + assert.strictEqual(await oneTo5().find((x) => x > 3), 4); + assert.strictEqual(await oneTo5().some((x) => x > 6), false); + assert.strictEqual(await oneTo5().every((x) => x < 6), true); + assert.strictEqual(await oneTo5().find((x) => x > 6), undefined); + assert.strictEqual(await Readable.from([]).some(() => true), false); + assert.strictEqual(await Readable.from([]).every(() => true), true); + assert.strictEqual(await Readable.from([]).find(() => true), undefined); +} + +{ + // Some, find, and every work with an asynchronous stream and synchronous predicate + assert.strictEqual(await oneTo5Async().some((x) => x > 3), true); + assert.strictEqual(await oneTo5Async().every((x) => x > 3), false); + assert.strictEqual(await oneTo5Async().find((x) => x > 
3), 4); + assert.strictEqual(await oneTo5Async().some((x) => x > 6), false); + assert.strictEqual(await oneTo5Async().every((x) => x < 6), true); + assert.strictEqual(await oneTo5Async().find((x) => x > 6), undefined); +} + +{ + // Some, find, and every work on synchronous streams with an asynchronous predicate + assert.strictEqual(await oneTo5().some(async (x) => x > 3), true); + assert.strictEqual(await oneTo5().every(async (x) => x > 3), false); + assert.strictEqual(await oneTo5().find(async (x) => x > 3), 4); + assert.strictEqual(await oneTo5().some(async (x) => x > 6), false); + assert.strictEqual(await oneTo5().every(async (x) => x < 6), true); + assert.strictEqual(await oneTo5().find(async (x) => x > 6), undefined); +} + +{ + // Some, find, and every work on asynchronous streams with an asynchronous predicate + assert.strictEqual(await oneTo5Async().some(async (x) => x > 3), true); + assert.strictEqual(await oneTo5Async().every(async (x) => x > 3), false); + assert.strictEqual(await oneTo5Async().find(async (x) => x > 3), 4); + assert.strictEqual(await oneTo5Async().some(async (x) => x > 6), false); + assert.strictEqual(await oneTo5Async().every(async (x) => x < 6), true); + assert.strictEqual(await oneTo5Async().find(async (x) => x > 6), undefined); +} + +{ + async function checkDestroyed(stream) { + await setTimeout(); + assert.strictEqual(stream.destroyed, true); + } + + { + // Some, find, and every short circuit + const someStream = oneTo5(); + await someStream.some(common.mustCall((x) => x > 2, 3)); + await checkDestroyed(someStream); + + const everyStream = oneTo5(); + await everyStream.every(common.mustCall((x) => x < 3, 3)); + await checkDestroyed(everyStream); + + const findStream = oneTo5(); + await findStream.find(common.mustCall((x) => x > 1, 2)); + await checkDestroyed(findStream); + + // When short circuit isn't possible the whole stream is iterated + await oneTo5().some(common.mustCall(() => false, 5)); + await oneTo5().every(common.mustCall(() => true, 5)); + await oneTo5().find(common.mustCall(() => false, 5)); + } + + { + // Some, find, and every short circuit async stream/predicate + const someStream = oneTo5Async(); + await someStream.some(common.mustCall(async (x) => x > 2, 3)); + await checkDestroyed(someStream); + + const everyStream = oneTo5Async(); + await everyStream.every(common.mustCall(async (x) => x < 3, 3)); + await checkDestroyed(everyStream); + + const findStream = oneTo5Async(); + await findStream.find(common.mustCall(async (x) => x > 1, 2)); + await checkDestroyed(findStream); + + // When short circuit isn't possible the whole stream is iterated + await oneTo5Async().some(common.mustCall(async () => false, 5)); + await oneTo5Async().every(common.mustCall(async () => true, 5)); + await oneTo5Async().find(common.mustCall(async () => false, 5)); + } +} + +{ + // Concurrency doesn't affect which value is found. 
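+  // Value 1's predicate settles last, yet find() still returns it: matches are taken in stream order, not settle order.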
+ const found = await Readable.from([1, 2]).find(async (val) => { + if (val === 1) { + await setTimeout(100); + } + return true; + }, { concurrency: 2 }); + assert.strictEqual(found, 1); +} + +{ + // Support for AbortSignal + for (const op of ['some', 'every', 'find']) { + { + const ac = new AbortController(); + assert.rejects(Readable.from([1, 2, 3])[op]( + () => new Promise(() => { }), + { signal: ac.signal } + ), { + name: 'AbortError', + }, `${op} should abort correctly with sync abort`).then(common.mustCall()); + ac.abort(); + } + { + // Support for pre-aborted AbortSignal + assert.rejects(Readable.from([1, 2, 3])[op]( + () => new Promise(() => { }), + { signal: AbortSignal.abort() } + ), { + name: 'AbortError', + }, `${op} should abort with pre-aborted abort controller`).then(common.mustCall()); + } + } +} +{ + // Error cases + for (const op of ['some', 'every', 'find']) { + assert.rejects(async () => { + await Readable.from([1])[op](1); + }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid function`).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1])[op]((x) => x, { + concurrency: 'Foo' + }); + }, /ERR_OUT_OF_RANGE/, `${op} should throw for invalid concurrency`).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1])[op]((x) => x, 1); + }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid concurrency`).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1])[op]((x) => x, { + signal: true + }); + }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid signal`).then(common.mustCall()); + } +} +{ + for (const op of ['some', 'every', 'find']) { + const stream = oneTo5(); + Object.defineProperty(stream, 'map', { + value: common.mustNotCall(() => {}), + }); + // Check that map isn't getting called. 
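+    // some(), every() and find() must not be implemented by delegating to map().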
+ stream[op](() => {}); + } +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-toArray.js b/test/parallel/test-stream-toArray.js new file mode 100644 index 0000000000..dfaeea8e18 --- /dev/null +++ b/test/parallel/test-stream-toArray.js @@ -0,0 +1,108 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { + Readable, +} = require('../../lib'); +const assert = require('assert'); + +{ + // Works on a synchronous stream + (async () => { + const tests = [ + [], + [1], + [1, 2, 3], + Array(100).fill().map((_, i) => i), + ]; + for (const test of tests) { + const stream = Readable.from(test); + const result = await stream.toArray(); + assert.deepStrictEqual(result, test); + } + })().then(common.mustCall()); +} + +{ + // Works on a non-object-mode stream + (async () => { + const firstBuffer = Buffer.from([1, 2, 3]); + const secondBuffer = Buffer.from([4, 5, 6]); + const stream = Readable.from( + [firstBuffer, secondBuffer], + { objectMode: false }); + const result = await stream.toArray(); + assert.strictEqual(Array.isArray(result), true); + assert.deepStrictEqual(result, [firstBuffer, secondBuffer]); + })().then(common.mustCall()); +} + +{ + // Works on an asynchronous stream + (async () => { + const tests = [ + [], + [1], + [1, 2, 3], + Array(100).fill().map((_, i) => i), + ]; + for (const test of tests) { + const stream = Readable.from(test).map((x) => Promise.resolve(x)); + const result = await stream.toArray(); + assert.deepStrictEqual(result, test); + } + })().then(common.mustCall()); +} + +{ + // Support for AbortSignal + const ac = new AbortController(); + let stream; + assert.rejects(async () => { + stream = Readable.from([1, 2, 3]).map(async (x) => { + if (x === 3) { + await new Promise(() => {}); // Explicitly do not pass signal here + } + return Promise.resolve(x); + }); + await stream.toArray({ signal: ac.signal }); + }, { + name: 'AbortError', + }).then(common.mustCall(() => { + // Only stops toArray, does not destroy the stream + assert(stream.destroyed, false); + })); + ac.abort(); +} +{ + // Test result is a Promise + const result = Readable.from([1, 2, 3, 4, 5]).toArray(); + assert.strictEqual(result instanceof Promise, true); +} +{ + // Error cases + assert.rejects(async () => { + await Readable.from([1]).toArray(1); + }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); + + assert.rejects(async () => { + await Readable.from([1]).toArray({ + signal: true + }); + }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-transform-callback-twice.js b/test/parallel/test-stream-transform-callback-twice.js new file mode 100644 index 0000000000..47c6c85f24 --- /dev/null +++ b/test/parallel/test-stream-transform-callback-twice.js @@ -0,0 +1,29 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { Transform } = require('../../lib'); +const stream = new Transform({ + transform(chunk, enc, cb) { cb(); cb(); } +}); + +stream.on('error', common.expectsError({ 
+ name: 'Error', + message: 'Callback called multiple times', + code: 'ERR_MULTIPLE_CALLBACK' +})); + +stream.write('foo'); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-transform-constructor-set-methods.js b/test/parallel/test-stream-transform-constructor-set-methods.js new file mode 100644 index 0000000000..4d73a342aa --- /dev/null +++ b/test/parallel/test-stream-transform-constructor-set-methods.js @@ -0,0 +1,58 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); + +const assert = require('assert'); +const { Transform } = require('../../lib'); + +const t = new Transform(); + +assert.throws( + () => { + t.end(Buffer.from('blerg')); + }, + { + name: 'Error', + code: 'ERR_METHOD_NOT_IMPLEMENTED', + message: 'The _transform() method is not implemented' + } +); + +const _transform = common.mustCall((chunk, _, next) => { + next(); +}); + +const _final = common.mustCall((next) => { + next(); +}); + +const _flush = common.mustCall((next) => { + next(); +}); + +const t2 = new Transform({ + transform: _transform, + flush: _flush, + final: _final +}); + +assert.strictEqual(t2._transform, _transform); +assert.strictEqual(t2._flush, _flush); +assert.strictEqual(t2._final, _final); + +t2.end(Buffer.from('blerg')); +t2.resume(); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-transform-destroy.js b/test/parallel/test-stream-transform-destroy.js new file mode 100644 index 0000000000..092f75fa42 --- /dev/null +++ b/test/parallel/test-stream-transform-destroy.js @@ -0,0 +1,158 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Transform } = require('../../lib'); +const assert = require('assert'); + +{ + const transform = new Transform({ + transform(chunk, enc, cb) {} + }); + + transform.resume(); + + transform.on('end', common.mustNotCall()); + transform.on('close', common.mustCall()); + transform.on('finish', common.mustNotCall()); + + transform.destroy(); +} + +{ + const transform = new Transform({ + transform(chunk, enc, cb) {} + }); + transform.resume(); + + const expected = new Error('kaboom'); + + transform.on('end', common.mustNotCall()); + transform.on('finish', common.mustNotCall()); + transform.on('close', common.mustCall()); + transform.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + transform.destroy(expected); +} + +{ + const transform = new Transform({ + transform(chunk, enc, cb) {} + }); + + transform._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, expected); + cb(err); + }, 1); + + const expected = new Error('kaboom'); + + transform.on('finish', common.mustNotCall('no finish event')); + transform.on('close', common.mustCall()); + transform.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + transform.destroy(expected); +} + +{ + const expected = new Error('kaboom'); + const transform = new Transform({ + transform(chunk, enc, cb) {}, + destroy: common.mustCall(function(err, cb) { + 
assert.strictEqual(err, expected); + cb(); + }, 1) + }); + transform.resume(); + + transform.on('end', common.mustNotCall('no end event')); + transform.on('close', common.mustCall()); + transform.on('finish', common.mustNotCall('no finish event')); + + // Error is swallowed by the custom _destroy + transform.on('error', common.mustNotCall('no error event')); + + transform.destroy(expected); +} + +{ + const transform = new Transform({ + transform(chunk, enc, cb) {} + }); + + transform._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(); + }, 1); + + transform.destroy(); +} + +{ + const transform = new Transform({ + transform(chunk, enc, cb) {} + }); + transform.resume(); + + transform._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + process.nextTick(() => { + this.push(null); + this.end(); + cb(); + }); + }, 1); + + const fail = common.mustNotCall('no event'); + + transform.on('finish', fail); + transform.on('end', fail); + transform.on('close', common.mustCall()); + + transform.destroy(); + + transform.removeListener('end', fail); + transform.removeListener('finish', fail); + transform.on('end', common.mustCall()); + transform.on('finish', common.mustCall()); +} + +{ + const transform = new Transform({ + transform(chunk, enc, cb) {} + }); + + const expected = new Error('kaboom'); + + transform._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(expected); + }, 1); + + transform.on('close', common.mustCall()); + transform.on('finish', common.mustNotCall('no finish event')); + transform.on('end', common.mustNotCall('no end event')); + transform.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + transform.destroy(); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-transform-final-sync.js b/test/parallel/test-stream-transform-final-sync.js new file mode 100644 index 0000000000..3e32c118c6 --- /dev/null +++ b/test/parallel/test-stream-transform-final-sync.js @@ -0,0 +1,125 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const stream = require('../../lib'); +let state = 0; + + +// What you do +// +// const stream = new stream.Transform({ +// transform: function transformCallback(chunk, _, next) { +// // part 1 +// this.push(chunk); +// //part 2 +// next(); +// }, +// final: function endCallback(done) { +// // part 1 +// process.nextTick(function () { +// // part 2 +// done(); +// }); +// }, +// flush: function flushCallback(done) { +// // part 1 +// process.nextTick(function () { +// // part 2 +// done(); +// }); +// } +// }); +// t.on('data', dataListener); +// t.on('end', endListener); +// t.on('finish', finishListener); +// t.write(1); +// t.write(4); +// t.end(7, endMethodCallback); +// +// The order things are called +// +// 1. transformCallback part 1 +// 2. dataListener +// 3. transformCallback part 2 +// 4. transformCallback part 1 +// 5. dataListener +// 6. transformCallback part 2 +// 7. transformCallback part 1 +// 8. dataListener +// 9. transformCallback part 2 +// 10. finalCallback part 1 +// 11. finalCallback part 2 +// 12. flushCallback part 1 +// 13. finishListener +// 14. endMethodCallback +// 15. 
flushCallback part 2 +// 16. endListener + +const t = new stream.Transform({ + objectMode: true, + transform: common.mustCall(function(chunk, _, next) { + // transformCallback part 1 + assert.strictEqual(++state, chunk); + this.push(state); + // transformCallback part 2 + assert.strictEqual(++state, chunk + 2); + process.nextTick(next); + }, 3), + final: common.mustCall(function(done) { + state++; + // finalCallback part 1 + assert.strictEqual(state, 10); + state++; + // finalCallback part 2 + assert.strictEqual(state, 11); + done(); + }, 1), + flush: common.mustCall(function(done) { + state++; + // fluchCallback part 1 + assert.strictEqual(state, 12); + process.nextTick(function() { + state++; + // fluchCallback part 2 + assert.strictEqual(state, 13); + done(); + }); + }, 1) +}); +t.on('finish', common.mustCall(function() { + state++; + // finishListener + assert.strictEqual(state, 15); +}, 1)); +t.on('end', common.mustCall(function() { + state++; + // endEvent + assert.strictEqual(state, 16); +}, 1)); +t.on('data', common.mustCall(function(d) { + // dataListener + assert.strictEqual(++state, d + 1); +}, 3)); +t.write(1); +t.write(4); +t.end(7, common.mustCall(function() { + state++; + // endMethodCallback + assert.strictEqual(state, 14); +}, 1)); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-transform-final.js b/test/parallel/test-stream-transform-final.js new file mode 100644 index 0000000000..549a610cc5 --- /dev/null +++ b/test/parallel/test-stream-transform-final.js @@ -0,0 +1,127 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const stream = require('../../lib'); +let state = 0; + + +// What you do: +// +// const stream = new stream.Transform({ +// transform: function transformCallback(chunk, _, next) { +// // part 1 +// this.push(chunk); +// //part 2 +// next(); +// }, +// final: function endCallback(done) { +// // part 1 +// process.nextTick(function () { +// // part 2 +// done(); +// }); +// }, +// flush: function flushCallback(done) { +// // part 1 +// process.nextTick(function () { +// // part 2 +// done(); +// }); +// } +// }); +// t.on('data', dataListener); +// t.on('end', endListener); +// t.on('finish', finishListener); +// t.write(1); +// t.write(4); +// t.end(7, endMethodCallback); +// +// The order things are called + +// 1. transformCallback part 1 +// 2. dataListener +// 3. transformCallback part 2 +// 4. transformCallback part 1 +// 5. dataListener +// 6. transformCallback part 2 +// 7. transformCallback part 1 +// 8. dataListener +// 9. transformCallback part 2 +// 10. finalCallback part 1 +// 11. finalCallback part 2 +// 12. flushCallback part 1 +// 13. finishListener +// 14. endMethodCallback +// 15. flushCallback part 2 +// 16. 
endListener + +const t = new stream.Transform({ + objectMode: true, + transform: common.mustCall(function(chunk, _, next) { + // transformCallback part 1 + assert.strictEqual(++state, chunk); + this.push(state); + // transformCallback part 2 + assert.strictEqual(++state, chunk + 2); + process.nextTick(next); + }, 3), + final: common.mustCall(function(done) { + state++; + // finalCallback part 1 + assert.strictEqual(state, 10); + setTimeout(function() { + state++; + // finalCallback part 2 + assert.strictEqual(state, 11); + done(); + }, 100); + }, 1), + flush: common.mustCall(function(done) { + state++; + // flushCallback part 1 + assert.strictEqual(state, 12); + process.nextTick(function() { + state++; + // flushCallback part 2 + assert.strictEqual(state, 13); + done(); + }); + }, 1) +}); +t.on('finish', common.mustCall(function() { + state++; + // finishListener + assert.strictEqual(state, 15); +}, 1)); +t.on('end', common.mustCall(function() { + state++; + // end event + assert.strictEqual(state, 16); +}, 1)); +t.on('data', common.mustCall(function(d) { + // dataListener + assert.strictEqual(++state, d + 1); +}, 3)); +t.write(1); +t.write(4); +t.end(7, common.mustCall(function() { + state++; + // endMethodCallback + assert.strictEqual(state, 14); +}, 1)); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-transform-flush-data.js b/test/parallel/test-stream-transform-flush-data.js new file mode 100644 index 0000000000..e8ad955f25 --- /dev/null +++ b/test/parallel/test-stream-transform-flush-data.js @@ -0,0 +1,43 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +require('../common'); + +const assert = require('assert'); +const Transform = require('../../lib').Transform; + + +const expected = 'asdf'; + + +function _transform(d, e, n) { + n(); +} + +function _flush(n) { + n(null, expected); +} + +const t = new Transform({ + transform: _transform, + flush: _flush +}); + +t.end(Buffer.from('blerg')); +t.on('data', (data) => { + assert.strictEqual(data.toString(), expected); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-transform-objectmode-falsey-value.js b/test/parallel/test-stream-transform-objectmode-falsey-value.js new file mode 100644 index 0000000000..6e41f7983e --- /dev/null +++ b/test/parallel/test-stream-transform-objectmode-falsey-value.js @@ -0,0 +1,66 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const stream = require('../../lib'); +const PassThrough = stream.PassThrough; + +const src = new PassThrough({ objectMode: true }); +const tx = new PassThrough({ objectMode: true }); +const dest = new PassThrough({ objectMode: true }); + +const expect = [ -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ]; +const results = []; + +dest.on('data', common.mustCall(function(x) { + results.push(x); +}, expect.length)); + +src.pipe(tx).pipe(dest); + +let i = -1; +const int = setInterval(common.mustCall(function() { + if (results.length === expect.length) { + src.end(); + clearInterval(int); + assert.deepStrictEqual(results, expect); + } else { + src.write(i++); + } +}, expect.length + 1), 1); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-transform-split-highwatermark.js b/test/parallel/test-stream-transform-split-highwatermark.js new file mode 100644 index 0000000000..9241137dcb --- /dev/null +++ b/test/parallel/test-stream-transform-split-highwatermark.js @@ -0,0 +1,107 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); + +const { Transform, Readable, Writable } = require('../../lib'); + +const DEFAULT = 16 * 1024; + +function testTransform(expectedReadableHwm, expectedWritableHwm, options) { + const t = new Transform(options); + assert.strictEqual(t._readableState.highWaterMark, expectedReadableHwm); + assert.strictEqual(t._writableState.highWaterMark, expectedWritableHwm); +} + +// Test overriding defaultHwm +testTransform(666, DEFAULT, { readableHighWaterMark: 666 }); +testTransform(DEFAULT, 777, { writableHighWaterMark: 777 }); +testTransform(666, 777, { + readableHighWaterMark: 666, + writableHighWaterMark: 777, +}); + +// test 0 overriding defaultHwm +testTransform(0, DEFAULT, { readableHighWaterMark: 0 }); +testTransform(DEFAULT, 0, { writableHighWaterMark: 0 }); + +// Test highWaterMark overriding +testTransform(555, 555, { + highWaterMark: 555, + readableHighWaterMark: 666, +}); +testTransform(555, 555, { + highWaterMark: 555, + writableHighWaterMark: 777, +}); +testTransform(555, 555, { + highWaterMark: 555, + readableHighWaterMark: 666, + writableHighWaterMark: 777, +}); + +// Test highWaterMark = 0 overriding +testTransform(0, 0, { + highWaterMark: 0, + readableHighWaterMark: 666, +}); +testTransform(0, 0, { + highWaterMark: 0, + writableHighWaterMark: 777, +}); +testTransform(0, 0, { + highWaterMark: 0, + readableHighWaterMark: 666, + writableHighWaterMark: 777, +}); + +// Test undefined, null +[undefined, null].forEach((v) => { + testTransform(DEFAULT, DEFAULT, { readableHighWaterMark: v }); + testTransform(DEFAULT, 
DEFAULT, { writableHighWaterMark: v }); + testTransform(666, DEFAULT, { highWaterMark: v, readableHighWaterMark: 666 }); + testTransform(DEFAULT, 777, { highWaterMark: v, writableHighWaterMark: 777 }); +}); + +// test NaN +{ + assert.throws(() => { + new Transform({ readableHighWaterMark: NaN }); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_VALUE', + message: "The property 'options.readableHighWaterMark' is invalid. " + + 'Received NaN' + }); + + assert.throws(() => { + new Transform({ writableHighWaterMark: NaN }); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_VALUE', + message: "The property 'options.writableHighWaterMark' is invalid. " + + 'Received NaN' + }); +} + +// Test non Duplex streams ignore the options +{ + const r = new Readable({ readableHighWaterMark: 666 }); + assert.strictEqual(r._readableState.highWaterMark, DEFAULT); + const w = new Writable({ writableHighWaterMark: 777 }); + assert.strictEqual(w._writableState.highWaterMark, DEFAULT); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-transform-split-objectmode.js b/test/parallel/test-stream-transform-split-objectmode.js new file mode 100644 index 0000000000..a50e186225 --- /dev/null +++ b/test/parallel/test-stream-transform-split-objectmode.js @@ -0,0 +1,96 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
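+// This test verifies that readableObjectMode and writableObjectMode can be set independently on a Transform, along with the corresponding split highWaterMark defaults.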
+ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); + +const Transform = require('../../lib').Transform; + +const parser = new Transform({ readableObjectMode: true }); + +assert(parser._readableState.objectMode); +assert(!parser._writableState.objectMode); +assert.strictEqual(parser.readableHighWaterMark, 16); +assert.strictEqual(parser.writableHighWaterMark, 16 * 1024); +assert.strictEqual(parser.readableHighWaterMark, + parser._readableState.highWaterMark); +assert.strictEqual(parser.writableHighWaterMark, + parser._writableState.highWaterMark); + +parser._transform = function(chunk, enc, callback) { + callback(null, { val: chunk[0] }); +}; + +let parsed; + +parser.on('data', function(obj) { + parsed = obj; +}); + +parser.end(Buffer.from([42])); + +process.on('exit', function() { + assert.strictEqual(parsed.val, 42); +}); + + +const serializer = new Transform({ writableObjectMode: true }); + +assert(!serializer._readableState.objectMode); +assert(serializer._writableState.objectMode); +assert.strictEqual(serializer.readableHighWaterMark, 16 * 1024); +assert.strictEqual(serializer.writableHighWaterMark, 16); +assert.strictEqual(parser.readableHighWaterMark, + parser._readableState.highWaterMark); +assert.strictEqual(parser.writableHighWaterMark, + parser._writableState.highWaterMark); + +serializer._transform = function(obj, _, callback) { + callback(null, Buffer.from([obj.val])); +}; + +let serialized; + +serializer.on('data', function(chunk) { + serialized = chunk; +}); + +serializer.write({ val: 42 }); + +process.on('exit', function() { + assert.strictEqual(serialized[0], 42); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-uint8array.js b/test/parallel/test-stream-uint8array.js new file mode 100644 index 0000000000..93d4444d98 --- /dev/null +++ b/test/parallel/test-stream-uint8array.js @@ -0,0 +1,116 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const { Readable, Writable } = require('../../lib'); + +const ABC = new Uint8Array([0x41, 0x42, 0x43]); +const DEF = new Uint8Array([0x44, 0x45, 0x46]); +const GHI = new Uint8Array([0x47, 0x48, 0x49]); + +{ + // Simple Writable test. + + let n = 0; + const writable = new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + assert(chunk instanceof Buffer); + if (n++ === 0) { + assert.strictEqual(String(chunk), 'ABC'); + } else { + assert.strictEqual(String(chunk), 'DEF'); + } + + cb(); + }, 2) + }); + + writable.write(ABC); + writable.end(DEF); +} + +{ + // Writable test, pass in Uint8Array in object mode. + + const writable = new Writable({ + objectMode: true, + write: common.mustCall((chunk, encoding, cb) => { + assert(!(chunk instanceof Buffer)); + assert(chunk instanceof Uint8Array); + assert.strictEqual(chunk, ABC); + assert.strictEqual(encoding, 'utf8'); + cb(); + }) + }); + + writable.end(ABC); +} + +{ + // Writable test, multiple writes carried out via writev. 
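+  // The first chunk is delivered through _write(); while its callback is held back, the remaining writes are buffered and flushed together through _writev().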
+ let callback; + + const writable = new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + assert(chunk instanceof Buffer); + assert.strictEqual(encoding, 'buffer'); + assert.strictEqual(String(chunk), 'ABC'); + callback = cb; + }), + writev: common.mustCall((chunks, cb) => { + assert.strictEqual(chunks.length, 2); + assert.strictEqual(chunks[0].encoding, 'buffer'); + assert.strictEqual(chunks[1].encoding, 'buffer'); + assert.strictEqual(chunks[0].chunk + chunks[1].chunk, 'DEFGHI'); + }) + }); + + writable.write(ABC); + writable.write(DEF); + writable.end(GHI); + callback(); +} + +{ + // Simple Readable test. + const readable = new Readable({ + read() {} + }); + + readable.push(DEF); + readable.unshift(ABC); + + const buf = readable.read(); + assert(buf instanceof Buffer); + assert.deepStrictEqual([...buf], [...ABC, ...DEF]); +} + +{ + // Readable test, setEncoding. + const readable = new Readable({ + read() {} + }); + + readable.setEncoding('utf8'); + + readable.push(DEF); + readable.unshift(ABC); + + const out = readable.read(); + assert.strictEqual(out, 'ABCDEF'); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-unpipe-event.js b/test/parallel/test-stream-unpipe-event.js new file mode 100644 index 0000000000..0f5f2373a8 --- /dev/null +++ b/test/parallel/test-stream-unpipe-event.js @@ -0,0 +1,100 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { Writable, Readable } = require('../../lib'); +class NullWriteable extends Writable { + _write(chunk, encoding, callback) { + return callback(); + } +} +class QuickEndReadable extends Readable { + _read() { + this.push(null); + } +} +class NeverEndReadable extends Readable { + _read() {} +} + +{ + const dest = new NullWriteable(); + const src = new QuickEndReadable(); + dest.on('pipe', common.mustCall()); + dest.on('unpipe', common.mustCall()); + src.pipe(dest); + setImmediate(() => { + assert.strictEqual(src._readableState.pipes.length, 0); + }); +} + +{ + const dest = new NullWriteable(); + const src = new NeverEndReadable(); + dest.on('pipe', common.mustCall()); + dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted')); + src.pipe(dest); + setImmediate(() => { + assert.strictEqual(src._readableState.pipes.length, 1); + }); +} + +{ + const dest = new NullWriteable(); + const src = new NeverEndReadable(); + dest.on('pipe', common.mustCall()); + dest.on('unpipe', common.mustCall()); + src.pipe(dest); + src.unpipe(dest); + setImmediate(() => { + assert.strictEqual(src._readableState.pipes.length, 0); + }); +} + +{ + const dest = new NullWriteable(); + const src = new QuickEndReadable(); + dest.on('pipe', common.mustCall()); + dest.on('unpipe', common.mustCall()); + src.pipe(dest, { end: false }); + setImmediate(() => { + assert.strictEqual(src._readableState.pipes.length, 0); + }); +} + +{ + const dest = new NullWriteable(); + const src = new NeverEndReadable(); + dest.on('pipe', common.mustCall()); + dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted')); + src.pipe(dest, { end: false }); + setImmediate(() => { + assert.strictEqual(src._readableState.pipes.length, 1); + }); +} + +{ + const dest = new NullWriteable(); + const src = new NeverEndReadable(); + 
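+  // A manual unpipe() with { end: false } still emits 'unpipe' and clears the pipes list.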
dest.on('pipe', common.mustCall()); + dest.on('unpipe', common.mustCall()); + src.pipe(dest, { end: false }); + src.unpipe(dest); + setImmediate(() => { + assert.strictEqual(src._readableState.pipes.length, 0); + }); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-unshift-empty-chunk.js b/test/parallel/test-stream-unshift-empty-chunk.js new file mode 100644 index 0000000000..72828aae25 --- /dev/null +++ b/test/parallel/test-stream-unshift-empty-chunk.js @@ -0,0 +1,95 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); + +// This test verifies that stream.unshift(Buffer.alloc(0)) or +// stream.unshift('') does not set state.reading=false. +const Readable = require('../../lib').Readable; + +const r = new Readable(); +let nChunks = 10; +const chunk = Buffer.alloc(10, 'x'); + +r._read = function(n) { + setImmediate(() => { + r.push(--nChunks === 0 ? null : chunk); + }); +}; + +let readAll = false; +const seen = []; +r.on('readable', () => { + let chunk; + while ((chunk = r.read()) !== null) { + seen.push(chunk.toString()); + // Simulate only reading a certain amount of the data, + // and then putting the rest of the chunk back into the + // stream, like a parser might do. We just fill it with + // 'y' so that it's easy to see which bits were touched, + // and which were not. + const putBack = Buffer.alloc(readAll ? 
0 : 5, 'y'); + readAll = !readAll; + r.unshift(putBack); + } +}); + +const expect = + [ 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy' ]; + +r.on('end', () => { + assert.deepStrictEqual(seen, expect); + silentConsole.log('ok'); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-unshift-read-race.js b/test/parallel/test-stream-unshift-read-race.js new file mode 100644 index 0000000000..e225c15a3d --- /dev/null +++ b/test/parallel/test-stream-unshift-read-race.js @@ -0,0 +1,143 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +// This test verifies that: +// 1. unshift() does not cause colliding _read() calls. +// 2. unshift() after the 'end' event is an error, but after the EOF +// signalling null, it is ok, and just creates a new readable chunk. +// 3. push() after the EOF signaling null is an error. +// 4. _read() is not called after pushing the EOF null chunk. + +const stream = require('../../lib'); +const hwm = 10; +const r = stream.Readable({ highWaterMark: hwm, autoDestroy: false }); +const chunks = 10; + +const data = Buffer.allocUnsafe(chunks * hwm + Math.ceil(hwm / 2)); +for (let i = 0; i < data.length; i++) { + const c = 'asdf'.charCodeAt(i % 4); + data[i] = c; +} + +let pos = 0; +let pushedNull = false; +r._read = function(n) { + assert(!pushedNull, '_read after null push'); + + // Every third chunk is fast + push(!(chunks % 3)); + + function push(fast) { + assert(!pushedNull, 'push() after null push'); + const c = pos >= data.length ? 
null : data.slice(pos, pos + n); + pushedNull = c === null; + if (fast) { + pos += n; + r.push(c); + if (c === null) pushError(); + } else { + setTimeout(function() { + pos += n; + r.push(c); + if (c === null) pushError(); + }, 1); + } + } +}; + +function pushError() { + r.unshift(Buffer.allocUnsafe(1)); + w.end(); + + assert.throws(() => { + r.push(Buffer.allocUnsafe(1)); + }, { + code: 'ERR_STREAM_PUSH_AFTER_EOF', + name: 'Error', + message: 'stream.push() after EOF' + }); +} + + +const w = stream.Writable(); +const written = []; +w._write = function(chunk, encoding, cb) { + written.push(chunk.toString()); + cb(); +}; + +r.on('end', common.mustNotCall()); + +r.on('readable', function() { + let chunk; + while (null !== (chunk = r.read(10))) { + w.write(chunk); + if (chunk.length > 4) + r.unshift(Buffer.from('1234')); + } +}); + +w.on('finish', common.mustCall(function() { + // Each chunk should start with 1234, and then be asfdasdfasdf... + // The first got pulled out before the first unshift('1234'), so it's + // lacking that piece. + assert.strictEqual(written[0], 'asdfasdfas'); + let asdf = 'd'; + silentConsole.error(`0: ${written[0]}`); + for (let i = 1; i < written.length; i++) { + silentConsole.error(`${i.toString(32)}: ${written[i]}`); + assert.strictEqual(written[i].slice(0, 4), '1234'); + for (let j = 4; j < written[i].length; j++) { + const c = written[i].charAt(j); + assert.strictEqual(c, asdf); + switch (asdf) { + case 'a': asdf = 's'; break; + case 's': asdf = 'd'; break; + case 'd': asdf = 'f'; break; + case 'f': asdf = 'a'; break; + } + } + } +})); + +process.on('exit', function() { + assert.strictEqual(written.length, 18); + silentConsole.log('ok'); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-wrap-drain.js b/test/parallel/test-stream-wrap-drain.js new file mode 100644 index 0000000000..1af1449596 --- /dev/null +++ b/test/parallel/test-stream-wrap-drain.js @@ -0,0 +1,65 @@ +// Flags: --expose-internals + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { StreamWrap } = require('../../lib/internal/js_stream_socket'); +const { Duplex } = require('../../lib'); +const internalBinding = process.binding +const { ShutdownWrap } = internalBinding('stream_wrap'); + +// This test makes sure that when a wrapped stream is waiting for +// a "drain" event to `doShutdown`, the instance will work correctly when a +// "drain" event emitted. +{ + let resolve = null; + + class TestDuplex extends Duplex { + _write(chunk, encoding, callback) { + // We will resolve the write later. + resolve = () => { + callback(); + }; + } + + _read() {} + } + + const testDuplex = new TestDuplex(); + const socket = new StreamWrap(testDuplex); + + socket.write( + // Make the buffer long enough so that the `Writable` will emit "drain". + Buffer.allocUnsafe(socket.writableHighWaterMark * 2), + common.mustCall() + ); + + // Make sure that the 'drain' events will be emitted. + testDuplex.on('drain', common.mustCall(() => { + silentConsole.log('testDuplex drain'); + })); + + assert.strictEqual(typeof resolve, 'function'); + + const req = new ShutdownWrap(); + req.oncomplete = common.mustCall(); + req.handle = socket._handle; + // Should not throw. 
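+  // The wrap is still waiting for 'drain', so the shutdown is deferred rather than rejected.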
+ socket._handle.shutdown(req); + + resolve(); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-wrap-encoding.js b/test/parallel/test-stream-wrap-encoding.js new file mode 100644 index 0000000000..cfc69d965d --- /dev/null +++ b/test/parallel/test-stream-wrap-encoding.js @@ -0,0 +1,58 @@ +// Flags: --expose-internals + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); + +const StreamWrap = require('../../lib/internal/js_stream_socket'); +const Duplex = require('../../lib').Duplex; + +{ + const stream = new Duplex({ + read() {}, + write() {} + }); + + stream.setEncoding('ascii'); + + const wrap = new StreamWrap(stream); + + wrap.on('error', common.expectsError({ + name: 'Error', + code: 'ERR_STREAM_WRAP', + message: 'Stream has StringDecoder set or is in objectMode' + })); + + stream.push('ohai'); +} + +{ + const stream = new Duplex({ + read() {}, + write() {}, + objectMode: true + }); + + const wrap = new StreamWrap(stream); + + wrap.on('error', common.expectsError({ + name: 'Error', + code: 'ERR_STREAM_WRAP', + message: 'Stream has StringDecoder set or is in objectMode' + })); + + stream.push(new Error('foo')); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-wrap.js b/test/parallel/test-stream-wrap.js new file mode 100644 index 0000000000..9c0074f7cf --- /dev/null +++ b/test/parallel/test-stream-wrap.js @@ -0,0 +1,48 @@ +// Flags: --expose-internals + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const internalBinding = process.binding +const StreamWrap = require('../../lib/internal/js_stream_socket'); +const { Duplex } = require('../../lib'); +const { ShutdownWrap } = internalBinding('stream_wrap'); + +function testShutdown(callback) { + const stream = new Duplex({ + read: function() { + }, + write: function() { + } + }); + + const wrap = new StreamWrap(stream); + + const req = new ShutdownWrap(); + req.oncomplete = function(code) { + assert(code < 0); + callback(); + }; + req.handle = wrap._handle; + + // Close the handle to simulate + wrap.destroy(); + req.handle.shutdown(req); +} + +testShutdown(common.mustCall()); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-callback-twice.js b/test/parallel/test-stream-writable-callback-twice.js new file mode 100644 index 0000000000..9115216946 --- /dev/null +++ b/test/parallel/test-stream-writable-callback-twice.js @@ -0,0 +1,29 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { Writable } = require('../../lib'); +const stream = new Writable({ + write(chunk, enc, cb) { cb(); cb(); } +}); + +stream.on('error', common.expectsError({ + name: 'Error', + message: 'Callback called multiple times', + code: 'ERR_MULTIPLE_CALLBACK' 
+})); + +stream.write('foo'); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-change-default-encoding.js b/test/parallel/test-stream-writable-change-default-encoding.js new file mode 100644 index 0000000000..15722705f0 --- /dev/null +++ b/test/parallel/test-stream-writable-change-default-encoding.js @@ -0,0 +1,93 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); + +const stream = require('../../lib'); + +class MyWritable extends stream.Writable { + constructor(fn, options) { + super(options); + this.fn = fn; + } + + _write(chunk, encoding, callback) { + this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding); + callback(); + } +} + +(function defaultCondingIsUtf8() { + const m = new MyWritable(function(isBuffer, type, enc) { + assert.strictEqual(enc, 'utf8'); + }, { decodeStrings: false }); + m.write('foo'); + m.end(); +}()); + +(function changeDefaultEncodingToAscii() { + const m = new MyWritable(function(isBuffer, type, enc) { + assert.strictEqual(enc, 'ascii'); + }, { decodeStrings: false }); + m.setDefaultEncoding('ascii'); + m.write('bar'); + m.end(); +}()); + +// Change default encoding to invalid value. 
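+// A non-string argument to setDefaultEncoding() should throw synchronously, so the write() and end() calls inside the block below never run.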
+assert.throws(() => { + const m = new MyWritable( + (isBuffer, type, enc) => {}, + { decodeStrings: false }); + m.setDefaultEncoding({}); + m.write('bar'); + m.end(); +}, { + name: 'TypeError', + code: 'ERR_UNKNOWN_ENCODING', + message: 'Unknown encoding: {}' +}); + +(function checkVariableCaseEncoding() { + const m = new MyWritable(function(isBuffer, type, enc) { + assert.strictEqual(enc, 'ascii'); + }, { decodeStrings: false }); + m.setDefaultEncoding('AsCii'); + m.write('bar'); + m.end(); +}()); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-clear-buffer.js b/test/parallel/test-stream-writable-clear-buffer.js new file mode 100644 index 0000000000..148df0e547 --- /dev/null +++ b/test/parallel/test-stream-writable-clear-buffer.js @@ -0,0 +1,50 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +// This test ensures that the _writeableState.bufferedRequestCount and +// the actual buffered request count are the same. + +const common = require('../common'); +const Stream = require('../../lib'); +const assert = require('assert'); + +class StreamWritable extends Stream.Writable { + constructor() { + super({ objectMode: true }); + } + + // Refs: https://github.com/nodejs/node/issues/6758 + // We need a timer like on the original issue thread. + // Otherwise the code will never reach our test case. + _write(chunk, encoding, cb) { + setImmediate(cb); + } +} + +const testStream = new StreamWritable(); +testStream.cork(); + +for (let i = 1; i <= 5; i++) { + testStream.write(i, common.mustCall(() => { + assert.strictEqual( + testStream._writableState.bufferedRequestCount, + testStream._writableState.getBuffer().length + ); + })); +} + +testStream.end(); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-constructor-set-methods.js b/test/parallel/test-stream-writable-constructor-set-methods.js new file mode 100644 index 0000000000..a55c552d8b --- /dev/null +++ b/test/parallel/test-stream-writable-constructor-set-methods.js @@ -0,0 +1,56 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); + +const assert = require('assert'); +const { Writable } = require('../../lib'); + +const bufferBlerg = Buffer.from('blerg'); +const w = new Writable(); + +assert.throws( + () => { + w.end(bufferBlerg); + }, + { + name: 'Error', + code: 'ERR_METHOD_NOT_IMPLEMENTED', + message: 'The _write() method is not implemented' + } +); + +const _write = common.mustCall((chunk, _, next) => { + next(); +}); + +const _writev = common.mustCall((chunks, next) => { + assert.strictEqual(chunks.length, 2); + next(); +}); + +const w2 = new Writable({ write: _write, writev: _writev }); + +assert.strictEqual(w2._write, _write); +assert.strictEqual(w2._writev, _writev); + +w2.write(bufferBlerg); + +w2.cork(); +w2.write(bufferBlerg); +w2.write(bufferBlerg); + +w2.end(); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff 
--git a/test/parallel/test-stream-writable-decoded-encoding.js b/test/parallel/test-stream-writable-decoded-encoding.js new file mode 100644 index 0000000000..8754727ce6 --- /dev/null +++ b/test/parallel/test-stream-writable-decoded-encoding.js @@ -0,0 +1,73 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); + +const stream = require('../../lib'); + +class MyWritable extends stream.Writable { + constructor(fn, options) { + super(options); + this.fn = fn; + } + + _write(chunk, encoding, callback) { + this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding); + callback(); + } +} + +{ + const m = new MyWritable(function(isBuffer, type, enc) { + assert(isBuffer); + assert.strictEqual(type, 'object'); + assert.strictEqual(enc, 'buffer'); + }, { decodeStrings: true }); + m.write('some-text', 'utf8'); + m.end(); +} + +{ + const m = new MyWritable(function(isBuffer, type, enc) { + assert(!isBuffer); + assert.strictEqual(type, 'string'); + assert.strictEqual(enc, 'utf8'); + }, { decodeStrings: false }); + m.write('some-text', 'utf8'); + m.end(); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-destroy.js b/test/parallel/test-stream-writable-destroy.js new file mode 100644 index 0000000000..5c2f0c97c1 --- /dev/null +++ b/test/parallel/test-stream-writable-destroy.js @@ -0,0 +1,506 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Writable, addAbortSignal } = require('../../lib'); +const assert = require('assert'); + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write.on('finish', common.mustNotCall()); + write.on('close', common.mustCall()); + + write.destroy(); + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { + this.destroy(new Error('asd')); + cb(); + } + }); + + write.on('error', common.mustCall()); + write.on('finish', common.mustNotCall()); + write.end('asd'); + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + 
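+    // A pass-through _write; this case checks that destroy(err) emits 'error' and 'close' but never 'finish'.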
write(chunk, enc, cb) { cb(); } + }); + + const expected = new Error('kaboom'); + + write.on('finish', common.mustNotCall()); + write.on('close', common.mustCall()); + write.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + write.destroy(expected); + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write._destroy = function(err, cb) { + assert.strictEqual(err, expected); + cb(err); + }; + + const expected = new Error('kaboom'); + + write.on('finish', common.mustNotCall('no finish event')); + write.on('close', common.mustCall()); + write.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + write.destroy(expected); + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); }, + destroy: common.mustCall(function(err, cb) { + assert.strictEqual(err, expected); + cb(); + }) + }); + + const expected = new Error('kaboom'); + + write.on('finish', common.mustNotCall('no finish event')); + write.on('close', common.mustCall()); + + // Error is swallowed by the custom _destroy + write.on('error', common.mustNotCall('no error event')); + + write.destroy(expected); + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(); + }); + + write.destroy(); + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + process.nextTick(() => { + this.end(); + cb(); + }); + }); + + const fail = common.mustNotCall('no finish event'); + + write.on('finish', fail); + write.on('close', common.mustCall()); + + write.destroy(); + + write.removeListener('finish', fail); + write.on('finish', common.mustCall()); + assert.strictEqual(write.destroyed, true); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + const expected = new Error('kaboom'); + + write._destroy = common.mustCall(function(err, cb) { + assert.strictEqual(err, null); + cb(expected); + }); + + write.on('close', common.mustCall()); + write.on('finish', common.mustNotCall('no finish event')); + write.on('error', common.mustCall((err) => { + assert.strictEqual(err, expected); + })); + + write.destroy(); + assert.strictEqual(write.destroyed, true); +} + +{ + // double error case + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + let ticked = false; + write.on('close', common.mustCall(() => { + assert.strictEqual(ticked, true); + })); + write.on('error', common.mustCall((err) => { + assert.strictEqual(ticked, true); + assert.strictEqual(err.message, 'kaboom 1'); + assert.strictEqual(write._writableState.errorEmitted, true); + })); + + const expected = new Error('kaboom 1'); + write.destroy(expected); + write.destroy(new Error('kaboom 2')); + assert.strictEqual(write._writableState.errored, expected); + assert.strictEqual(write._writableState.errorEmitted, false); + assert.strictEqual(write.destroyed, true); + ticked = true; +} + +{ + const writable = new Writable({ + destroy: common.mustCall(function(err, cb) { + process.nextTick(cb, new Error('kaboom 1')); + }), + write(chunk, enc, cb) { + cb(); + } + }); + + let ticked = false; + writable.on('close', common.mustCall(() => { + 
writable.on('error', common.mustNotCall()); + writable.destroy(new Error('hello')); + assert.strictEqual(ticked, true); + assert.strictEqual(writable._writableState.errorEmitted, true); + })); + writable.on('error', common.mustCall((err) => { + assert.strictEqual(ticked, true); + assert.strictEqual(err.message, 'kaboom 1'); + assert.strictEqual(writable._writableState.errorEmitted, true); + })); + + writable.destroy(); + assert.strictEqual(writable.destroyed, true); + assert.strictEqual(writable._writableState.errored, null); + assert.strictEqual(writable._writableState.errorEmitted, false); + + // Test case where `writable.destroy()` is called again with an error before + // the `_destroy()` callback is called. + writable.destroy(new Error('kaboom 2')); + assert.strictEqual(writable._writableState.errorEmitted, false); + assert.strictEqual(writable._writableState.errored, null); + + ticked = true; +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write.destroyed = true; + assert.strictEqual(write.destroyed, true); + + // The internal destroy() mechanism should not be triggered + write.on('close', common.mustNotCall()); + write.destroy(); +} + +{ + function MyWritable() { + assert.strictEqual(this.destroyed, false); + this.destroyed = false; + Writable.call(this); + } + + Object.setPrototypeOf(MyWritable.prototype, Writable.prototype); + Object.setPrototypeOf(MyWritable, Writable); + + new MyWritable(); +} + +{ + // Destroy and destroy callback + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write.destroy(); + + const expected = new Error('kaboom'); + + write.destroy(expected, common.mustCall((err) => { + assert.strictEqual(err, undefined); + })); +} + +{ + // Checks that `._undestroy()` restores the state so that `final` will be + // called again. 
+ const write = new Writable({ + write: common.mustNotCall(), + final: common.mustCall((cb) => cb(), 2), + autoDestroy: true + }); + + write.end(); + write.once('close', common.mustCall(() => { + write._undestroy(); + write.end(); + })); +} + +{ + const write = new Writable(); + + write.destroy(); + write.on('error', common.mustNotCall()); + write.write('asd', common.expectsError({ + name: 'Error', + code: 'ERR_STREAM_DESTROYED', + message: 'Cannot call write after a stream was destroyed' + })); +} + +{ + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write.on('error', common.mustNotCall()); + + write.cork(); + write.write('asd', common.mustCall()); + write.uncork(); + + write.cork(); + write.write('asd', common.expectsError({ + name: 'Error', + code: 'ERR_STREAM_DESTROYED', + message: 'Cannot call write after a stream was destroyed' + })); + write.destroy(); + write.write('asd', common.expectsError({ + name: 'Error', + code: 'ERR_STREAM_DESTROYED', + message: 'Cannot call write after a stream was destroyed' + })); + write.uncork(); +} + +{ + // Call end(cb) after error & destroy + + const write = new Writable({ + write(chunk, enc, cb) { cb(new Error('asd')); } + }); + write.on('error', common.mustCall(() => { + write.destroy(); + let ticked = false; + write.end(common.mustCall((err) => { + assert.strictEqual(ticked, true); + assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED'); + })); + ticked = true; + })); + write.write('asd'); +} + +{ + // Call end(cb) after finish & destroy + + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + write.on('finish', common.mustCall(() => { + write.destroy(); + let ticked = false; + write.end(common.mustCall((err) => { + assert.strictEqual(ticked, true); + assert.strictEqual(err.code, 'ERR_STREAM_ALREADY_FINISHED'); + })); + ticked = true; + })); + write.end(); +} + +{ + // Call end(cb) after error & destroy and don't trigger + // unhandled exception. + + const write = new Writable({ + write(chunk, enc, cb) { process.nextTick(cb); } + }); + const _err = new Error('asd'); + write.once('error', common.mustCall((err) => { + assert.strictEqual(err.message, 'asd'); + })); + write.end('asd', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + write.destroy(_err); +} + +{ + // Call buffered write callback with error + + const _err = new Error('asd'); + const write = new Writable({ + write(chunk, enc, cb) { + process.nextTick(cb, _err); + }, + autoDestroy: false + }); + write.cork(); + write.write('asd', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + write.write('asd', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + write.on('error', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + write.uncork(); +} + +{ + // Ensure callback order. + + let state = 0; + const write = new Writable({ + write(chunk, enc, cb) { + // `setImmediate()` is used on purpose to ensure the callback is called + // after `process.nextTick()` callbacks. 
+ setImmediate(cb); + } + }); + write.write('asd', common.mustCall(() => { + assert.strictEqual(state++, 0); + })); + write.write('asd', common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED'); + assert.strictEqual(state++, 1); + })); + write.destroy(); +} + +{ + const write = new Writable({ + autoDestroy: false, + write(chunk, enc, cb) { + cb(); + cb(); + } + }); + + write.on('error', common.mustCall(() => { + assert(write._writableState.errored); + })); + write.write('asd'); +} + +{ + const ac = new AbortController(); + const write = addAbortSignal(ac.signal, new Writable({ + write(chunk, enc, cb) { cb(); } + })); + + write.on('error', common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError'); + assert.strictEqual(write.destroyed, true); + })); + write.write('asd'); + ac.abort(); +} + +{ + const ac = new AbortController(); + const write = new Writable({ + signal: ac.signal, + write(chunk, enc, cb) { cb(); } + }); + + write.on('error', common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError'); + assert.strictEqual(write.destroyed, true); + })); + write.write('asd'); + ac.abort(); +} + +{ + const signal = AbortSignal.abort(); + + const write = new Writable({ + signal, + write(chunk, enc, cb) { cb(); } + }); + + write.on('error', common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError'); + assert.strictEqual(write.destroyed, true); + })); +} + +{ + // Destroy twice + const write = new Writable({ + write(chunk, enc, cb) { cb(); } + }); + + write.end(common.mustCall()); + write.destroy(); + write.destroy(); +} + +{ + // https://github.com/nodejs/node/issues/39356 + const s = new Writable({ + final() {} + }); + const _err = new Error('oh no'); + // Remove `callback` and it works + s.end(common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + s.on('error', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + s.destroy(_err); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-end-cb-error.js b/test/parallel/test-stream-writable-end-cb-error.js new file mode 100644 index 0000000000..b3f3df5927 --- /dev/null +++ b/test/parallel/test-stream-writable-end-cb-error.js @@ -0,0 +1,93 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); + +{ + // Invoke end callback on failure. 
+ const writable = new stream.Writable(); + + const _err = new Error('kaboom'); + writable._write = (chunk, encoding, cb) => { + process.nextTick(cb, _err); + }; + + writable.on('error', common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + writable.write('asd'); + writable.end(common.mustCall((err) => { + assert.strictEqual(err, _err); + })); + writable.end(common.mustCall((err) => { + assert.strictEqual(err, _err); + })); +} + +{ + // Don't invoke end callback twice + const writable = new stream.Writable(); + + writable._write = (chunk, encoding, cb) => { + process.nextTick(cb); + }; + + let called = false; + writable.end('asd', common.mustCall((err) => { + called = true; + assert.strictEqual(err, undefined); + })); + + writable.on('error', common.mustCall((err) => { + assert.strictEqual(err.message, 'kaboom'); + })); + writable.on('finish', common.mustCall(() => { + assert.strictEqual(called, true); + writable.emit('error', new Error('kaboom')); + })); +} + +{ + const w = new stream.Writable({ + write(chunk, encoding, callback) { + setImmediate(callback); + }, + finish(callback) { + setImmediate(callback); + } + }); + w.end('testing ended state', common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); + })); + assert.strictEqual(w.destroyed, false); + assert.strictEqual(w.writableEnded, true); + w.end(common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); + })); + assert.strictEqual(w.destroyed, false); + assert.strictEqual(w.writableEnded, true); + w.end('end', common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); + })); + assert.strictEqual(w.destroyed, true); + w.on('error', common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); + })); + w.on('finish', common.mustNotCall()); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-end-cb-uncaught.js b/test/parallel/test-stream-writable-end-cb-uncaught.js new file mode 100644 index 0000000000..f02db94897 --- /dev/null +++ b/test/parallel/test-stream-writable-end-cb-uncaught.js @@ -0,0 +1,39 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); + +process.on('uncaughtException', common.mustCall((err) => { + assert.strictEqual(err.message, 'kaboom'); +})); + +const writable = new stream.Writable(); +const _err = new Error('kaboom'); + +writable._write = (chunk, encoding, cb) => { + cb(); +}; +writable._final = (cb) => { + cb(_err); +}; + +writable.write('asd'); +writable.end(common.mustCall((err) => { + assert.strictEqual(err, _err); +})); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-end-multiple.js b/test/parallel/test-stream-writable-end-multiple.js new file mode 100644 index 0000000000..552997402b --- /dev/null +++ b/test/parallel/test-stream-writable-end-multiple.js @@ -0,0 +1,37 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = 
require('../common'); + +const assert = require('assert'); +const stream = require('../../lib'); + +const writable = new stream.Writable(); +writable._write = (chunk, encoding, cb) => { + setTimeout(() => cb(), 10); +}; + +writable.end('testing ended state', common.mustCall()); +writable.end(common.mustCall()); +writable.on('finish', common.mustCall(() => { + let ticked = false; + writable.end(common.mustCall((err) => { + assert.strictEqual(ticked, true); + assert.strictEqual(err.code, 'ERR_STREAM_ALREADY_FINISHED'); + })); + ticked = true; +})); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-ended-state.js b/test/parallel/test-stream-writable-ended-state.js new file mode 100644 index 0000000000..0779c3f707 --- /dev/null +++ b/test/parallel/test-stream-writable-ended-state.js @@ -0,0 +1,47 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); + +const assert = require('assert'); +const stream = require('../../lib'); + +const writable = new stream.Writable(); + +writable._write = (chunk, encoding, cb) => { + assert.strictEqual(writable._writableState.ended, false); + assert.strictEqual(writable._writableState.writable, undefined); + assert.strictEqual(writable.writableEnded, false); + cb(); +}; + +assert.strictEqual(writable._writableState.ended, false); +assert.strictEqual(writable._writableState.writable, undefined); +assert.strictEqual(writable.writable, true); +assert.strictEqual(writable.writableEnded, false); + +writable.end('testing ended state', common.mustCall(() => { + assert.strictEqual(writable._writableState.ended, true); + assert.strictEqual(writable._writableState.writable, undefined); + assert.strictEqual(writable.writable, false); + assert.strictEqual(writable.writableEnded, true); +})); + +assert.strictEqual(writable._writableState.ended, true); +assert.strictEqual(writable._writableState.writable, undefined); +assert.strictEqual(writable.writable, false); +assert.strictEqual(writable.writableEnded, true); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-final-async.js b/test/parallel/test-stream-writable-final-async.js new file mode 100644 index 0000000000..822d980950 --- /dev/null +++ b/test/parallel/test-stream-writable-final-async.js @@ -0,0 +1,48 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { + Duplex, +} = require('../../lib'); + + const st = require('timers').setTimeout; + + function setTimeout(ms) { + return new Promise(resolve => { + st(resolve, ms); + }); + } + + +{ + class Foo extends Duplex { + async _final(callback) { + await setTimeout(common.platformTimeout(1)); + callback(); + } + + _read() {} + } + + const foo = new Foo(); + foo._write = common.mustCall((chunk, encoding, cb) => { + cb(); + }); + foo.end('test', common.mustCall()); + foo.on('error', common.mustNotCall()); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + 
} + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-final-destroy.js b/test/parallel/test-stream-writable-final-destroy.js new file mode 100644 index 0000000000..7e7c998d33 --- /dev/null +++ b/test/parallel/test-stream-writable-final-destroy.js @@ -0,0 +1,36 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); + +const { Writable } = require('../../lib'); + +{ + const w = new Writable({ + write(chunk, encoding, callback) { + callback(null); + }, + final(callback) { + queueMicrotask(callback); + } + }); + w.end(); + w.destroy(); + + w.on('prefinish', common.mustNotCall()); + w.on('finish', common.mustNotCall()); + w.on('close', common.mustCall()); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-final-throw.js b/test/parallel/test-stream-writable-final-throw.js new file mode 100644 index 0000000000..471886557a --- /dev/null +++ b/test/parallel/test-stream-writable-final-throw.js @@ -0,0 +1,38 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { + Duplex, +} = require('../../lib'); + +{ + class Foo extends Duplex { + _final(callback) { + throw new Error('fhqwhgads'); + } + + _read() {} + } + + const foo = new Foo(); + foo._write = common.mustCall((chunk, encoding, cb) => { + cb(); + }); + foo.end('test', common.expectsError({ message: 'fhqwhgads' })); + foo.on('error', common.mustCall()); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-finish-destroyed.js b/test/parallel/test-stream-writable-finish-destroyed.js new file mode 100644 index 0000000000..ef72823a52 --- /dev/null +++ b/test/parallel/test-stream-writable-finish-destroyed.js @@ -0,0 +1,48 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Writable } = require('../../lib'); + +{ + const w = new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + w.on('close', common.mustCall(() => { + cb(); + })); + }) + }); + + w.on('finish', common.mustNotCall()); + w.end('asd'); + w.destroy(); +} + +{ + const w = new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + w.on('close', common.mustCall(() => { + cb(); + w.end(); + })); + }) + }); + + w.on('finish', common.mustNotCall()); + w.write('asd'); + w.destroy(); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-finished-state.js b/test/parallel/test-stream-writable-finished-state.js new file mode 100644 index 0000000000..1ef0928728 --- /dev/null +++ b/test/parallel/test-stream-writable-finished-state.js @@ -0,0 +1,37 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); + +const assert = require('assert'); +const stream = 
require('../../lib'); + +const writable = new stream.Writable(); + +writable._write = (chunk, encoding, cb) => { + // The state finished should start in false. + assert.strictEqual(writable._writableState.finished, false); + cb(); +}; + +writable.on('finish', common.mustCall(() => { + assert.strictEqual(writable._writableState.finished, true); +})); + +writable.end('testing finished state', common.mustCall(() => { + assert.strictEqual(writable._writableState.finished, true); +})); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-finished.js b/test/parallel/test-stream-writable-finished.js new file mode 100644 index 0000000000..28a2eaa03d --- /dev/null +++ b/test/parallel/test-stream-writable-finished.js @@ -0,0 +1,114 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Writable } = require('../../lib'); +const assert = require('assert'); + +// basic +{ + // Find it on Writable.prototype + assert(Reflect.has(Writable.prototype, 'writableFinished')); +} + +// event +{ + const writable = new Writable(); + + writable._write = (chunk, encoding, cb) => { + // The state finished should start in false. + assert.strictEqual(writable.writableFinished, false); + cb(); + }; + + writable.on('finish', common.mustCall(() => { + assert.strictEqual(writable.writableFinished, true); + })); + + writable.end('testing finished state', common.mustCall(() => { + assert.strictEqual(writable.writableFinished, true); + })); +} + +{ + // Emit finish asynchronously. + + const w = new Writable({ + write(chunk, encoding, cb) { + cb(); + } + }); + + w.end(); + w.on('finish', common.mustCall()); +} + +{ + // Emit prefinish synchronously. + + const w = new Writable({ + write(chunk, encoding, cb) { + cb(); + } + }); + + let sync = true; + w.on('prefinish', common.mustCall(() => { + assert.strictEqual(sync, true); + })); + w.end(); + sync = false; +} + +{ + // Emit prefinish synchronously w/ final. + + const w = new Writable({ + write(chunk, encoding, cb) { + cb(); + }, + final(cb) { + cb(); + } + }); + + let sync = true; + w.on('prefinish', common.mustCall(() => { + assert.strictEqual(sync, true); + })); + w.end(); + sync = false; +} + + +{ + // Call _final synchronously. 
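+  // The assertion inside final only passes if final runs synchronously during end(), before sync is flipped to false.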
+ + let sync = true; + const w = new Writable({ + write(chunk, encoding, cb) { + cb(); + }, + final: common.mustCall((cb) => { + assert.strictEqual(sync, true); + cb(); + }) + }); + + w.end(); + sync = false; +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-invalid-chunk.js b/test/parallel/test-stream-writable-invalid-chunk.js new file mode 100644 index 0000000000..c3394abf9b --- /dev/null +++ b/test/parallel/test-stream-writable-invalid-chunk.js @@ -0,0 +1,51 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const stream = require('../../lib'); +const assert = require('assert'); + +function testWriteType(val, objectMode, code) { + const writable = new stream.Writable({ + objectMode, + write: () => {} + }); + writable.on('error', common.mustNotCall()); + if (code) { + assert.throws(() => { + writable.write(val); + }, { code }); + } else { + writable.write(val); + } +} + +testWriteType([], false, 'ERR_INVALID_ARG_TYPE'); +testWriteType({}, false, 'ERR_INVALID_ARG_TYPE'); +testWriteType(0, false, 'ERR_INVALID_ARG_TYPE'); +testWriteType(true, false, 'ERR_INVALID_ARG_TYPE'); +testWriteType(0.0, false, 'ERR_INVALID_ARG_TYPE'); +testWriteType(undefined, false, 'ERR_INVALID_ARG_TYPE'); +testWriteType(null, false, 'ERR_STREAM_NULL_VALUES'); + +testWriteType([], true); +testWriteType({}, true); +testWriteType(0, true); +testWriteType(true, true); +testWriteType(0.0, true); +testWriteType(undefined, true); +testWriteType(null, true, 'ERR_STREAM_NULL_VALUES'); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-needdrain-state.js b/test/parallel/test-stream-writable-needdrain-state.js new file mode 100644 index 0000000000..4ccc328a5e --- /dev/null +++ b/test/parallel/test-stream-writable-needdrain-state.js @@ -0,0 +1,40 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const stream = require('../../lib'); +const assert = require('assert'); + +const transform = new stream.Transform({ + transform: _transform, + highWaterMark: 1 +}); + +function _transform(chunk, encoding, cb) { + process.nextTick(() => { + assert.strictEqual(transform._writableState.needDrain, true); + cb(); + }); +} + +assert.strictEqual(transform._writableState.needDrain, false); + +transform.write('asdasd', common.mustCall(() => { + assert.strictEqual(transform._writableState.needDrain, false); +})); + +assert.strictEqual(transform._writableState.needDrain, true); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-null.js b/test/parallel/test-stream-writable-null.js new file mode 100644 index 0000000000..17ebd539d3 --- /dev/null +++ b/test/parallel/test-stream-writable-null.js @@ -0,0 +1,62 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = 
require('../common'); +const assert = require('assert'); + +const stream = require('../../lib'); + +class MyWritable extends stream.Writable { + constructor(options) { + super({ autoDestroy: false, ...options }); + } + _write(chunk, encoding, callback) { + assert.notStrictEqual(chunk, null); + callback(); + } +} + +{ + const m = new MyWritable({ objectMode: true }); + m.on('error', common.mustNotCall()); + assert.throws(() => { + m.write(null); + }, { + code: 'ERR_STREAM_NULL_VALUES' + }); +} + +{ + const m = new MyWritable(); + m.on('error', common.mustNotCall()); + assert.throws(() => { + m.write(false); + }, { + code: 'ERR_INVALID_ARG_TYPE' + }); +} + +{ // Should not throw. + const m = new MyWritable({ objectMode: true }); + m.write(false, assert.ifError); +} + +{ // Should not throw. + const m = new MyWritable({ objectMode: true }).on('error', (e) => { + assert.ifError(e || new Error('should not get here')); + }); + m.write(false, assert.ifError); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-properties.js b/test/parallel/test-stream-writable-properties.js new file mode 100644 index 0000000000..712673129d --- /dev/null +++ b/test/parallel/test-stream-writable-properties.js @@ -0,0 +1,37 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); + +const { Writable } = require('../../lib'); + +{ + const w = new Writable(); + assert.strictEqual(w.writableCorked, 0); + w.uncork(); + assert.strictEqual(w.writableCorked, 0); + w.cork(); + assert.strictEqual(w.writableCorked, 1); + w.cork(); + assert.strictEqual(w.writableCorked, 2); + w.uncork(); + assert.strictEqual(w.writableCorked, 1); + w.uncork(); + assert.strictEqual(w.writableCorked, 0); + w.uncork(); + assert.strictEqual(w.writableCorked, 0); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-samecb-singletick.js b/test/parallel/test-stream-writable-samecb-singletick.js new file mode 100644 index 0000000000..d0d594fdaa --- /dev/null +++ b/test/parallel/test-stream-writable-samecb-singletick.js @@ -0,0 +1,51 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { Console } = require('console'); +const { Writable } = require('../../lib'); +const async_hooks = require('async_hooks'); + +// Make sure that repeated calls to silentConsole.log(), and by extension +// stream.write() for the underlying stream, allocate exactly 1 tick object. +// At the time of writing, that is enough to ensure a flat memory profile +// from repeated silentConsole.log() calls, rather than having callbacks pile up +// over time, assuming that data can be written synchronously. 
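+// The async_hooks hook below expects exactly one TickObject to be created across all 100 writes.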
+// Refs: https://github.com/nodejs/node/issues/18013 +// Refs: https://github.com/nodejs/node/issues/18367 + +const checkTickCreated = common.mustCall(); + +const hook = async_hooks.createHook({ + init(id, type, triggerId, resoure) { + if (type === 'TickObject') checkTickCreated(); + } +}).enable(); + +const console = new Console(new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + cb(); + }, 100) +})); + +for (let i = 0; i < 100; i++) + console.log(i); + + /* replacement start */ + process.on('beforeExit', (code) => { + hook.disable(); + }); + /* replacement end */ + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-writable.js b/test/parallel/test-stream-writable-writable.js new file mode 100644 index 0000000000..662e00ceaa --- /dev/null +++ b/test/parallel/test-stream-writable-writable.js @@ -0,0 +1,63 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const { Writable } = require('../../lib'); + +{ + const w = new Writable({ + write() {} + }); + assert.strictEqual(w.writable, true); + w.destroy(); + assert.strictEqual(w.writable, false); +} + +{ + const w = new Writable({ + write: common.mustCall((chunk, encoding, callback) => { + callback(new Error()); + }) + }); + assert.strictEqual(w.writable, true); + w.write('asd'); + assert.strictEqual(w.writable, false); + w.on('error', common.mustCall()); +} + +{ + const w = new Writable({ + write: common.mustCall((chunk, encoding, callback) => { + process.nextTick(() => { + callback(new Error()); + assert.strictEqual(w.writable, false); + }); + }) + }); + w.write('asd'); + w.on('error', common.mustCall()); +} + +{ + const w = new Writable({ + write: common.mustNotCall() + }); + assert.strictEqual(w.writable, true); + w.end(); + assert.strictEqual(w.writable, false); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-write-cb-error.js b/test/parallel/test-stream-writable-write-cb-error.js new file mode 100644 index 0000000000..696ae67501 --- /dev/null +++ b/test/parallel/test-stream-writable-write-cb-error.js @@ -0,0 +1,73 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { Writable } = require('../../lib'); +const assert = require('assert'); + +// Ensure callback is always invoked before +// error is emitted. Regardless if error was +// sync or async. 
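+// Three variants follow: a synchronous cb(error), an asynchronous cb(error) via process.nextTick(), and a sync-error loop that must stop accepting writes immediately.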
+ +{ + let callbackCalled = false; + // Sync Error + const writable = new Writable({ + write: common.mustCall((buf, enc, cb) => { + cb(new Error()); + }) + }); + writable.on('error', common.mustCall(() => { + assert.strictEqual(callbackCalled, true); + })); + writable.write('hi', common.mustCall(() => { + callbackCalled = true; + })); +} + +{ + let callbackCalled = false; + // Async Error + const writable = new Writable({ + write: common.mustCall((buf, enc, cb) => { + process.nextTick(cb, new Error()); + }) + }); + writable.on('error', common.mustCall(() => { + assert.strictEqual(callbackCalled, true); + })); + writable.write('hi', common.mustCall(() => { + callbackCalled = true; + })); +} + +{ + // Sync Error + const writable = new Writable({ + write: common.mustCall((buf, enc, cb) => { + cb(new Error()); + }) + }); + + writable.on('error', common.mustCall()); + + let cnt = 0; + // Ensure we don't live lock on sync error + while (writable.write('a')) + cnt++; + + assert.strictEqual(cnt, 0); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-write-cb-twice.js b/test/parallel/test-stream-writable-write-cb-twice.js new file mode 100644 index 0000000000..8b605562ed --- /dev/null +++ b/test/parallel/test-stream-writable-write-cb-twice.js @@ -0,0 +1,67 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { Writable } = require('../../lib'); + +{ + // Sync + Sync + const writable = new Writable({ + write: common.mustCall((buf, enc, cb) => { + cb(); + cb(); + }) + }); + writable.write('hi'); + writable.on('error', common.expectsError({ + code: 'ERR_MULTIPLE_CALLBACK', + name: 'Error' + })); +} + +{ + // Sync + Async + const writable = new Writable({ + write: common.mustCall((buf, enc, cb) => { + cb(); + process.nextTick(() => { + cb(); + }); + }) + }); + writable.write('hi'); + writable.on('error', common.expectsError({ + code: 'ERR_MULTIPLE_CALLBACK', + name: 'Error' + })); +} + +{ + // Async + Async + const writable = new Writable({ + write: common.mustCall((buf, enc, cb) => { + process.nextTick(cb); + process.nextTick(() => { + cb(); + }); + }) + }); + writable.write('hi'); + writable.on('error', common.expectsError({ + code: 'ERR_MULTIPLE_CALLBACK', + name: 'Error' + })); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-write-error.js b/test/parallel/test-stream-writable-write-error.js new file mode 100644 index 0000000000..ed44285ee4 --- /dev/null +++ b/test/parallel/test-stream-writable-write-error.js @@ -0,0 +1,90 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const { Writable } = require('../../lib'); + +function expectError(w, args, code, sync) { + if (sync) { + if (code) { + assert.throws(() => w.write(...args), { code }); + } else { + w.write(...args); + } + } else { + let errorCalled = false; + let ticked = false; + w.write(...args, common.mustCall((err) => { + assert.strictEqual(ticked, true); + assert.strictEqual(errorCalled, false); + 
assert.strictEqual(err.code, code); + })); + ticked = true; + w.on('error', common.mustCall((err) => { + errorCalled = true; + assert.strictEqual(err.code, code); + })); + } +} + +function test(autoDestroy) { + { + const w = new Writable({ + autoDestroy, + _write() {} + }); + w.end(); + expectError(w, ['asd'], 'ERR_STREAM_WRITE_AFTER_END'); + } + + { + const w = new Writable({ + autoDestroy, + _write() {} + }); + w.destroy(); + } + + { + const w = new Writable({ + autoDestroy, + _write() {} + }); + expectError(w, [null], 'ERR_STREAM_NULL_VALUES', true); + } + + { + const w = new Writable({ + autoDestroy, + _write() {} + }); + expectError(w, [{}], 'ERR_INVALID_ARG_TYPE', true); + } + + { + const w = new Writable({ + decodeStrings: false, + autoDestroy, + _write() {} + }); + expectError(w, ['asd', 'noencoding'], 'ERR_UNKNOWN_ENCODING', true); + } +} + +test(false); +test(true); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-write-writev-finish.js b/test/parallel/test-stream-writable-write-writev-finish.js new file mode 100644 index 0000000000..05f404ebad --- /dev/null +++ b/test/parallel/test-stream-writable-write-writev-finish.js @@ -0,0 +1,167 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); + +// Ensure consistency between the finish event when using cork() +// and writev and when not using them + +{ + const writable = new stream.Writable(); + + writable._write = (chunks, encoding, cb) => { + cb(new Error('write test error')); + }; + + writable.on('finish', common.mustNotCall()); + writable.on('prefinish', common.mustNotCall()); + writable.on('error', common.mustCall((er) => { + assert.strictEqual(er.message, 'write test error'); + })); + + writable.end('test'); +} + +{ + const writable = new stream.Writable(); + + writable._write = (chunks, encoding, cb) => { + setImmediate(cb, new Error('write test error')); + }; + + writable.on('finish', common.mustNotCall()); + writable.on('prefinish', common.mustNotCall()); + writable.on('error', common.mustCall((er) => { + assert.strictEqual(er.message, 'write test error'); + })); + + writable.end('test'); +} + +{ + const writable = new stream.Writable(); + + writable._write = (chunks, encoding, cb) => { + cb(new Error('write test error')); + }; + + writable._writev = (chunks, cb) => { + cb(new Error('writev test error')); + }; + + writable.on('finish', common.mustNotCall()); + writable.on('prefinish', common.mustNotCall()); + writable.on('error', common.mustCall((er) => { + assert.strictEqual(er.message, 'writev test error'); + })); + + writable.cork(); + writable.write('test'); + + setImmediate(function() { + writable.end('test'); + }); +} + +{ + const writable = new stream.Writable(); + + writable._write = (chunks, encoding, cb) => { + setImmediate(cb, new Error('write test error')); + }; + + writable._writev = (chunks, cb) => { + setImmediate(cb, new Error('writev test error')); + }; + + writable.on('finish', common.mustNotCall()); + writable.on('prefinish', common.mustNotCall()); + writable.on('error', common.mustCall((er) => { + assert.strictEqual(er.message, 'writev test error'); + })); + + writable.cork(); + writable.write('test'); + + setImmediate(function() { + 
writable.end('test'); + }); +} + +// Regression test for +// https://github.com/nodejs/node/issues/13812 + +{ + const rs = new stream.Readable(); + rs.push('ok'); + rs.push(null); + rs._read = () => {}; + + const ws = new stream.Writable(); + + ws.on('finish', common.mustNotCall()); + ws.on('error', common.mustCall()); + + ws._write = (chunk, encoding, done) => { + setImmediate(done, new Error()); + }; + rs.pipe(ws); +} + +{ + const rs = new stream.Readable(); + rs.push('ok'); + rs.push(null); + rs._read = () => {}; + + const ws = new stream.Writable(); + + ws.on('finish', common.mustNotCall()); + ws.on('error', common.mustCall()); + + ws._write = (chunk, encoding, done) => { + done(new Error()); + }; + rs.pipe(ws); +} + +{ + const w = new stream.Writable(); + w._write = (chunk, encoding, cb) => { + process.nextTick(cb); + }; + w.on('error', common.mustCall()); + w.on('finish', common.mustNotCall()); + w.on('prefinish', () => { + w.write("shouldn't write in prefinish listener"); + }); + w.end(); +} + +{ + const w = new stream.Writable(); + w._write = (chunk, encoding, cb) => { + process.nextTick(cb); + }; + w.on('error', common.mustCall()); + w.on('finish', () => { + w.write("shouldn't write in finish listener"); + }); + w.end(); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writableState-ending.js b/test/parallel/test-stream-writableState-ending.js new file mode 100644 index 0000000000..52613a74b0 --- /dev/null +++ b/test/parallel/test-stream-writableState-ending.js @@ -0,0 +1,52 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +require('../common'); + +const assert = require('assert'); +const stream = require('../../lib'); + +const writable = new stream.Writable(); + +function testStates(ending, finished, ended) { + assert.strictEqual(writable._writableState.ending, ending); + assert.strictEqual(writable._writableState.finished, finished); + assert.strictEqual(writable._writableState.ended, ended); +} + +writable._write = (chunk, encoding, cb) => { + // Ending, finished, ended start in false. + testStates(false, false, false); + cb(); +}; + +writable.on('finish', () => { + // Ending, finished, ended = true. + testStates(true, true, true); +}); + +const result = writable.end('testing function end()', () => { + // Ending, finished, ended = true. + testStates(true, true, true); +}); + +// End returns the writable instance +assert.strictEqual(result, writable); + +// Ending, ended = true. +// finished = false. 
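+// 'finish' is emitted asynchronously, so at this point the stream is ending and ended but not yet finished.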
+testStates(true, false, true); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js b/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js new file mode 100644 index 0000000000..ed329fceac --- /dev/null +++ b/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js @@ -0,0 +1,72 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); + +const writable = new stream.Writable(); + +writable._writev = common.mustCall((chunks, cb) => { + assert.strictEqual(chunks.length, 2); + cb(); +}, 1); + +writable._write = common.mustCall((chunk, encoding, cb) => { + cb(); +}, 1); + +// first cork +writable.cork(); +assert.strictEqual(writable._writableState.corked, 1); +assert.strictEqual(writable._writableState.bufferedRequestCount, 0); + +// cork again +writable.cork(); +assert.strictEqual(writable._writableState.corked, 2); + +// The first chunk is buffered +writable.write('first chunk'); +assert.strictEqual(writable._writableState.bufferedRequestCount, 1); + +// First uncork does nothing +writable.uncork(); +assert.strictEqual(writable._writableState.corked, 1); +assert.strictEqual(writable._writableState.bufferedRequestCount, 1); + +process.nextTick(uncork); + +// The second chunk is buffered, because we uncork at the end of tick +writable.write('second chunk'); +assert.strictEqual(writable._writableState.corked, 1); +assert.strictEqual(writable._writableState.bufferedRequestCount, 2); + +function uncork() { + // Second uncork flushes the buffer + writable.uncork(); + assert.strictEqual(writable._writableState.corked, 0); + assert.strictEqual(writable._writableState.bufferedRequestCount, 0); + + // Verify that end() uncorks correctly + writable.cork(); + writable.write('third chunk'); + writable.end(); + + // End causes an uncork() as well + assert.strictEqual(writable._writableState.corked, 0); + assert.strictEqual(writable._writableState.bufferedRequestCount, 0); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-write-destroy.js b/test/parallel/test-stream-write-destroy.js new file mode 100644 index 0000000000..b89cf454c9 --- /dev/null +++ b/test/parallel/test-stream-write-destroy.js @@ -0,0 +1,83 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); +const { Writable } = require('../../lib'); + +// Test interaction between calling .destroy() on a writable and pending +// writes. + +for (const withPendingData of [ false, true ]) { + for (const useEnd of [ false, true ]) { + const callbacks = []; + + const w = new Writable({ + write(data, enc, cb) { + callbacks.push(cb); + }, + // Effectively disable the HWM to observe 'drain' events more easily. 
+ highWaterMark: 1 + }); + + let chunksWritten = 0; + let drains = 0; + let finished = false; + w.on('drain', () => drains++); + w.on('finish', () => finished = true); + + function onWrite(err) { + if (err) { + assert.strictEqual(w.destroyed, true); + assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED'); + } else { + chunksWritten++; + } + } + + w.write('abc', onWrite); + assert.strictEqual(chunksWritten, 0); + assert.strictEqual(drains, 0); + callbacks.shift()(); + assert.strictEqual(chunksWritten, 1); + assert.strictEqual(drains, 1); + + if (withPendingData) { + // Test 2 cases: There either is or is not data still in the write queue. + // (The second write will never actually get executed either way.) + w.write('def', onWrite); + } + if (useEnd) { + // Again, test 2 cases: Either we indicate that we want to end the + // writable or not. + w.end('ghi', onWrite); + } else { + w.write('ghi', onWrite); + } + + assert.strictEqual(chunksWritten, 1); + w.destroy(); + assert.strictEqual(chunksWritten, 1); + callbacks.shift()(); + assert.strictEqual(chunksWritten, useEnd && !withPendingData ? 1 : 2); + assert.strictEqual(callbacks.length, 0); + assert.strictEqual(drains, 1); + + // When we used `.end()`, we see the 'finished' event if and only if + // we actually finished processing the write queue. + assert.strictEqual(finished, !withPendingData && useEnd); + } +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-write-drain.js b/test/parallel/test-stream-write-drain.js new file mode 100644 index 0000000000..a094bef673 --- /dev/null +++ b/test/parallel/test-stream-write-drain.js @@ -0,0 +1,31 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const { Writable } = require('../../lib'); + +// Don't emit 'drain' if ended + +const w = new Writable({ + write(data, enc, cb) { + process.nextTick(cb); + }, + highWaterMark: 1 +}); + +w.on('drain', common.mustNotCall()); +w.write('asd'); +w.end(); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-write-final.js b/test/parallel/test-stream-write-final.js new file mode 100644 index 0000000000..e7be4673f0 --- /dev/null +++ b/test/parallel/test-stream-write-final.js @@ -0,0 +1,39 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const stream = require('../../lib'); +let shutdown = false; + +const w = new stream.Writable({ + final: common.mustCall(function(cb) { + assert.strictEqual(this, w); + setTimeout(function() { + shutdown = true; + cb(); + }, 100); + }), + write: function(chunk, e, cb) { + process.nextTick(cb); + } +}); +w.on('finish', common.mustCall(function() { + assert(shutdown); +})); +w.write(Buffer.allocUnsafe(1)); +w.end(Buffer.allocUnsafe(0)); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writev.js 
b/test/parallel/test-stream-writev.js new file mode 100644 index 0000000000..03b0c1222d --- /dev/null +++ b/test/parallel/test-stream-writev.js @@ -0,0 +1,145 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const stream = require('../../lib'); + +const queue = []; +for (let decode = 0; decode < 2; decode++) { + for (let uncork = 0; uncork < 2; uncork++) { + for (let multi = 0; multi < 2; multi++) { + queue.push([!!decode, !!uncork, !!multi]); + } + } +} + +run(); + +function run() { + const t = queue.pop(); + if (t) + test(t[0], t[1], t[2], run); + else + silentConsole.log('ok'); +} + +function test(decode, uncork, multi, next) { + silentConsole.log(`# decode=${decode} uncork=${uncork} multi=${multi}`); + let counter = 0; + let expectCount = 0; + function cnt(msg) { + expectCount++; + const expect = expectCount; + return function(er) { + assert.ifError(er); + counter++; + assert.strictEqual(counter, expect); + }; + } + + const w = new stream.Writable({ decodeStrings: decode }); + w._write = common.mustNotCall('Should not call _write'); + + const expectChunks = decode ? [ + { encoding: 'buffer', + chunk: [104, 101, 108, 108, 111, 44, 32] }, + { encoding: 'buffer', + chunk: [119, 111, 114, 108, 100] }, + { encoding: 'buffer', + chunk: [33] }, + { encoding: 'buffer', + chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] }, + { encoding: 'buffer', + chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173] }, + ] : [ + { encoding: 'ascii', chunk: 'hello, ' }, + { encoding: 'utf8', chunk: 'world' }, + { encoding: 'buffer', chunk: [33] }, + { encoding: 'latin1', chunk: '\nand then...' }, + { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' }, + ]; + + let actualChunks; + w._writev = function(chunks, cb) { + actualChunks = chunks.map(function(chunk) { + return { + encoding: chunk.encoding, + chunk: Buffer.isBuffer(chunk.chunk) ? 
+ Array.prototype.slice.call(chunk.chunk) : chunk.chunk + }; + }); + cb(); + }; + + w.cork(); + w.write('hello, ', 'ascii', cnt('hello')); + w.write('world', 'utf8', cnt('world')); + + if (multi) + w.cork(); + + w.write(Buffer.from('!'), 'buffer', cnt('!')); + w.write('\nand then...', 'latin1', cnt('and then')); + + if (multi) + w.uncork(); + + w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex')); + + if (uncork) + w.uncork(); + + w.end(cnt('end')); + + w.on('finish', function() { + // Make sure finish comes after all the write cb + cnt('finish')(); + assert.deepStrictEqual(actualChunks, expectChunks); + next(); + }); +} + +{ + const w = new stream.Writable({ + writev: common.mustCall(function(chunks, cb) { + cb(); + }) + }); + w.write('asd', common.mustCall()); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-base64-single-char-read-end.js b/test/parallel/test-stream2-base64-single-char-read-end.js new file mode 100644 index 0000000000..92708b701f --- /dev/null +++ b/test/parallel/test-stream2-base64-single-char-read-end.js @@ -0,0 +1,71 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
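+
+// A Readable created with `encoding: 'base64'` buffers partial base64
+// characters in its StringDecoder, so a one-byte source only produces the
+// padded output once the stream ends: '1' is 0x31 = 00110001, whose 6-bit
+// groups encode to 'M' and 'Q', followed by '==' padding, i.e. 'MQ=='.
+// The test below pipes that single byte and checks the result arrives
+// before the 100 ms timeout.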
+ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const { Readable: R, Writable: W } = require('../../lib'); +const assert = require('assert'); + +const src = new R({ encoding: 'base64' }); +const dst = new W(); +let hasRead = false; +const accum = []; + +src._read = function(n) { + if (!hasRead) { + hasRead = true; + process.nextTick(function() { + src.push(Buffer.from('1')); + src.push(null); + }); + } +}; + +dst._write = function(chunk, enc, cb) { + accum.push(chunk); + cb(); +}; + +src.on('end', function() { + assert.strictEqual(String(Buffer.concat(accum)), 'MQ=='); + clearTimeout(timeout); +}); + +src.pipe(dst); + +const timeout = setTimeout(function() { + assert.fail('timed out waiting for _write'); +}, 100); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-basic.js b/test/parallel/test-stream2-basic.js new file mode 100644 index 0000000000..ec533a4305 --- /dev/null +++ b/test/parallel/test-stream2-basic.js @@ -0,0 +1,460 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Readable: R, Writable: W } = require('../../lib'); +const assert = require('assert'); + +const EE = require('events').EventEmitter; + +class TestReader extends R { + constructor(n) { + super(); + this._buffer = Buffer.alloc(n || 100, 'x'); + this._pos = 0; + this._bufs = 10; + } + + _read(n) { + const max = this._buffer.length - this._pos; + n = Math.max(n, 0); + const toRead = Math.min(n, max); + if (toRead === 0) { + // Simulate the read buffer filling up with some more bytes some time + // in the future. + setTimeout(() => { + this._pos = 0; + this._bufs -= 1; + if (this._bufs <= 0) { + // read them all! + if (!this.ended) + this.push(null); + } else { + // now we have more. + // kinda cheating by calling _read, but whatever, + // it's just fake anyway. 
+ this._read(n); + } + }, 10); + return; + } + + const ret = this._buffer.slice(this._pos, this._pos + toRead); + this._pos += toRead; + this.push(ret); + } +} + +class TestWriter extends EE { + constructor() { + super(); + this.received = []; + this.flush = false; + } + + write(c) { + this.received.push(c.toString()); + this.emit('write', c); + return true; + } + + end(c) { + if (c) this.write(c); + this.emit('end', this.received); + } +} + +{ + // Test basic functionality + const r = new TestReader(20); + + const reads = []; + const expect = [ 'x', + 'xx', + 'xxx', + 'xxxx', + 'xxxxx', + 'xxxxxxxxx', + 'xxxxxxxxxx', + 'xxxxxxxxxxxx', + 'xxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxxxxxxxx' ]; + + r.on('end', common.mustCall(function() { + assert.deepStrictEqual(reads, expect); + })); + + let readSize = 1; + function flow() { + let res; + while (null !== (res = r.read(readSize++))) { + reads.push(res.toString()); + } + r.once('readable', flow); + } + + flow(); +} + +{ + // Verify pipe + const r = new TestReader(5); + + const expect = [ 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx' ]; + + const w = new TestWriter(); + + w.on('end', common.mustCall(function(received) { + assert.deepStrictEqual(received, expect); + })); + + r.pipe(w); +} + + +[1, 2, 3, 4, 5, 6, 7, 8, 9].forEach(function(SPLIT) { + // Verify unpipe + const r = new TestReader(5); + + // Unpipe after 3 writes, then write to another stream instead. + let expect = [ 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx' ]; + expect = [ expect.slice(0, SPLIT), expect.slice(SPLIT) ]; + + const w = [ new TestWriter(), new TestWriter() ]; + + let writes = SPLIT; + w[0].on('write', function() { + if (--writes === 0) { + r.unpipe(); + assert.deepStrictEqual(r._readableState.pipes, []); + w[0].end(); + r.pipe(w[1]); + assert.deepStrictEqual(r._readableState.pipes, [w[1]]); + } + }); + + let ended = 0; + + w[0].on('end', common.mustCall(function(results) { + ended++; + assert.strictEqual(ended, 1); + assert.deepStrictEqual(results, expect[0]); + })); + + w[1].on('end', common.mustCall(function(results) { + ended++; + assert.strictEqual(ended, 2); + assert.deepStrictEqual(results, expect[1]); + })); + + r.pipe(w[0]); +}); + + +{ + // Verify both writers get the same data when piping to destinations + const r = new TestReader(5); + const w = [ new TestWriter(), new TestWriter() ]; + + const expect = [ 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx' ]; + + w[0].on('end', common.mustCall(function(received) { + assert.deepStrictEqual(received, expect); + })); + w[1].on('end', common.mustCall(function(received) { + assert.deepStrictEqual(received, expect); + })); + + r.pipe(w[0]); + r.pipe(w[1]); +} + + +[1, 2, 3, 4, 5, 6, 7, 8, 9].forEach(function(SPLIT) { + // Verify multi-unpipe + const r = new TestReader(5); + + // Unpipe after 3 writes, then write to another stream instead. 
+ let expect = [ 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx', + 'xxxxx' ]; + expect = [ expect.slice(0, SPLIT), expect.slice(SPLIT) ]; + + const w = [ new TestWriter(), new TestWriter(), new TestWriter() ]; + + let writes = SPLIT; + w[0].on('write', function() { + if (--writes === 0) { + r.unpipe(); + w[0].end(); + r.pipe(w[1]); + } + }); + + let ended = 0; + + w[0].on('end', common.mustCall(function(results) { + ended++; + assert.strictEqual(ended, 1); + assert.deepStrictEqual(results, expect[0]); + })); + + w[1].on('end', common.mustCall(function(results) { + ended++; + assert.strictEqual(ended, 2); + assert.deepStrictEqual(results, expect[1]); + })); + + r.pipe(w[0]); + r.pipe(w[2]); +}); + +{ + // Verify that back pressure is respected + const r = new R({ objectMode: true }); + r._read = common.mustNotCall(); + let counter = 0; + r.push(['one']); + r.push(['two']); + r.push(['three']); + r.push(['four']); + r.push(null); + + const w1 = new R(); + w1.write = function(chunk) { + assert.strictEqual(chunk[0], 'one'); + w1.emit('close'); + process.nextTick(function() { + r.pipe(w2); + r.pipe(w3); + }); + }; + w1.end = common.mustNotCall(); + + r.pipe(w1); + + const expected = ['two', 'two', 'three', 'three', 'four', 'four']; + + const w2 = new R(); + w2.write = function(chunk) { + assert.strictEqual(chunk[0], expected.shift()); + assert.strictEqual(counter, 0); + + counter++; + + if (chunk[0] === 'four') { + return true; + } + + setTimeout(function() { + counter--; + w2.emit('drain'); + }, 10); + + return false; + }; + w2.end = common.mustCall(); + + const w3 = new R(); + w3.write = function(chunk) { + assert.strictEqual(chunk[0], expected.shift()); + assert.strictEqual(counter, 1); + + counter++; + + if (chunk[0] === 'four') { + return true; + } + + setTimeout(function() { + counter--; + w3.emit('drain'); + }, 50); + + return false; + }; + w3.end = common.mustCall(function() { + assert.strictEqual(counter, 2); + assert.strictEqual(expected.length, 0); + }); +} + +{ + // Verify read(0) behavior for ended streams + const r = new R(); + let written = false; + let ended = false; + r._read = common.mustNotCall(); + + r.push(Buffer.from('foo')); + r.push(null); + + const v = r.read(0); + + assert.strictEqual(v, null); + + const w = new R(); + w.write = function(buffer) { + written = true; + assert.strictEqual(ended, false); + assert.strictEqual(buffer.toString(), 'foo'); + }; + + w.end = common.mustCall(function() { + ended = true; + assert.strictEqual(written, true); + }); + + r.pipe(w); +} + +{ + // Verify synchronous _read ending + const r = new R(); + let called = false; + r._read = function(n) { + r.push(null); + }; + + r.once('end', function() { + // Verify that this is called before the next tick + called = true; + }); + + r.read(); + + process.nextTick(function() { + assert.strictEqual(called, true); + }); +} + +{ + // Verify that adding readable listeners trigger data flow + const r = new R({ highWaterMark: 5 }); + let onReadable = false; + let readCalled = 0; + + r._read = function(n) { + if (readCalled++ === 2) + r.push(null); + else + r.push(Buffer.from('asdf')); + }; + + r.on('readable', function() { + onReadable = true; + r.read(); + }); + + r.on('end', common.mustCall(function() { + assert.strictEqual(readCalled, 3); + assert.ok(onReadable); + })); +} + +{ + // Verify that streams are chainable + const r = new R(); + r._read = common.mustCall(); + const r2 = r.setEncoding('utf8').pause().resume().pause(); + assert.strictEqual(r, 
r2); +} + +{ + // Verify readableEncoding property + assert(Reflect.has(R.prototype, 'readableEncoding')); + + const r = new R({ encoding: 'utf8' }); + assert.strictEqual(r.readableEncoding, 'utf8'); +} + +{ + // Verify readableObjectMode property + assert(Reflect.has(R.prototype, 'readableObjectMode')); + + const r = new R({ objectMode: true }); + assert.strictEqual(r.readableObjectMode, true); +} + +{ + // Verify writableObjectMode property + assert(Reflect.has(W.prototype, 'writableObjectMode')); + + const w = new W({ objectMode: true }); + assert.strictEqual(w.writableObjectMode, true); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-compatibility.js b/test/parallel/test-stream2-compatibility.js new file mode 100644 index 0000000000..4b07e970b8 --- /dev/null +++ b/test/parallel/test-stream2-compatibility.js @@ -0,0 +1,85 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
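+
+// Streams1 compatibility: attaching a 'data' listener in the constructor
+// puts the Readable into flowing mode, and the handler must have run exactly
+// once by the first setImmediate; on exit both reader.readable and
+// writer.writable are expected to be false.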
+ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const { Readable: R, Writable: W } = require('../../lib'); +const assert = require('assert'); + +let ondataCalled = 0; + +class TestReader extends R { + constructor() { + super(); + this._buffer = Buffer.alloc(100, 'x'); + + this.on('data', () => { + ondataCalled++; + }); + } + + _read(n) { + this.push(this._buffer); + this._buffer = Buffer.alloc(0); + } +} + +const reader = new TestReader(); +setImmediate(function() { + assert.strictEqual(ondataCalled, 1); + silentConsole.log('ok'); + reader.push(null); +}); + +class TestWriter extends W { + constructor() { + super(); + this.write('foo'); + this.end(); + } + + _write(chunk, enc, cb) { + cb(); + } +} + +const writer = new TestWriter(); + +process.on('exit', function() { + assert.strictEqual(reader.readable, false); + assert.strictEqual(writer.writable, false); + silentConsole.log('ok'); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-decode-partial.js b/test/parallel/test-stream2-decode-partial.js new file mode 100644 index 0000000000..7a76fab0d6 --- /dev/null +++ b/test/parallel/test-stream2-decode-partial.js @@ -0,0 +1,38 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const { Readable } = require('../../lib'); +const assert = require('assert'); + +let buf = ''; +const euro = Buffer.from([0xE2, 0x82, 0xAC]); +const cent = Buffer.from([0xC2, 0xA2]); +const source = Buffer.concat([euro, cent]); + +const readable = Readable({ encoding: 'utf8' }); +readable.push(source.slice(0, 2)); +readable.push(source.slice(2, 4)); +readable.push(source.slice(4, 6)); +readable.push(null); + +readable.on('data', function(data) { + buf += data; +}); + +process.on('exit', function() { + assert.strictEqual(buf, '€¢'); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-finish-pipe-error.js b/test/parallel/test-stream2-finish-pipe-error.js new file mode 100644 index 0000000000..c7f79939af --- /dev/null +++ b/test/parallel/test-stream2-finish-pipe-error.js @@ -0,0 +1,35 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const stream = require('../../lib'); + +process.on('uncaughtException', common.mustCall()); + +const r = new stream.Readable(); +r._read = function(size) { + r.push(Buffer.allocUnsafe(size)); +}; + +const w = new stream.Writable(); +w._write = function(data, encoding, cb) { + cb(null); +}; + +r.pipe(w); + +// end() after pipe should cause unhandled exception +w.end(); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-finish-pipe.js b/test/parallel/test-stream2-finish-pipe.js new file mode 100644 index 0000000000..24e014e3f7 --- /dev/null +++ b/test/parallel/test-stream2-finish-pipe.js @@ -0,0 +1,59 @@ +// Copyright Joyent, Inc. 
and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const stream = require('../../lib'); + +const r = new stream.Readable(); +r._read = function(size) { + r.push(Buffer.allocUnsafe(size)); +}; + +const w = new stream.Writable(); +w._write = function(data, encoding, cb) { + process.nextTick(cb, null); +}; + +r.pipe(w); + +// end() must be called in nextTick or a WRITE_AFTER_END error occurs. +process.nextTick(() => { + // This might sound unrealistic, but it happens in net.js. When + // socket.allowHalfOpen === false, EOF will cause .destroySoon() call which + // ends the writable side of net.Socket. 
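+  // (The preceding test-stream2-finish-pipe-error.js covers the synchronous
+  // case, where calling end() right after pipe() is expected to surface as
+  // an uncaught exception.)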
+ w.end(); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-httpclient-response-end.js b/test/parallel/test-stream2-httpclient-response-end.js new file mode 100644 index 0000000000..b4d96aaedb --- /dev/null +++ b/test/parallel/test-stream2-httpclient-response-end.js @@ -0,0 +1,40 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const http = require('http'); +const msg = 'Hello'; +const server = http.createServer(function(req, res) { + res.writeHead(200, { 'Content-Type': 'text/plain' }); + res.end(msg); +}).listen(0, function() { + http.get({ port: this.address().port }, function(res) { + let data = ''; + res.on('readable', common.mustCall(function() { + silentConsole.log('readable event'); + let chunk; + while ((chunk = res.read()) !== null) { + data += chunk; + } + })); + res.on('end', common.mustCall(function() { + silentConsole.log('end event'); + assert.strictEqual(msg, data); + server.close(); + })); + }); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-large-read-stall.js b/test/parallel/test-stream2-large-read-stall.js new file mode 100644 index 0000000000..8caef393ff --- /dev/null +++ b/test/parallel/test-stream2-large-read-stall.js @@ -0,0 +1,89 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +// If everything aligns so that you do a read(n) of exactly the +// remaining buffer, then make sure that 'end' still emits. 
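+//
+// Concretely: 1000 pushes of 20-byte chunks (highWaterMark 50) are drained
+// by read(100) calls in the 'readable' handler, and the 'end' listener
+// asserts that every push, including the final null, was made.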
+ +const READSIZE = 100; +const PUSHSIZE = 20; +const PUSHCOUNT = 1000; +const HWM = 50; + +const Readable = require('../../lib').Readable; +const r = new Readable({ + highWaterMark: HWM +}); +const rs = r._readableState; + +r._read = push; + +r.on('readable', function() { + silentConsole.error('>> readable'); + let ret; + do { + silentConsole.error(` > read(${READSIZE})`); + ret = r.read(READSIZE); + silentConsole.error(` < ${ret && ret.length} (${rs.length} remain)`); + } while (ret && ret.length === READSIZE); + + silentConsole.error('<< after read()', + ret && ret.length, + rs.needReadable, + rs.length); +}); + +r.on('end', common.mustCall(function() { + assert.strictEqual(pushes, PUSHCOUNT + 1); +})); + +let pushes = 0; +function push() { + if (pushes > PUSHCOUNT) + return; + + if (pushes++ === PUSHCOUNT) { + silentConsole.error(' push(EOF)'); + return r.push(null); + } + + silentConsole.error(` push #${pushes}`); + if (r.push(Buffer.allocUnsafe(PUSHSIZE))) + setTimeout(push, 1); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-objects.js b/test/parallel/test-stream2-objects.js new file mode 100644 index 0000000000..51fafbe5bc --- /dev/null +++ b/test/parallel/test-stream2-objects.js @@ -0,0 +1,312 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
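+
+// Object-mode round trips: the blocks below cover reading, piping and
+// writing plain objects, strings and falsey values with objectMode: true,
+// read(n) being ignored, and highWaterMark being counted in objects rather
+// than bytes.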
+ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Readable, Writable } = require('../../lib'); +const assert = require('assert'); + +function toArray(callback) { + const stream = new Writable({ objectMode: true }); + const list = []; + stream.write = function(chunk) { + list.push(chunk); + }; + + stream.end = common.mustCall(function() { + callback(list); + }); + + return stream; +} + +function fromArray(list) { + const r = new Readable({ objectMode: true }); + r._read = common.mustNotCall(); + list.forEach(function(chunk) { + r.push(chunk); + }); + r.push(null); + + return r; +} + +{ + // Verify that objects can be read from the stream + const r = fromArray([{ one: '1' }, { two: '2' }]); + + const v1 = r.read(); + const v2 = r.read(); + const v3 = r.read(); + + assert.deepStrictEqual(v1, { one: '1' }); + assert.deepStrictEqual(v2, { two: '2' }); + assert.strictEqual(v3, null); +} + +{ + // Verify that objects can be piped into the stream + const r = fromArray([{ one: '1' }, { two: '2' }]); + + r.pipe(toArray(common.mustCall(function(list) { + assert.deepStrictEqual(list, [ + { one: '1' }, + { two: '2' }, + ]); + }))); +} + +{ + // Verify that read(n) is ignored + const r = fromArray([{ one: '1' }, { two: '2' }]); + const value = r.read(2); + + assert.deepStrictEqual(value, { one: '1' }); +} + +{ + // Verify that objects can be synchronously read + const r = new Readable({ objectMode: true }); + const list = [{ one: '1' }, { two: '2' }]; + r._read = function(n) { + const item = list.shift(); + r.push(item || null); + }; + + r.pipe(toArray(common.mustCall(function(list) { + assert.deepStrictEqual(list, [ + { one: '1' }, + { two: '2' }, + ]); + }))); +} + +{ + // Verify that objects can be asynchronously read + const r = new Readable({ objectMode: true }); + const list = [{ one: '1' }, { two: '2' }]; + r._read = function(n) { + const item = list.shift(); + process.nextTick(function() { + r.push(item || null); + }); + }; + + r.pipe(toArray(common.mustCall(function(list) { + assert.deepStrictEqual(list, [ + { one: '1' }, + { two: '2' }, + ]); + }))); +} + +{ + // Verify that strings can be read as objects + const r = new Readable({ + objectMode: true + }); + r._read = common.mustNotCall(); + const list = ['one', 'two', 'three']; + list.forEach(function(str) { + r.push(str); + }); + r.push(null); + + r.pipe(toArray(common.mustCall(function(array) { + assert.deepStrictEqual(array, list); + }))); +} + +{ + // Verify read(0) behavior for object streams + const r = new Readable({ + objectMode: true + }); + r._read = common.mustNotCall(); + + r.push('foobar'); + r.push(null); + + r.pipe(toArray(common.mustCall(function(array) { + assert.deepStrictEqual(array, ['foobar']); + }))); +} + +{ + // Verify the behavior of pushing falsey values + const r = new Readable({ + objectMode: true + }); + r._read = common.mustNotCall(); + + r.push(false); + r.push(0); + r.push(''); + r.push(null); + + r.pipe(toArray(common.mustCall(function(array) { + assert.deepStrictEqual(array, [false, 0, '']); + }))); +} + +{ + // Verify high watermark _read() behavior + const r = new Readable({ + highWaterMark: 6, + objectMode: true + }); + let calls = 0; + const list = ['1', '2', '3', '4', '5', '6', '7', '8']; + + r._read = function(n) { + calls++; + }; + + list.forEach(function(c) { + r.push(c); + }); + + const v = r.read(); + + assert.strictEqual(calls, 0); + assert.strictEqual(v, '1'); + + const v2 = r.read(); + 
assert.strictEqual(v2, '2'); + + const v3 = r.read(); + assert.strictEqual(v3, '3'); + + assert.strictEqual(calls, 1); +} + +{ + // Verify high watermark push behavior + const r = new Readable({ + highWaterMark: 6, + objectMode: true + }); + r._read = common.mustNotCall(); + for (let i = 0; i < 6; i++) { + const bool = r.push(i); + assert.strictEqual(bool, i !== 5); + } +} + +{ + // Verify that objects can be written to stream + const w = new Writable({ objectMode: true }); + + w._write = function(chunk, encoding, cb) { + assert.deepStrictEqual(chunk, { foo: 'bar' }); + cb(); + }; + + w.on('finish', common.mustCall()); + w.write({ foo: 'bar' }); + w.end(); +} + +{ + // Verify that multiple objects can be written to stream + const w = new Writable({ objectMode: true }); + const list = []; + + w._write = function(chunk, encoding, cb) { + list.push(chunk); + cb(); + }; + + w.on('finish', common.mustCall(function() { + assert.deepStrictEqual(list, [0, 1, 2, 3, 4]); + })); + + w.write(0); + w.write(1); + w.write(2); + w.write(3); + w.write(4); + w.end(); +} + +{ + // Verify that strings can be written as objects + const w = new Writable({ + objectMode: true + }); + const list = []; + + w._write = function(chunk, encoding, cb) { + list.push(chunk); + process.nextTick(cb); + }; + + w.on('finish', common.mustCall(function() { + assert.deepStrictEqual(list, ['0', '1', '2', '3', '4']); + })); + + w.write('0'); + w.write('1'); + w.write('2'); + w.write('3'); + w.write('4'); + w.end(); +} + +{ + // Verify that stream buffers finish until callback is called + const w = new Writable({ + objectMode: true + }); + let called = false; + + w._write = function(chunk, encoding, cb) { + assert.strictEqual(chunk, 'foo'); + + process.nextTick(function() { + called = true; + cb(); + }); + }; + + w.on('finish', common.mustCall(function() { + assert.strictEqual(called, true); + })); + + w.write('foo'); + w.end(); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-pipe-error-handling.js b/test/parallel/test-stream2-pipe-error-handling.js new file mode 100644 index 0000000000..e68b35897b --- /dev/null +++ b/test/parallel/test-stream2-pipe-error-handling.js @@ -0,0 +1,121 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
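+
+// Emitting 'error' on a piped destination must unpipe it from the source:
+// the first block observes the error via an 'error' listener, the second
+// (autoDestroy: false, no listener) lets the emit throw and still expects
+// 'unpipe' to have fired with the right source and destination.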
+ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); + +{ + let count = 1000; + + const source = new stream.Readable(); + source._read = function(n) { + n = Math.min(count, n); + count -= n; + source.push(Buffer.allocUnsafe(n)); + }; + + let unpipedDest; + source.unpipe = function(dest) { + unpipedDest = dest; + stream.Readable.prototype.unpipe.call(this, dest); + }; + + const dest = new stream.Writable(); + dest._write = function(chunk, encoding, cb) { + cb(); + }; + + source.pipe(dest); + + let gotErr = null; + dest.on('error', function(err) { + gotErr = err; + }); + + let unpipedSource; + dest.on('unpipe', function(src) { + unpipedSource = src; + }); + + const err = new Error('This stream turned into bacon.'); + dest.emit('error', err); + assert.strictEqual(gotErr, err); + assert.strictEqual(unpipedSource, source); + assert.strictEqual(unpipedDest, dest); +} + +{ + let count = 1000; + + const source = new stream.Readable(); + source._read = function(n) { + n = Math.min(count, n); + count -= n; + source.push(Buffer.allocUnsafe(n)); + }; + + let unpipedDest; + source.unpipe = function(dest) { + unpipedDest = dest; + stream.Readable.prototype.unpipe.call(this, dest); + }; + + const dest = new stream.Writable({ autoDestroy: false }); + dest._write = function(chunk, encoding, cb) { + cb(); + }; + + source.pipe(dest); + + let unpipedSource; + dest.on('unpipe', function(src) { + unpipedSource = src; + }); + + const err = new Error('This stream turned into bacon.'); + + let gotErr = null; + try { + dest.emit('error', err); + } catch (e) { + gotErr = e; + } + assert.strictEqual(gotErr, err); + assert.strictEqual(unpipedSource, source); + assert.strictEqual(unpipedDest, dest); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-pipe-error-once-listener.js b/test/parallel/test-stream2-pipe-error-once-listener.js new file mode 100644 index 0000000000..b199a374e1 --- /dev/null +++ b/test/parallel/test-stream2-pipe-error-once-listener.js @@ -0,0 +1,68 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
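+
+// A destination whose _write emits 'error' must not crash the process when a
+// once('error') listener is installed; the 'alldone' event emitted right
+// after the error shows the handler kept running.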
+ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +require('../common'); +const stream = require('../../lib'); + +class Read extends stream.Readable { + _read(size) { + this.push('x'); + this.push(null); + } +} + +class Write extends stream.Writable { + _write(buffer, encoding, cb) { + this.emit('error', new Error('boom')); + this.emit('alldone'); + } +} + +const read = new Read(); +const write = new Write(); + +write.once('error', () => {}); +write.once('alldone', function(err) { + silentConsole.log('ok'); +}); + +process.on('exit', function(c) { + silentConsole.error('error thrown even with listener'); +}); + +read.pipe(write); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-push.js b/test/parallel/test-stream2-push.js new file mode 100644 index 0000000000..e61b58c13b --- /dev/null +++ b/test/parallel/test-stream2-push.js @@ -0,0 +1,151 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
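+
+// Mocks the net.Socket/tcp_wrap.handle interaction: an EventEmitter source
+// feeds an explicit-push Readable, readStart()/readStop() model backpressure
+// from push() return values, and the Writable must receive the chunks in
+// order before 'finish'.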
+ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); +const { Readable, Writable } = require('../../lib'); + +const EE = require('events').EventEmitter; + + +// A mock thing a bit like the net.Socket/tcp_wrap.handle interaction + +const stream = new Readable({ + highWaterMark: 16, + encoding: 'utf8' +}); + +const source = new EE(); + +stream._read = function() { + silentConsole.error('stream._read'); + readStart(); +}; + +let ended = false; +stream.on('end', function() { + ended = true; +}); + +source.on('data', function(chunk) { + const ret = stream.push(chunk); + silentConsole.error('data', stream.readableLength); + if (!ret) + readStop(); +}); + +source.on('end', function() { + stream.push(null); +}); + +let reading = false; + +function readStart() { + silentConsole.error('readStart'); + reading = true; +} + +function readStop() { + silentConsole.error('readStop'); + reading = false; + process.nextTick(function() { + const r = stream.read(); + if (r !== null) + writer.write(r); + }); +} + +const writer = new Writable({ + decodeStrings: false +}); + +const written = []; + +const expectWritten = + [ 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg' ]; + +writer._write = function(chunk, encoding, cb) { + silentConsole.error(`WRITE ${chunk}`); + written.push(chunk); + process.nextTick(cb); +}; + +writer.on('finish', finish); + + +// Now emit some chunks. + +const chunk = 'asdfg'; + +let set = 0; +readStart(); +data(); +function data() { + assert(reading); + source.emit('data', chunk); + assert(reading); + source.emit('data', chunk); + assert(reading); + source.emit('data', chunk); + assert(reading); + source.emit('data', chunk); + assert(!reading); + if (set++ < 5) + setTimeout(data, 10); + else + end(); +} + +function finish() { + silentConsole.error('finish'); + assert.deepStrictEqual(written, expectWritten); + silentConsole.log('ok'); +} + +function end() { + source.emit('end'); + assert(!reading); + writer.end(stream.read()); + setImmediate(function() { + assert(ended); + }); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-read-sync-stack.js b/test/parallel/test-stream2-read-sync-stack.js new file mode 100644 index 0000000000..24cb41068a --- /dev/null +++ b/test/parallel/test-stream2-read-sync-stack.js @@ -0,0 +1,61 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const Readable = require('../../lib').Readable; + +// This tests synchronous read callbacks and verifies that even if they nest +// heavily the process handles it without an error + +const r = new Readable(); +const N = 256 * 1024; + +let reads = 0; +r._read = function(n) { + const chunk = reads++ === N ? null : Buffer.allocUnsafe(1); + r.push(chunk); +}; + +r.on('readable', function onReadable() { + if (!(r.readableLength % 256)) + silentConsole.error('readable', r.readableLength); + r.read(N * 2); +}); + +r.on('end', common.mustCall()); + +r.read(0); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js new file mode 100644 index 0000000000..c006c2639a --- /dev/null +++ b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js @@ -0,0 +1,132 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); + +const Readable = require('../../lib').Readable; + +test1(); +test2(); + +function test1() { + const r = new Readable(); + + // Should not end when we get a Buffer.alloc(0) or '' as the _read + // result that just means that there is *temporarily* no data, but to + // go ahead and try again later. + // + // note that this is very unusual. it only works for crypto streams + // because the other side of the stream will call read(0) to cycle + // data through openssl. 
that's why setImmediate() is used to call + // r.read(0) again later, otherwise there is no more work being done + // and the process just exits. + + const buf = Buffer.alloc(5, 'x'); + let reads = 5; + r._read = function(n) { + switch (reads--) { + case 5: + return setImmediate(() => { + return r.push(buf); + }); + case 4: + setImmediate(() => { + return r.push(Buffer.alloc(0)); + }); + return setImmediate(r.read.bind(r, 0)); + case 3: + setImmediate(r.read.bind(r, 0)); + return process.nextTick(() => { + return r.push(Buffer.alloc(0)); + }); + case 2: + setImmediate(r.read.bind(r, 0)); + return r.push(Buffer.alloc(0)); // Not-EOF! + case 1: + return r.push(buf); + case 0: + return r.push(null); // EOF + default: + throw new Error('unreachable'); + } + }; + + const results = []; + function flow() { + let chunk; + while (null !== (chunk = r.read())) + results.push(String(chunk)); + } + r.on('readable', flow); + r.on('end', () => { + results.push('EOF'); + }); + flow(); + + process.on('exit', () => { + assert.deepStrictEqual(results, [ 'xxxxx', 'xxxxx', 'EOF' ]); + silentConsole.log('ok'); + }); +} + +function test2() { + const r = new Readable({ encoding: 'base64' }); + let reads = 5; + r._read = function(n) { + if (!reads--) + return r.push(null); // EOF + return r.push(Buffer.from('x')); + }; + + const results = []; + function flow() { + let chunk; + while (null !== (chunk = r.read())) + results.push(String(chunk)); + } + r.on('readable', flow); + r.on('end', () => { + results.push('EOF'); + }); + flow(); + + process.on('exit', () => { + assert.deepStrictEqual(results, [ 'eHh4', 'eHg=', 'EOF' ]); + silentConsole.log('ok'); + }); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-readable-from-list.js b/test/parallel/test-stream2-readable-from-list.js new file mode 100644 index 0000000000..d706db2158 --- /dev/null +++ b/test/parallel/test-stream2-readable-from-list.js @@ -0,0 +1,112 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
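+
+// Exercises the internal Readable._fromList helper against a BufferList of
+// buffers and of strings (decoder: true): reads larger than, equal to and
+// smaller than the first element, over-reads, and full consumption leaving
+// an empty list.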
+ +// Flags: --expose-internals + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); +const fromList = require('../../lib').Readable._fromList; +const BufferList = require('../../lib/internal/streams/buffer_list'); +const util = require('util'); + +function bufferListFromArray(arr) { + const bl = new BufferList(); + for (let i = 0; i < arr.length; ++i) + bl.push(arr[i]); + return bl; +} + +{ + // Verify behavior with buffers + let list = [ Buffer.from('foog'), + Buffer.from('bark'), + Buffer.from('bazy'), + Buffer.from('kuel') ]; + list = bufferListFromArray(list); + + + assert.strictEqual(typeof list.head, 'object'); + assert.strictEqual(typeof list.tail, 'object'); + assert.strictEqual(list.length, 4); + + + // Read more than the first element. + let ret = fromList(6, { buffer: list, length: 16 }); + assert.strictEqual(ret.toString(), 'foogba'); + + // Read exactly the first element. + ret = fromList(2, { buffer: list, length: 10 }); + assert.strictEqual(ret.toString(), 'rk'); + + // Read less than the first element. + ret = fromList(2, { buffer: list, length: 8 }); + assert.strictEqual(ret.toString(), 'ba'); + + // Read more than we have. + ret = fromList(100, { buffer: list, length: 6 }); + assert.strictEqual(ret.toString(), 'zykuel'); + + // all consumed. + assert.deepStrictEqual(list, new BufferList()); +} + +{ + // Verify behavior with strings + let list = [ 'foog', + 'bark', + 'bazy', + 'kuel' ]; + list = bufferListFromArray(list); + + // Read more than the first element. + let ret = fromList(6, { buffer: list, length: 16, decoder: true }); + assert.strictEqual(ret, 'foogba'); + + // Read exactly the first element. + ret = fromList(2, { buffer: list, length: 10, decoder: true }); + assert.strictEqual(ret, 'rk'); + + // Read less than the first element. + ret = fromList(2, { buffer: list, length: 8, decoder: true }); + assert.strictEqual(ret, 'ba'); + + // Read more than we have. + ret = fromList(100, { buffer: list, length: 6, decoder: true }); + assert.strictEqual(ret, 'zykuel'); + + // all consumed. + assert.deepStrictEqual(list, new BufferList()); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-readable-legacy-drain.js b/test/parallel/test-stream2-readable-legacy-drain.js new file mode 100644 index 0000000000..32bd97c68e --- /dev/null +++ b/test/parallel/test-stream2-readable-legacy-drain.js @@ -0,0 +1,70 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const Stream = require('../../lib'); +const Readable = Stream.Readable; + +const r = new Readable(); +const N = 256; +let reads = 0; +r._read = function(n) { + return r.push(++reads === N ? null : Buffer.allocUnsafe(1)); +}; + +r.on('end', common.mustCall()); + +const w = new Stream(); +w.writable = true; +let buffered = 0; +w.write = function(c) { + buffered += c.length; + process.nextTick(drain); + return false; +}; + +function drain() { + assert(buffered <= 3); + buffered = 0; + w.emit('drain'); +} + +w.end = common.mustCall(); + +r.pipe(w); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-readable-non-empty-end.js b/test/parallel/test-stream2-readable-non-empty-end.js new file mode 100644 index 0000000000..865487199c --- /dev/null +++ b/test/parallel/test-stream2-readable-non-empty-end.js @@ -0,0 +1,87 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { Readable } = require('../../lib'); + +let len = 0; +const chunks = new Array(10); +for (let i = 1; i <= 10; i++) { + chunks[i - 1] = Buffer.allocUnsafe(i); + len += i; +} + +const test = new Readable(); +let n = 0; +test._read = function(size) { + const chunk = chunks[n++]; + setTimeout(function() { + test.push(chunk === undefined ? 
null : chunk); + }, 1); +}; + +test.on('end', thrower); +function thrower() { + throw new Error('this should not happen!'); +} + +let bytesread = 0; +test.on('readable', function() { + const b = len - bytesread - 1; + const res = test.read(b); + if (res) { + bytesread += res.length; + silentConsole.error(`br=${bytesread} len=${len}`); + setTimeout(next, 1); + } + test.read(0); +}); +test.read(0); + +function next() { + // Now let's make 'end' happen + test.removeListener('end', thrower); + test.on('end', common.mustCall()); + + // One to get the last byte + let r = test.read(); + assert(r); + assert.strictEqual(r.length, 1); + r = test.read(); + assert.strictEqual(r, null); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-readable-wrap-destroy.js b/test/parallel/test-stream2-readable-wrap-destroy.js new file mode 100644 index 0000000000..c7352e6523 --- /dev/null +++ b/test/parallel/test-stream2-readable-wrap-destroy.js @@ -0,0 +1,42 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); + +const { Readable } = require('../../lib'); +const EE = require('events').EventEmitter; + +const oldStream = new EE(); +oldStream.pause = () => {}; +oldStream.resume = () => {}; + +{ + new Readable({ + autoDestroy: false, + destroy: common.mustCall() + }) + .wrap(oldStream); + oldStream.emit('destroy'); +} + +{ + new Readable({ + autoDestroy: false, + destroy: common.mustCall() + }) + .wrap(oldStream); + oldStream.emit('close'); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-readable-wrap-empty.js b/test/parallel/test-stream2-readable-wrap-empty.js new file mode 100644 index 0000000000..5fe3f04983 --- /dev/null +++ b/test/parallel/test-stream2-readable-wrap-empty.js @@ -0,0 +1,53 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); + +const { Readable } = require('../../lib'); +const EE = require('events').EventEmitter; + +const oldStream = new EE(); +oldStream.pause = () => {}; +oldStream.resume = () => {}; + +const newStream = new Readable().wrap(oldStream); + +newStream + .on('readable', () => {}) + .on('end', common.mustCall()); + +oldStream.emit('end'); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-readable-wrap-error.js b/test/parallel/test-stream2-readable-wrap-error.js new file mode 100644 index 0000000000..9ac221ba8b --- /dev/null +++ b/test/parallel/test-stream2-readable-wrap-error.js @@ -0,0 +1,52 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); + +const { Readable } = require('../../lib'); +const EE = require('events').EventEmitter; + +class LegacyStream extends EE { + pause() {} + resume() {} +} + +{ + const err = new Error(); + const oldStream = new LegacyStream(); + const r = new Readable({ autoDestroy: true }) + .wrap(oldStream) + .on('error', common.mustCall(() => { + assert.strictEqual(r._readableState.errorEmitted, true); + assert.strictEqual(r._readableState.errored, err); + assert.strictEqual(r.destroyed, true); + })); + oldStream.emit('error', err); +} + +{ + const err = new Error(); + const oldStream = new LegacyStream(); + const r = new Readable({ autoDestroy: false }) + .wrap(oldStream) + .on('error', common.mustCall(() => { + assert.strictEqual(r._readableState.errorEmitted, true); + assert.strictEqual(r._readableState.errored, err); + assert.strictEqual(r.destroyed, false); + })); + oldStream.emit('error', err); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-readable-wrap.js b/test/parallel/test-stream2-readable-wrap.js new file mode 100644 index 0000000000..3c78ddde95 --- /dev/null +++ b/test/parallel/test-stream2-readable-wrap.js @@ -0,0 +1,115 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { Readable, Writable } = require('../../lib'); +const EE = require('events').EventEmitter; + +function runTest(highWaterMark, objectMode, produce) { + + const old = new EE(); + const r = new Readable({ highWaterMark, objectMode }); + assert.strictEqual(r, r.wrap(old)); + + r.on('end', common.mustCall()); + + old.pause = function() { + old.emit('pause'); + flowing = false; + }; + + old.resume = function() { + old.emit('resume'); + flow(); + }; + + // Make sure pause is only emitted once. + let pausing = false; + r.on('pause', () => { + assert.strictEqual(pausing, false); + pausing = true; + process.nextTick(() => { + pausing = false; + }); + }); + + let flowing; + let chunks = 10; + let oldEnded = false; + const expected = []; + function flow() { + flowing = true; + while (flowing && chunks-- > 0) { + const item = produce(); + expected.push(item); + old.emit('data', item); + } + if (chunks <= 0) { + oldEnded = true; + old.emit('end'); + } + } + + const w = new Writable({ highWaterMark: highWaterMark * 2, + objectMode }); + const written = []; + w._write = function(chunk, encoding, cb) { + written.push(chunk); + setTimeout(cb, 1); + }; + + w.on('finish', common.mustCall(function() { + performAsserts(); + })); + + r.pipe(w); + + flow(); + + function performAsserts() { + assert(oldEnded); + assert.deepStrictEqual(written, expected); + } +} + +runTest(100, false, function() { return Buffer.allocUnsafe(100); }); +runTest(10, false, function() { return Buffer.from('xxxxxxxxxx'); }); +runTest(1, true, function() { return { foo: 'bar' }; }); + +const objectChunks = [ 5, 'a', false, 0, '', 'xyz', { x: 4 }, 7, [], 555 ]; +runTest(1, true, function() { return objectChunks.shift(); }); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-set-encoding.js b/test/parallel/test-stream2-set-encoding.js new file mode 100644 index 0000000000..75ddb1c4f9 --- /dev/null +++ b/test/parallel/test-stream2-set-encoding.js @@ -0,0 +1,338 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { Readable: R } = require('../../lib'); + +class TestReader extends R { + constructor(n, opts) { + super(opts); + this.pos = 0; + this.len = n || 100; + } + + _read(n) { + setTimeout(() => { + if (this.pos >= this.len) { + // Double push(null) to test eos handling + this.push(null); + return this.push(null); + } + + n = Math.min(n, this.len - this.pos); + if (n <= 0) { + // Double push(null) to test eos handling + this.push(null); + return this.push(null); + } + + this.pos += n; + const ret = Buffer.alloc(n, 'a'); + + return this.push(ret); + }, 1); + } +} + +{ + // Verify utf8 encoding + const tr = new TestReader(100); + tr.setEncoding('utf8'); + const out = []; + const expect = + [ 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa' ]; + + tr.on('readable', function flow() { + let chunk; + while (null !== (chunk = tr.read(10))) + out.push(chunk); + }); + + tr.on('end', common.mustCall(function() { + assert.deepStrictEqual(out, expect); + })); +} + + +{ + // Verify hex encoding + const tr = new TestReader(100); + tr.setEncoding('hex'); + const out = []; + const expect = + [ '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161' ]; + + tr.on('readable', function flow() { + let chunk; + while (null !== (chunk = tr.read(10))) + out.push(chunk); + }); + + tr.on('end', common.mustCall(function() { + assert.deepStrictEqual(out, expect); + })); +} + +{ + // Verify hex encoding with read(13) + const tr = new TestReader(100); + tr.setEncoding('hex'); + const out = []; + const expect = + [ '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '16161' ]; + + tr.on('readable', function flow() { + let chunk; + while (null !== (chunk = tr.read(13))) + out.push(chunk); + }); + + tr.on('end', common.mustCall(function() { + assert.deepStrictEqual(out, expect); + })); +} + +{ + // Verify base64 encoding + const tr = new TestReader(100); + tr.setEncoding('base64'); + const out = []; + const expect = + [ 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYQ==' ]; + + tr.on('readable', function flow() { + let chunk; + while (null !== (chunk = tr.read(10))) + out.push(chunk); + }); + + tr.on('end', common.mustCall(function() { + assert.deepStrictEqual(out, expect); + })); +} + +{ + // Verify utf8 encoding + const tr = new TestReader(100, { encoding: 'utf8' }); + const out = []; + const expect = + [ 'aaaaaaaaaa', + 
'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa' ]; + + tr.on('readable', function flow() { + let chunk; + while (null !== (chunk = tr.read(10))) + out.push(chunk); + }); + + tr.on('end', common.mustCall(function() { + assert.deepStrictEqual(out, expect); + })); +} + + +{ + // Verify hex encoding + const tr = new TestReader(100, { encoding: 'hex' }); + const out = []; + const expect = + [ '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161' ]; + + tr.on('readable', function flow() { + let chunk; + while (null !== (chunk = tr.read(10))) + out.push(chunk); + }); + + tr.on('end', common.mustCall(function() { + assert.deepStrictEqual(out, expect); + })); +} + +{ + // Verify hex encoding with read(13) + const tr = new TestReader(100, { encoding: 'hex' }); + const out = []; + const expect = + [ '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '16161' ]; + + tr.on('readable', function flow() { + let chunk; + while (null !== (chunk = tr.read(13))) + out.push(chunk); + }); + + tr.on('end', common.mustCall(function() { + assert.deepStrictEqual(out, expect); + })); +} + +{ + // Verify base64 encoding + const tr = new TestReader(100, { encoding: 'base64' }); + const out = []; + const expect = + [ 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYQ==' ]; + + tr.on('readable', function flow() { + let chunk; + while (null !== (chunk = tr.read(10))) + out.push(chunk); + }); + + tr.on('end', common.mustCall(function() { + assert.deepStrictEqual(out, expect); + })); +} + +{ + // Verify chaining behavior + const tr = new TestReader(100); + assert.deepStrictEqual(tr.setEncoding('utf8'), tr); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-transform.js b/test/parallel/test-stream2-transform.js new file mode 100644 index 0000000000..a086977206 --- /dev/null +++ b/test/parallel/test-stream2-transform.js @@ -0,0 +1,485 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); +const assert = require('assert'); +const { PassThrough, Transform } = require('../../lib'); + +{ + // Verify writable side consumption + const tx = new Transform({ + highWaterMark: 10 + }); + + let transformed = 0; + tx._transform = function(chunk, encoding, cb) { + transformed += chunk.length; + tx.push(chunk); + cb(); + }; + + for (let i = 1; i <= 10; i++) { + tx.write(Buffer.allocUnsafe(i)); + } + tx.end(); + + assert.strictEqual(tx.readableLength, 10); + assert.strictEqual(transformed, 10); + assert.deepStrictEqual(tx.writableBuffer.map(function(c) { + return c.chunk.length; + }), [5, 6, 7, 8, 9, 10]); +} + +{ + // Verify passthrough behavior + const pt = new PassThrough(); + + pt.write(Buffer.from('foog')); + pt.write(Buffer.from('bark')); + pt.write(Buffer.from('bazy')); + pt.write(Buffer.from('kuel')); + pt.end(); + + assert.strictEqual(pt.read(5).toString(), 'foogb'); + assert.strictEqual(pt.read(5).toString(), 'arkba'); + assert.strictEqual(pt.read(5).toString(), 'zykue'); + assert.strictEqual(pt.read(5).toString(), 'l'); +} + +{ + // Verify object passthrough behavior + const pt = new PassThrough({ objectMode: true }); + + pt.write(1); + pt.write(true); + pt.write(false); + pt.write(0); + pt.write('foo'); + pt.write(''); + pt.write({ a: 'b' }); + pt.end(); + + assert.strictEqual(pt.read(), 1); + assert.strictEqual(pt.read(), true); + assert.strictEqual(pt.read(), false); + assert.strictEqual(pt.read(), 0); + assert.strictEqual(pt.read(), 'foo'); + assert.strictEqual(pt.read(), ''); + assert.deepStrictEqual(pt.read(), { a: 'b' }); +} + +{ + // Verify passthrough constructor behavior + const pt = PassThrough(); + + assert(pt instanceof PassThrough); +} + +{ + // Verify transform constructor behavior + const pt = Transform(); + + assert(pt instanceof Transform); +} + +{ + // Perform a simple transform + const pt = new Transform(); + pt._transform = function(c, e, cb) { + const ret = Buffer.alloc(c.length, 'x'); + pt.push(ret); + cb(); + }; + + pt.write(Buffer.from('foog')); + pt.write(Buffer.from('bark')); + pt.write(Buffer.from('bazy')); + pt.write(Buffer.from('kuel')); + pt.end(); + + assert.strictEqual(pt.read(5).toString(), 'xxxxx'); + assert.strictEqual(pt.read(5).toString(), 'xxxxx'); + assert.strictEqual(pt.read(5).toString(), 'xxxxx'); + assert.strictEqual(pt.read(5).toString(), 'x'); +} + +{ + // Verify simple object transform + const pt = new Transform({ objectMode: true }); + pt._transform = function(c, e, cb) { + pt.push(JSON.stringify(c)); + cb(); + }; + + pt.write(1); + pt.write(true); + pt.write(false); + pt.write(0); + pt.write('foo'); + pt.write(''); + pt.write({ a: 'b' }); + pt.end(); + + assert.strictEqual(pt.read(), '1'); + assert.strictEqual(pt.read(), 'true'); + assert.strictEqual(pt.read(), 'false'); + assert.strictEqual(pt.read(), '0'); + assert.strictEqual(pt.read(), '"foo"'); + assert.strictEqual(pt.read(), '""'); + 
assert.strictEqual(pt.read(), '{"a":"b"}'); +} + +{ + // Verify async passthrough + const pt = new Transform(); + pt._transform = function(chunk, encoding, cb) { + setTimeout(function() { + pt.push(chunk); + cb(); + }, 10); + }; + + pt.write(Buffer.from('foog')); + pt.write(Buffer.from('bark')); + pt.write(Buffer.from('bazy')); + pt.write(Buffer.from('kuel')); + pt.end(); + + pt.on('finish', common.mustCall(function() { + assert.strictEqual(pt.read(5).toString(), 'foogb'); + assert.strictEqual(pt.read(5).toString(), 'arkba'); + assert.strictEqual(pt.read(5).toString(), 'zykue'); + assert.strictEqual(pt.read(5).toString(), 'l'); + })); +} + +{ + // Verify asymmetric transform (expand) + const pt = new Transform(); + + // Emit each chunk 2 times. + pt._transform = function(chunk, encoding, cb) { + setTimeout(function() { + pt.push(chunk); + setTimeout(function() { + pt.push(chunk); + cb(); + }, 10); + }, 10); + }; + + pt.write(Buffer.from('foog')); + pt.write(Buffer.from('bark')); + pt.write(Buffer.from('bazy')); + pt.write(Buffer.from('kuel')); + pt.end(); + + pt.on('finish', common.mustCall(function() { + assert.strictEqual(pt.read(5).toString(), 'foogf'); + assert.strictEqual(pt.read(5).toString(), 'oogba'); + assert.strictEqual(pt.read(5).toString(), 'rkbar'); + assert.strictEqual(pt.read(5).toString(), 'kbazy'); + assert.strictEqual(pt.read(5).toString(), 'bazyk'); + assert.strictEqual(pt.read(5).toString(), 'uelku'); + assert.strictEqual(pt.read(5).toString(), 'el'); + })); +} + +{ + // Verify asymmetric transform (compress) + const pt = new Transform(); + + // Each output is the first char of 3 consecutive chunks, + // or whatever's left. + pt.state = ''; + + pt._transform = function(chunk, encoding, cb) { + if (!chunk) + chunk = ''; + const s = chunk.toString(); + setTimeout(() => { + this.state += s.charAt(0); + if (this.state.length === 3) { + pt.push(Buffer.from(this.state)); + this.state = ''; + } + cb(); + }, 10); + }; + + pt._flush = function(cb) { + // Just output whatever we have. + pt.push(Buffer.from(this.state)); + this.state = ''; + cb(); + }; + + pt.write(Buffer.from('aaaa')); + pt.write(Buffer.from('bbbb')); + pt.write(Buffer.from('cccc')); + pt.write(Buffer.from('dddd')); + pt.write(Buffer.from('eeee')); + pt.write(Buffer.from('aaaa')); + pt.write(Buffer.from('bbbb')); + pt.write(Buffer.from('cccc')); + pt.write(Buffer.from('dddd')); + pt.write(Buffer.from('eeee')); + pt.write(Buffer.from('aaaa')); + pt.write(Buffer.from('bbbb')); + pt.write(Buffer.from('cccc')); + pt.write(Buffer.from('dddd')); + pt.end(); + + // 'abcdeabcdeabcd' + pt.on('finish', common.mustCall(function() { + assert.strictEqual(pt.read(5).toString(), 'abcde'); + assert.strictEqual(pt.read(5).toString(), 'abcde'); + assert.strictEqual(pt.read(5).toString(), 'abcd'); + })); +} + +// This tests for a stall when data is written to a full stream +// that has empty transforms. 
+{ + // Verify complex transform behavior + let count = 0; + let saved = null; + const pt = new Transform({ highWaterMark: 3 }); + pt._transform = function(c, e, cb) { + if (count++ === 1) + saved = c; + else { + if (saved) { + pt.push(saved); + saved = null; + } + pt.push(c); + } + + cb(); + }; + + pt.once('readable', function() { + process.nextTick(function() { + pt.write(Buffer.from('d')); + pt.write(Buffer.from('ef'), common.mustCall(function() { + pt.end(); + })); + assert.strictEqual(pt.read().toString(), 'abcdef'); + assert.strictEqual(pt.read(), null); + }); + }); + + pt.write(Buffer.from('abc')); +} + + +{ + // Verify passthrough event emission + const pt = new PassThrough(); + let emits = 0; + pt.on('readable', function() { + emits++; + }); + + pt.write(Buffer.from('foog')); + pt.write(Buffer.from('bark')); + + assert.strictEqual(emits, 0); + assert.strictEqual(pt.read(5).toString(), 'foogb'); + assert.strictEqual(String(pt.read(5)), 'null'); + assert.strictEqual(emits, 0); + + pt.write(Buffer.from('bazy')); + pt.write(Buffer.from('kuel')); + + assert.strictEqual(emits, 0); + assert.strictEqual(pt.read(5).toString(), 'arkba'); + assert.strictEqual(pt.read(5).toString(), 'zykue'); + assert.strictEqual(pt.read(5), null); + + pt.end(); + + assert.strictEqual(emits, 1); + assert.strictEqual(pt.read(5).toString(), 'l'); + assert.strictEqual(pt.read(5), null); + assert.strictEqual(emits, 1); +} + +{ + // Verify passthrough event emission reordering + const pt = new PassThrough(); + let emits = 0; + pt.on('readable', function() { + emits++; + }); + + pt.write(Buffer.from('foog')); + pt.write(Buffer.from('bark')); + + assert.strictEqual(emits, 0); + assert.strictEqual(pt.read(5).toString(), 'foogb'); + assert.strictEqual(pt.read(5), null); + + pt.once('readable', common.mustCall(function() { + assert.strictEqual(pt.read(5).toString(), 'arkba'); + assert.strictEqual(pt.read(5), null); + + pt.once('readable', common.mustCall(function() { + assert.strictEqual(pt.read(5).toString(), 'zykue'); + assert.strictEqual(pt.read(5), null); + pt.once('readable', common.mustCall(function() { + assert.strictEqual(pt.read(5).toString(), 'l'); + assert.strictEqual(pt.read(5), null); + assert.strictEqual(emits, 3); + })); + pt.end(); + })); + pt.write(Buffer.from('kuel')); + })); + + pt.write(Buffer.from('bazy')); +} + +{ + // Verify passthrough facade + const pt = new PassThrough(); + const datas = []; + pt.on('data', function(chunk) { + datas.push(chunk.toString()); + }); + + pt.on('end', common.mustCall(function() { + assert.deepStrictEqual(datas, ['foog', 'bark', 'bazy', 'kuel']); + })); + + pt.write(Buffer.from('foog')); + setTimeout(function() { + pt.write(Buffer.from('bark')); + setTimeout(function() { + pt.write(Buffer.from('bazy')); + setTimeout(function() { + pt.write(Buffer.from('kuel')); + setTimeout(function() { + pt.end(); + }, 10); + }, 10); + }, 10); + }, 10); +} + +{ + // Verify object transform (JSON parse) + const jp = new Transform({ objectMode: true }); + jp._transform = function(data, encoding, cb) { + try { + jp.push(JSON.parse(data)); + cb(); + } catch (er) { + cb(er); + } + }; + + // Anything except null/undefined is fine. + // those are "magic" in the stream API, because they signal EOF. 
+ const objects = [ + { foo: 'bar' }, + 100, + 'string', + { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }, + ]; + + let ended = false; + jp.on('end', function() { + ended = true; + }); + + objects.forEach(function(obj) { + jp.write(JSON.stringify(obj)); + const res = jp.read(); + assert.deepStrictEqual(res, obj); + }); + + jp.end(); + // Read one more time to get the 'end' event + jp.read(); + + process.nextTick(common.mustCall(function() { + assert.strictEqual(ended, true); + })); +} + +{ + // Verify object transform (JSON stringify) + const js = new Transform({ objectMode: true }); + js._transform = function(data, encoding, cb) { + try { + js.push(JSON.stringify(data)); + cb(); + } catch (er) { + cb(er); + } + }; + + // Anything except null/undefined is fine. + // those are "magic" in the stream API, because they signal EOF. + const objects = [ + { foo: 'bar' }, + 100, + 'string', + { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }, + ]; + + let ended = false; + js.on('end', function() { + ended = true; + }); + + objects.forEach(function(obj) { + js.write(obj); + const res = js.read(); + assert.strictEqual(res, JSON.stringify(obj)); + }); + + js.end(); + // Read one more time to get the 'end' event + js.read(); + + process.nextTick(common.mustCall(function() { + assert.strictEqual(ended, true); + })); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-unpipe-drain.js b/test/parallel/test-stream2-unpipe-drain.js new file mode 100644 index 0000000000..5557077f57 --- /dev/null +++ b/test/parallel/test-stream2-unpipe-drain.js @@ -0,0 +1,87 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); + +const stream = require('../../lib'); + +class TestWriter extends stream.Writable { + _write(buffer, encoding, callback) { + silentConsole.log('write called'); + // Super slow write stream (callback never called) + } +} + +const dest = new TestWriter(); + +class TestReader extends stream.Readable { + constructor() { + super(); + this.reads = 0; + } + + _read(size) { + this.reads += 1; + this.push(Buffer.alloc(size)); + } +} + +const src1 = new TestReader(); +const src2 = new TestReader(); + +src1.pipe(dest); + +src1.once('readable', () => { + process.nextTick(() => { + + src2.pipe(dest); + + src2.once('readable', () => { + process.nextTick(() => { + + src1.unpipe(dest); + }); + }); + }); +}); + + +process.on('exit', () => { + assert.strictEqual(src1.reads, 2); + assert.strictEqual(src2.reads, 2); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-unpipe-leak.js b/test/parallel/test-stream2-unpipe-leak.js new file mode 100644 index 0000000000..af96915cac --- /dev/null +++ b/test/parallel/test-stream2-unpipe-leak.js @@ -0,0 +1,88 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); + +const chunk = Buffer.from('hallo'); + +class TestWriter extends stream.Writable { + _write(buffer, encoding, callback) { + callback(null); + } +} + +const dest = new TestWriter(); + +// Set this high so that we'd trigger a nextTick warning +// and/or RangeError if we do maybeReadMore wrong. 
+class TestReader extends stream.Readable { + constructor() { + super({ + highWaterMark: 0x10000 + }); + } + + _read(size) { + this.push(chunk); + } +} + +const src = new TestReader(); + +for (let i = 0; i < 10; i++) { + src.pipe(dest); + src.unpipe(dest); +} + +assert.strictEqual(src.listeners('end').length, 0); +assert.strictEqual(src.listeners('readable').length, 0); + +assert.strictEqual(dest.listeners('unpipe').length, 0); +assert.strictEqual(dest.listeners('drain').length, 0); +assert.strictEqual(dest.listeners('error').length, 0); +assert.strictEqual(dest.listeners('close').length, 0); +assert.strictEqual(dest.listeners('finish').length, 0); + +silentConsole.error(src._readableState); +process.on('exit', function() { + src.readableBuffer.length = 0; + silentConsole.error(src._readableState); + assert(src.readableLength >= src.readableHighWaterMark); + silentConsole.log('ok'); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream2-writable.js b/test/parallel/test-stream2-writable.js new file mode 100644 index 0000000000..bb5c04a777 --- /dev/null +++ b/test/parallel/test-stream2-writable.js @@ -0,0 +1,474 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { Writable: W, Duplex: D } = require('../../lib'); +const assert = require('assert'); + +class TestWriter extends W { + constructor(opts) { + super(opts); + this.buffer = []; + this.written = 0; + } + + _write(chunk, encoding, cb) { + // Simulate a small unpredictable latency + setTimeout(() => { + this.buffer.push(chunk.toString()); + this.written += chunk.length; + cb(); + }, Math.floor(Math.random() * 10)); + } +} + +const chunks = new Array(50); +for (let i = 0; i < chunks.length; i++) { + chunks[i] = 'x'.repeat(i); +} + +{ + // Verify fast writing + const tw = new TestWriter({ + highWaterMark: 100 + }); + + tw.on('finish', common.mustCall(function() { + // Got chunks in the right order + assert.deepStrictEqual(tw.buffer, chunks); + })); + + chunks.forEach(function(chunk) { + // Ignore backpressure. Just buffer it all up. 
+ tw.write(chunk); + }); + tw.end(); +} + +{ + // Verify slow writing + const tw = new TestWriter({ + highWaterMark: 100 + }); + + tw.on('finish', common.mustCall(function() { + // Got chunks in the right order + assert.deepStrictEqual(tw.buffer, chunks); + })); + + let i = 0; + (function W() { + tw.write(chunks[i++]); + if (i < chunks.length) + setTimeout(W, 10); + else + tw.end(); + })(); +} + +{ + // Verify write backpressure + const tw = new TestWriter({ + highWaterMark: 50 + }); + + let drains = 0; + + tw.on('finish', common.mustCall(function() { + // Got chunks in the right order + assert.deepStrictEqual(tw.buffer, chunks); + assert.strictEqual(drains, 17); + })); + + tw.on('drain', function() { + drains++; + }); + + let i = 0; + (function W() { + let ret; + do { + ret = tw.write(chunks[i++]); + } while (ret !== false && i < chunks.length); + + if (i < chunks.length) { + assert(tw.writableLength >= 50); + tw.once('drain', W); + } else { + tw.end(); + } + })(); +} + +{ + // Verify write buffersize + const tw = new TestWriter({ + highWaterMark: 100 + }); + + const encodings = + [ 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'latin1', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined ]; + + tw.on('finish', function() { + // Got the expected chunks + assert.deepStrictEqual(tw.buffer, chunks); + }); + + chunks.forEach(function(chunk, i) { + const enc = encodings[i % encodings.length]; + chunk = Buffer.from(chunk); + tw.write(chunk.toString(enc), enc); + }); +} + +{ + // Verify write with no buffersize + const tw = new TestWriter({ + highWaterMark: 100, + decodeStrings: false + }); + + tw._write = function(chunk, encoding, cb) { + assert.strictEqual(typeof chunk, 'string'); + chunk = Buffer.from(chunk, encoding); + return TestWriter.prototype._write.call(this, chunk, encoding, cb); + }; + + const encodings = + [ 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'latin1', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined ]; + + tw.on('finish', function() { + // Got the expected chunks + assert.deepStrictEqual(tw.buffer, chunks); + }); + + chunks.forEach(function(chunk, i) { + const enc = encodings[i % encodings.length]; + chunk = Buffer.from(chunk); + tw.write(chunk.toString(enc), enc); + }); +} + +{ + // Verify write callbacks + const callbacks = chunks.map(function(chunk, i) { + return [i, function() { + callbacks._called[i] = chunk; + }]; + }).reduce(function(set, x) { + set[`callback-${x[0]}`] = x[1]; + return set; + }, {}); + callbacks._called = []; + + const tw = new TestWriter({ + highWaterMark: 100 + }); + + tw.on('finish', common.mustCall(function() { + process.nextTick(common.mustCall(function() { + // Got chunks in the right order + assert.deepStrictEqual(tw.buffer, chunks); + // Called all callbacks + assert.deepStrictEqual(callbacks._called, chunks); + })); + })); + + chunks.forEach(function(chunk, i) { + tw.write(chunk, callbacks[`callback-${i}`]); + }); + tw.end(); +} + +{ + // Verify end() callback + const tw = new TestWriter(); + tw.end(common.mustCall()); +} + +const helloWorldBuffer = Buffer.from('hello world'); + +{ + // Verify end() callback with chunk + const tw = new TestWriter(); + tw.end(helloWorldBuffer, common.mustCall()); +} + +{ + // Verify end() callback with chunk and encoding + const tw = new TestWriter(); + tw.end('hello world', 'ascii', common.mustCall()); +} + +{ + // Verify end() callback after write() call + const tw = new TestWriter(); + tw.write(helloWorldBuffer); + tw.end(common.mustCall()); +} + 
+{ + // Verify end() callback after write() callback + const tw = new TestWriter(); + let writeCalledback = false; + tw.write(helloWorldBuffer, function() { + writeCalledback = true; + }); + tw.end(common.mustCall(function() { + assert.strictEqual(writeCalledback, true); + })); +} + +{ + // Verify encoding is ignored for buffers + const tw = new W(); + const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb'; + tw._write = common.mustCall(function(chunk) { + assert.strictEqual(chunk.toString('hex'), hex); + }); + const buf = Buffer.from(hex, 'hex'); + tw.write(buf, 'latin1'); +} + +{ + // Verify writables cannot be piped + const w = new W({ autoDestroy: false }); + w._write = common.mustNotCall(); + let gotError = false; + w.on('error', function() { + gotError = true; + }); + w.pipe(process.stdout); + assert.strictEqual(gotError, true); +} + +{ + // Verify that duplex streams cannot be piped + const d = new D(); + d._read = common.mustCall(); + d._write = common.mustNotCall(); + let gotError = false; + d.on('error', function() { + gotError = true; + }); + d.pipe(process.stdout); + assert.strictEqual(gotError, false); +} + +{ + // Verify that end(chunk) twice is an error + const w = new W(); + w._write = common.mustCall((msg) => { + assert.strictEqual(msg.toString(), 'this is the end'); + }); + let gotError = false; + w.on('error', function(er) { + gotError = true; + assert.strictEqual(er.message, 'write after end'); + }); + w.end('this is the end'); + w.end('and so is this'); + process.nextTick(common.mustCall(function() { + assert.strictEqual(gotError, true); + })); +} + +{ + // Verify stream doesn't end while writing + const w = new W(); + let wrote = false; + w._write = function(chunk, e, cb) { + assert.strictEqual(this.writing, undefined); + wrote = true; + this.writing = true; + setTimeout(() => { + this.writing = false; + cb(); + }, 1); + }; + w.on('finish', common.mustCall(function() { + assert.strictEqual(wrote, true); + assert.strictEqual(this.writing, false); + })); + w.write(Buffer.alloc(0)); + w.end(); +} + +{ + // Verify finish does not come before write() callback + const w = new W(); + let writeCb = false; + w._write = function(chunk, e, cb) { + setTimeout(function() { + writeCb = true; + cb(); + }, 10); + }; + w.on('finish', common.mustCall(function() { + assert.strictEqual(writeCb, true); + })); + w.write(Buffer.alloc(0)); + w.end(); +} + +{ + // Verify finish does not come before synchronous _write() callback + const w = new W(); + let writeCb = false; + w._write = function(chunk, e, cb) { + cb(); + }; + w.on('finish', common.mustCall(function() { + assert.strictEqual(writeCb, true); + })); + w.write(Buffer.alloc(0), function() { + writeCb = true; + }); + w.end(); +} + +{ + // Verify finish is emitted if the last chunk is empty + const w = new W(); + w._write = function(chunk, e, cb) { + process.nextTick(cb); + }; + w.on('finish', common.mustCall()); + w.write(Buffer.allocUnsafe(1)); + w.end(Buffer.alloc(0)); +} + +{ + // Verify that finish is emitted after shutdown + const w = new W(); + let shutdown = false; + + w._final = common.mustCall(function(cb) { + assert.strictEqual(this, w); + setTimeout(function() { + shutdown = true; + cb(); + }, 100); + }); + w._write = function(chunk, e, cb) { + process.nextTick(cb); + }; + w.on('finish', common.mustCall(function() { + assert.strictEqual(shutdown, true); + })); + w.write(Buffer.allocUnsafe(1)); + w.end(Buffer.allocUnsafe(0)); +} + +{ + // Verify that error is only emitted once when failing in _finish. 
+ const w = new W(); + + w._final = common.mustCall(function(cb) { + cb(new Error('test')); + }); + w.on('error', common.mustCall((err) => { + assert.strictEqual(w._writableState.errorEmitted, true); + assert.strictEqual(err.message, 'test'); + w.on('error', common.mustNotCall()); + w.destroy(new Error()); + })); + w.end(); +} + +{ + // Verify that error is only emitted once when failing in write. + const w = new W(); + w.on('error', common.mustNotCall()); + assert.throws(() => { + w.write(null); + }, { + code: 'ERR_STREAM_NULL_VALUES' + }); +} + +{ + // Verify that error is only emitted once when failing in write after end. + const w = new W(); + w.on('error', common.mustCall((err) => { + assert.strictEqual(w._writableState.errorEmitted, true); + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); + })); + w.end(); + w.write('hello'); + w.destroy(new Error()); +} + +{ + // Verify that finish is not emitted after error + const w = new W(); + + w._final = common.mustCall(function(cb) { + cb(new Error()); + }); + w._write = function(chunk, e, cb) { + process.nextTick(cb); + }; + w.on('error', common.mustCall()); + w.on('prefinish', common.mustNotCall()); + w.on('finish', common.mustNotCall()); + w.write(Buffer.allocUnsafe(1)); + w.end(Buffer.allocUnsafe(0)); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream3-cork-end.js b/test/parallel/test-stream3-cork-end.js new file mode 100644 index 0000000000..48daff946c --- /dev/null +++ b/test/parallel/test-stream3-cork-end.js @@ -0,0 +1,106 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); +const Writable = stream.Writable; + +// Test the buffering behavior of Writable streams. +// +// The call to cork() triggers storing chunks which are flushed +// on calling end() and the stream subsequently ended. +// +// node version target: 0.12 + +const expectedChunks = ['please', 'buffer', 'me', 'kindly']; +const inputChunks = expectedChunks.slice(0); +let seenChunks = []; +let seenEnd = false; + +const w = new Writable(); +// Let's arrange to store the chunks. +w._write = function(chunk, encoding, cb) { + // Stream end event is not seen before the last write. + assert.ok(!seenEnd); + // Default encoding given none was specified. + assert.strictEqual(encoding, 'buffer'); + + seenChunks.push(chunk); + cb(); +}; +// Let's record the stream end event. +w.on('finish', () => { + seenEnd = true; +}); + +function writeChunks(remainingChunks, callback) { + const writeChunk = remainingChunks.shift(); + let writeState; + + if (writeChunk) { + setImmediate(() => { + writeState = w.write(writeChunk); + // We were not told to stop writing. + assert.ok(writeState); + + writeChunks(remainingChunks, callback); + }); + } else { + callback(); + } +} + +// Do an initial write. +w.write('stuff'); +// The write was immediate. +assert.strictEqual(seenChunks.length, 1); +// Reset the seen chunks. +seenChunks = []; + +// Trigger stream buffering. +w.cork(); + +// Write the bufferedChunks. +writeChunks(inputChunks, () => { + // Should not have seen anything yet. + assert.strictEqual(seenChunks.length, 0); + + // Trigger flush and ending the stream. + w.end(); + + // Stream should not ended in current tick. 
+ assert.ok(!seenEnd); + + // Buffered bytes should be seen in current tick. + assert.strictEqual(seenChunks.length, 4); + + // Did the chunks match. + for (let i = 0, l = expectedChunks.length; i < l; i++) { + const seen = seenChunks[i]; + // There was a chunk. + assert.ok(seen); + + const expected = Buffer.from(expectedChunks[i]); + // It was what we expected. + assert.ok(seen.equals(expected)); + } + + setImmediate(() => { + // Stream should have ended in next tick. + assert.ok(seenEnd); + }); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream3-cork-uncork.js b/test/parallel/test-stream3-cork-uncork.js new file mode 100644 index 0000000000..da60e247b6 --- /dev/null +++ b/test/parallel/test-stream3-cork-uncork.js @@ -0,0 +1,101 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); +const stream = require('../../lib'); +const Writable = stream.Writable; + +// Test the buffering behavior of Writable streams. +// +// The call to cork() triggers storing chunks which are flushed +// on calling uncork() in the same tick. +// +// node version target: 0.12 + +const expectedChunks = ['please', 'buffer', 'me', 'kindly']; +const inputChunks = expectedChunks.slice(0); +let seenChunks = []; +let seenEnd = false; + +const w = new Writable(); +// Let's arrange to store the chunks. +w._write = function(chunk, encoding, cb) { + // Default encoding given none was specified. + assert.strictEqual(encoding, 'buffer'); + + seenChunks.push(chunk); + cb(); +}; +// Let's record the stream end event. +w.on('finish', () => { + seenEnd = true; +}); + +function writeChunks(remainingChunks, callback) { + const writeChunk = remainingChunks.shift(); + let writeState; + + if (writeChunk) { + setImmediate(() => { + writeState = w.write(writeChunk); + // We were not told to stop writing. + assert.ok(writeState); + + writeChunks(remainingChunks, callback); + }); + } else { + callback(); + } +} + +// Do an initial write. +w.write('stuff'); +// The write was immediate. +assert.strictEqual(seenChunks.length, 1); +// Reset the chunks seen so far. +seenChunks = []; + +// Trigger stream buffering. +w.cork(); + +// Write the bufferedChunks. +writeChunks(inputChunks, () => { + // Should not have seen anything yet. + assert.strictEqual(seenChunks.length, 0); + + // Trigger writing out the buffer. + w.uncork(); + + // Buffered bytes should be seen in current tick. + assert.strictEqual(seenChunks.length, 4); + + // Did the chunks match. + for (let i = 0, l = expectedChunks.length; i < l; i++) { + const seen = seenChunks[i]; + // There was a chunk. + assert.ok(seen); + + const expected = Buffer.from(expectedChunks[i]); + // It was what we expected. + assert.ok(seen.equals(expected)); + } + + setImmediate(() => { + // The stream should not have been ended. 
+ assert.ok(!seenEnd); + }); +}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream3-pause-then-read.js b/test/parallel/test-stream3-pause-then-read.js new file mode 100644 index 0000000000..b03b4e7c37 --- /dev/null +++ b/test/parallel/test-stream3-pause-then-read.js @@ -0,0 +1,185 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); + +const stream = require('../../lib'); +const Readable = stream.Readable; +const Writable = stream.Writable; + +const totalChunks = 100; +const chunkSize = 99; +const expectTotalData = totalChunks * chunkSize; +let expectEndingData = expectTotalData; + +const r = new Readable({ highWaterMark: 1000 }); +let chunks = totalChunks; +r._read = function(n) { + silentConsole.log('_read called', chunks); + if (!(chunks % 2)) + setImmediate(push); + else if (!(chunks % 3)) + process.nextTick(push); + else + push(); +}; + +let totalPushed = 0; +function push() { + const chunk = chunks-- > 0 ? Buffer.alloc(chunkSize, 'x') : null; + if (chunk) { + totalPushed += chunk.length; + } + silentConsole.log('chunks', chunks); + r.push(chunk); +} + +read100(); + +// First we read 100 bytes. +function read100() { + readn(100, onData); +} + +function readn(n, then) { + silentConsole.error(`read ${n}`); + expectEndingData -= n; + (function read() { + const c = r.read(n); + silentConsole.error('c', c); + if (!c) + r.once('readable', read); + else { + assert.strictEqual(c.length, n); + assert(!r.readableFlowing); + then(); + } + })(); +} + +// Then we listen to some data events. +function onData() { + expectEndingData -= 100; + silentConsole.error('onData'); + let seen = 0; + r.on('data', function od(c) { + seen += c.length; + if (seen >= 100) { + // Seen enough + r.removeListener('data', od); + r.pause(); + if (seen > 100) { + // Oh no, seen too much! + // Put the extra back. + const diff = seen - 100; + r.unshift(c.slice(c.length - diff)); + silentConsole.error('seen too much', seen, diff); + } + + // Nothing should be lost in-between. + setImmediate(pipeLittle); + } + }); +} + +// Just pipe 200 bytes, then unshift the extra and unpipe. 
+function pipeLittle() { + expectEndingData -= 200; + silentConsole.error('pipe a little'); + const w = new Writable(); + let written = 0; + w.on('finish', () => { + assert.strictEqual(written, 200); + setImmediate(read1234); + }); + w._write = function(chunk, encoding, cb) { + written += chunk.length; + if (written >= 200) { + r.unpipe(w); + w.end(); + cb(); + if (written > 200) { + const diff = written - 200; + written -= diff; + r.unshift(chunk.slice(chunk.length - diff)); + } + } else { + setImmediate(cb); + } + }; + r.pipe(w); +} + +// Now read 1234 more bytes. +function read1234() { + readn(1234, resumePause); +} + +function resumePause() { + silentConsole.error('resumePause'); + // Don't read anything, just resume and re-pause a whole bunch. + r.resume(); + r.pause(); + r.resume(); + r.pause(); + r.resume(); + r.pause(); + r.resume(); + r.pause(); + r.resume(); + r.pause(); + setImmediate(pipe); +} + + +function pipe() { + silentConsole.error('pipe the rest'); + const w = new Writable(); + let written = 0; + w._write = function(chunk, encoding, cb) { + written += chunk.length; + cb(); + }; + w.on('finish', () => { + silentConsole.error('written', written, totalPushed); + assert.strictEqual(written, expectEndingData); + assert.strictEqual(totalPushed, expectTotalData); + silentConsole.log('ok'); + }); + r.pipe(w); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-streams-highwatermark.js b/test/parallel/test-streams-highwatermark.js new file mode 100644 index 0000000000..daab8f3eae --- /dev/null +++ b/test/parallel/test-streams-highwatermark.js @@ -0,0 +1,102 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +const common = require('../common'); + +const assert = require('assert'); +const stream = require('../../lib'); +const { inspect } = require('util'); + +{ + // This test ensures that the stream implementation correctly handles values + // for highWaterMark which exceed the range of signed 32 bit integers and + // rejects invalid values. + + // This number exceeds the range of 32 bit integer arithmetic but should still + // be handled correctly. + const ovfl = Number.MAX_SAFE_INTEGER; + + const readable = stream.Readable({ highWaterMark: ovfl }); + assert.strictEqual(readable._readableState.highWaterMark, ovfl); + + const writable = stream.Writable({ highWaterMark: ovfl }); + assert.strictEqual(writable._writableState.highWaterMark, ovfl); + + for (const invalidHwm of [true, false, '5', {}, -5, NaN]) { + for (const type of [stream.Readable, stream.Writable]) { + assert.throws(() => { + type({ highWaterMark: invalidHwm }); + }, { + name: 'TypeError', + code: 'ERR_INVALID_ARG_VALUE', + message: "The property 'options.highWaterMark' is invalid. 
" + + `Received ${inspect(invalidHwm)}` + }); + } + } +} + +{ + // This test ensures that the push method's implementation + // correctly handles the edge case where the highWaterMark and + // the state.length are both zero + + const readable = stream.Readable({ highWaterMark: 0 }); + + for (let i = 0; i < 3; i++) { + const needMoreData = readable.push(); + assert.strictEqual(needMoreData, true); + } +} + +{ + // This test ensures that the read(n) method's implementation + // correctly handles the edge case where the highWaterMark, state.length + // and n are all zero + + const readable = stream.Readable({ highWaterMark: 0 }); + + readable._read = common.mustCall(); + readable.read(0); +} + +{ + // Parse size as decimal integer + ['1', '1.0', 1].forEach((size) => { + const readable = new stream.Readable({ + read: common.mustCall(), + highWaterMark: 0, + }); + readable.read(size); + + assert.strictEqual(readable._readableState.highWaterMark, Number(size)); + }); +} + +{ + // Test highwatermark limit + const hwm = 0x40000000 + 1; + const readable = stream.Readable({ + read() {}, + }); + + assert.throws(() => readable.read(hwm), common.expectsError({ + code: 'ERR_OUT_OF_RANGE', + message: 'The value of "size" is out of range.' + + ' It must be <= 1GiB. Received ' + + hwm, + })); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/test-browser.js b/test/test-browser.js new file mode 100644 index 0000000000..e7ad511125 --- /dev/null +++ b/test/test-browser.js @@ -0,0 +1,84 @@ +'use strict' + +const test = require('tape') + +// if (!global.console) { +// global.console = {} +// } +// if (!global.console.log) { +// global.console.log = function () {} +// } +// if (!global.console.error) { +// global.console.error = global.console.log +// } +// if (!global.console.info) { +// global.console.info = global.console.log +// } + +// // TODO: add replacements instead +// global.process = { +// env: {}, +// on: function () {}, +// cwd: function () { +// return '/' +// }, +// binding: function () { +// return { +// hasTracing: false +// } +// } +// } + +test('streams', function (t) { + require('./browser/test-stream-big-packet')(t) + require('./browser/test-stream-big-push')(t) + require('./browser/test-stream-duplex')(t) + require('./browser/test-stream-end-paused')(t) + require('./browser/test-stream-ispaused')(t) + require('./browser/test-stream-finished')(t) + require('./browser/test-stream-pipeline')(t) + require('./browser/test-stream-pipe-after-end')(t) + require('./browser/test-stream-pipe-cleanup')(t) + require('./browser/test-stream-pipe-cleanup-pause')(t) + require('./browser/test-stream-pipe-error-handling')(t) + require('./browser/test-stream-pipe-event')(t) + require('./browser/test-stream-push-order')(t) + require('./browser/test-stream-push-strings')(t) + require('./browser/test-stream-readable-constructor-set-methods')(t) + require('./browser/test-stream-readable-event')(t) + require('./browser/test-stream-transform-constructor-set-methods')(t) + require('./browser/test-stream-transform-objectmode-falsey-value')(t) + require('./browser/test-stream-transform-split-objectmode')(t) + require('./browser/test-stream-unshift-empty-chunk')(t) + require('./browser/test-stream-unshift-read-race')(t) + require('./browser/test-stream-writable-change-default-encoding')(t) + 
require('./browser/test-stream-writable-constructor-set-methods')(t) + require('./browser/test-stream-writable-decoded-encoding')(t) + require('./browser/test-stream-writev')(t) + require('./browser/test-stream-sync-write')(t) + require('./browser/test-stream-pipe-without-listenerCount') +}) + +test('streams 2', function (t) { + require('./browser/test-stream2-base64-single-char-read-end')(t) + require('./browser/test-stream2-compatibility')(t) + require('./browser/test-stream2-large-read-stall')(t) + require('./browser/test-stream2-objects')(t) + require('./browser/test-stream2-pipe-error-handling')(t) + require('./browser/test-stream2-pipe-error-once-listener')(t) + require('./browser/test-stream2-push')(t) + require('./browser/test-stream2-readable-empty-buffer-no-eof')(t) + // require('./browser/test-stream2-readable-from-list')(t); + // require('./browser/test-stream2-transform')(t); + require('./browser/test-stream2-set-encoding')(t) + require('./browser/test-stream2-readable-legacy-drain')(t) + require('./browser/test-stream2-readable-wrap-empty')(t) + require('./browser/test-stream2-readable-non-empty-end')(t) + require('./browser/test-stream2-readable-wrap')(t) + require('./browser/test-stream2-unpipe-drain')(t) + require('./browser/test-stream2-writable')(t) +}) + +test('streams 3', function (t) { + require('./browser/test-stream3-pause-then-read')(t) +}) From 52a6038f95379b4b8b2439b9394d7b58bf319494 Mon Sep 17 00:00:00 2001 From: Shogun Date: Wed, 20 Apr 2022 11:58:02 +0200 Subject: [PATCH 03/19] feat: Updated to Node 18.0.0 and simplified errors and primordials. --- .airtap.yml | 69 +- .github/workflows/ci.yml | 13 +- .github/workflows/sauce.yml | 23 +- README.md | 4 +- build/build.mjs | 23 +- build/files.mjs | 15 +- build/replacements.mjs | 369 +-- lib/_stream_wrap.js | 5 - lib/internal/errors.js | 1663 ------------ lib/internal/inspect-browser.js | 2299 ----------------- lib/internal/inspect.js | 2299 ----------------- lib/internal/js_stream_socket.js | 242 -- lib/internal/primordials.js | 446 ---- lib/internal/streams/add-abort-signal.js | 2 +- lib/internal/streams/buffer_list.js | 6 +- lib/internal/streams/compose.js | 2 +- lib/internal/streams/destroy.js | 38 +- lib/internal/streams/duplex.js | 2 +- lib/internal/streams/duplexify.js | 6 +- lib/internal/streams/end-of-stream.js | 13 +- lib/internal/streams/from.js | 4 +- lib/internal/streams/lazy_transform.js | 2 +- lib/internal/streams/legacy.js | 2 +- lib/internal/streams/operators.js | 6 +- lib/internal/streams/passthrough.js | 2 +- lib/internal/streams/pipeline.js | 62 +- lib/internal/streams/readable.js | 37 +- lib/internal/streams/state.js | 4 +- lib/internal/streams/transform.js | 71 +- lib/internal/streams/utils.js | 32 +- lib/internal/streams/writable.js | 65 +- lib/internal/validators.js | 15 +- lib/{ => ours}/browser.js | 4 +- lib/ours/errors.js | 353 +++ lib/{ => ours}/index.js | 4 +- lib/ours/primordials.js | 101 + lib/ours/util.js | 65 + lib/stream.js | 6 +- lib/stream/promises.js | 2 +- lib/util.js | 78 - package.json | 14 +- src/browser.js | 4 +- src/errors.js | 353 +++ src/index.js | 4 +- src/primordials.js | 101 + src/test/ours/test-errors.js | 2 +- src/test/ours/test-fake-timers.js | 2 +- src/test/ours/test-stream-sync-write.js | 2 +- src/test/test-browser.js | 31 +- src/util.js | 81 +- test/common/index.js | 54 +- test/ours/test-errors.js | 2 +- test/ours/test-fake-timers.js | 2 +- test/ours/test-stream-sync-write.js | 2 +- .../test-readable-from-iterator-closing.js | 2 +- 
test/parallel/test-readable-from.js | 2 +- test/parallel/test-readable-large-hwm.js | 2 +- test/parallel/test-readable-single-end.js | 2 +- test/parallel/test-stream-add-abort-signal.js | 2 +- test/parallel/test-stream-aliases-legacy.js | 2 +- test/parallel/test-stream-asIndexedPairs.mjs | 2 +- test/parallel/test-stream-auto-destroy.js | 2 +- ...riters-in-synchronously-recursion-write.js | 2 +- test/parallel/test-stream-backpressure.js | 2 +- test/parallel/test-stream-big-packet.js | 2 +- test/parallel/test-stream-big-push.js | 2 +- test/parallel/test-stream-catch-rejections.js | 2 +- test/parallel/test-stream-compose.js | 2 +- .../test-stream-construct-async-error.js | 255 -- test/parallel/test-stream-construct.js | 2 +- .../test-stream-decoder-objectmode.js | 2 +- .../test-stream-destroy-event-order.js | 2 +- test/parallel/test-stream-drop-take.js | 2 +- test/parallel/test-stream-duplex-destroy.js | 4 +- test/parallel/test-stream-duplex-end.js | 2 +- test/parallel/test-stream-duplex-from.js | 2 +- test/parallel/test-stream-duplex-props.js | 2 +- .../test-stream-duplex-readable-end.js | 2 +- .../test-stream-duplex-readable-writable.js | 2 +- .../test-stream-duplex-writable-finished.js | 2 +- test/parallel/test-stream-duplex.js | 2 +- test/parallel/test-stream-end-of-streams.js | 35 + test/parallel/test-stream-end-paused.js | 2 +- test/parallel/test-stream-error-once.js | 2 +- test/parallel/test-stream-events-prepend.js | 2 +- test/parallel/test-stream-filter.js | 2 +- test/parallel/test-stream-finished.js | 13 +- test/parallel/test-stream-flatMap.js | 2 +- test/parallel/test-stream-forEach.js | 2 +- test/parallel/test-stream-inheritance.js | 2 +- test/parallel/test-stream-ispaused.js | 2 +- ...-stream-iterator-helpers-test262-tests.mjs | 6 +- .../test-stream-objectmode-undefined.js | 2 +- .../test-stream-once-readable-pipe.js | 2 +- .../parallel/test-stream-passthrough-drain.js | 2 +- test/parallel/test-stream-pipe-after-end.js | 2 +- ...t-stream-pipe-await-drain-manual-resume.js | 2 +- ...tream-pipe-await-drain-push-while-write.js | 2 +- test/parallel/test-stream-pipe-await-drain.js | 2 +- .../test-stream-pipe-cleanup-pause.js | 2 +- test/parallel/test-stream-pipe-cleanup.js | 2 +- .../test-stream-pipe-error-handling.js | 2 +- .../test-stream-pipe-error-unhandled.js | 2 +- test/parallel/test-stream-pipe-event.js | 2 +- .../test-stream-pipe-flow-after-unpipe.js | 2 +- test/parallel/test-stream-pipe-flow.js | 2 +- .../test-stream-pipe-manual-resume.js | 2 +- .../test-stream-pipe-multiple-pipes.js | 2 +- test/parallel/test-stream-pipe-needDrain.js | 2 +- ...test-stream-pipe-same-destination-twice.js | 2 +- .../test-stream-pipe-unpipe-streams.js | 2 +- .../test-stream-pipe-without-listenerCount.js | 2 +- .../test-stream-pipeline-async-iterator.js | 2 +- test/parallel/test-stream-pipeline-http2.js | 2 +- .../test-stream-pipeline-listeners.js | 91 + test/parallel/test-stream-pipeline-process.js | 2 +- ...t-stream-pipeline-queued-end-in-destroy.js | 2 +- .../parallel/test-stream-pipeline-uncaught.js | 2 +- .../test-stream-pipeline-with-empty-string.js | 2 +- test/parallel/test-stream-promises.js | 2 +- test/parallel/test-stream-push-order.js | 2 +- test/parallel/test-stream-push-strings.js | 2 +- test/parallel/test-stream-readable-aborted.js | 2 +- ...t-stream-readable-add-chunk-during-data.js | 2 +- ...stream-readable-constructor-set-methods.js | 2 +- test/parallel/test-stream-readable-data.js | 2 +- test/parallel/test-stream-readable-destroy.js | 4 +- 
test/parallel/test-stream-readable-didRead.js | 2 +- ...eam-readable-emit-readable-short-stream.js | 2 +- .../test-stream-readable-emittedReadable.js | 2 +- .../test-stream-readable-end-destroyed.js | 2 +- test/parallel/test-stream-readable-ended.js | 2 +- .../test-stream-readable-error-end.js | 2 +- test/parallel/test-stream-readable-event.js | 2 +- .../test-stream-readable-flow-recursion.js | 2 +- .../test-stream-readable-hwm-0-async.js | 2 +- ...test-stream-readable-hwm-0-no-flow-data.js | 2 +- test/parallel/test-stream-readable-hwm-0.js | 2 +- .../test-stream-readable-infinite-read.js | 2 +- .../test-stream-readable-invalid-chunk.js | 2 +- .../test-stream-readable-needReadable.js | 2 +- .../test-stream-readable-next-no-null.js | 2 +- ...st-stream-readable-no-unneeded-readable.js | 2 +- ...stream-readable-object-multi-push-async.js | 2 +- .../test-stream-readable-pause-and-resume.js | 4 +- ...st-stream-readable-readable-then-resume.js | 2 +- .../parallel/test-stream-readable-readable.js | 2 +- ...est-stream-readable-reading-readingMore.js | 2 +- .../test-stream-readable-resume-hwm.js | 2 +- .../test-stream-readable-resumeScheduled.js | 2 +- ...m-readable-setEncoding-existing-buffers.js | 2 +- .../test-stream-readable-setEncoding-null.js | 2 +- .../test-stream-readable-unpipe-resume.js | 2 +- test/parallel/test-stream-readable-unshift.js | 2 +- ...tream-readable-with-unimplemented-_read.js | 2 +- .../test-stream-readableListening-state.js | 2 +- test/parallel/test-stream-reduce.js | 2 +- test/parallel/test-stream-some-find-every.mjs | 2 +- test/parallel/test-stream-toArray.js | 2 +- .../test-stream-transform-callback-twice.js | 2 +- ...tream-transform-constructor-set-methods.js | 2 +- .../parallel/test-stream-transform-destroy.js | 4 +- .../test-stream-transform-final-sync.js | 2 +- test/parallel/test-stream-transform-final.js | 2 +- .../test-stream-transform-flush-data.js | 2 +- ...tream-transform-objectmode-falsey-value.js | 2 +- ...st-stream-transform-split-highwatermark.js | 2 +- .../test-stream-transform-split-objectmode.js | 2 +- test/parallel/test-stream-uint8array.js | 2 +- test/parallel/test-stream-unpipe-event.js | 2 +- .../test-stream-unshift-empty-chunk.js | 2 +- .../parallel/test-stream-unshift-read-race.js | 2 +- test/parallel/test-stream-wrap-drain.js | 65 - test/parallel/test-stream-wrap-encoding.js | 58 - test/parallel/test-stream-wrap.js | 48 - test/parallel/test-stream-writable-aborted.js | 41 + .../test-stream-writable-callback-twice.js | 2 +- ...stream-writable-change-default-encoding.js | 2 +- .../test-stream-writable-clear-buffer.js | 2 +- ...stream-writable-constructor-set-methods.js | 2 +- .../test-stream-writable-decoded-encoding.js | 2 +- test/parallel/test-stream-writable-destroy.js | 4 +- .../test-stream-writable-end-cb-error.js | 2 +- .../test-stream-writable-end-cb-uncaught.js | 2 +- .../test-stream-writable-end-multiple.js | 2 +- .../test-stream-writable-ended-state.js | 2 +- .../test-stream-writable-final-async.js | 2 +- .../test-stream-writable-final-destroy.js | 2 +- .../test-stream-writable-final-throw.js | 2 +- .../test-stream-writable-finish-destroyed.js | 12 +- .../test-stream-writable-finished-state.js | 2 +- .../parallel/test-stream-writable-finished.js | 2 +- .../test-stream-writable-invalid-chunk.js | 2 +- .../test-stream-writable-needdrain-state.js | 2 +- test/parallel/test-stream-writable-null.js | 2 +- .../test-stream-writable-properties.js | 2 +- .../test-stream-writable-samecb-singletick.js | 2 +- 
.../parallel/test-stream-writable-writable.js | 2 +- .../test-stream-writable-write-cb-error.js | 2 +- .../test-stream-writable-write-cb-twice.js | 2 +- .../test-stream-writable-write-error.js | 2 +- ...est-stream-writable-write-writev-finish.js | 2 +- .../test-stream-writableState-ending.js | 2 +- ...ableState-uncorked-bufferedRequestCount.js | 2 +- test/parallel/test-stream-write-destroy.js | 8 +- test/parallel/test-stream-write-drain.js | 2 +- test/parallel/test-stream-write-final.js | 2 +- test/parallel/test-stream-writev.js | 2 +- ...est-stream2-base64-single-char-read-end.js | 2 +- test/parallel/test-stream2-basic.js | 2 +- test/parallel/test-stream2-compatibility.js | 2 +- test/parallel/test-stream2-decode-partial.js | 2 +- .../test-stream2-finish-pipe-error.js | 2 +- test/parallel/test-stream2-finish-pipe.js | 2 +- .../parallel/test-stream2-large-read-stall.js | 2 +- test/parallel/test-stream2-objects.js | 2 +- .../test-stream2-pipe-error-handling.js | 2 +- .../test-stream2-pipe-error-once-listener.js | 2 +- test/parallel/test-stream2-push.js | 2 +- test/parallel/test-stream2-read-sync-stack.js | 2 +- ...st-stream2-readable-empty-buffer-no-eof.js | 2 +- .../test-stream2-readable-from-list.js | 2 +- .../test-stream2-readable-legacy-drain.js | 2 +- .../test-stream2-readable-non-empty-end.js | 2 +- .../test-stream2-readable-wrap-destroy.js | 2 +- .../test-stream2-readable-wrap-empty.js | 2 +- .../test-stream2-readable-wrap-error.js | 2 +- test/parallel/test-stream2-readable-wrap.js | 2 +- test/parallel/test-stream2-set-encoding.js | 2 +- test/parallel/test-stream2-transform.js | 2 +- test/parallel/test-stream2-unpipe-drain.js | 2 +- test/parallel/test-stream2-unpipe-leak.js | 2 +- test/parallel/test-stream2-writable.js | 2 +- test/parallel/test-stream3-cork-end.js | 2 +- test/parallel/test-stream3-cork-uncork.js | 2 +- test/parallel/test-stream3-pause-then-read.js | 2 +- test/parallel/test-streams-highwatermark.js | 2 +- test/test-browser.js | 31 +- 238 files changed, 1857 insertions(+), 8268 deletions(-) delete mode 100644 lib/_stream_wrap.js delete mode 100644 lib/internal/errors.js delete mode 100644 lib/internal/inspect-browser.js delete mode 100644 lib/internal/inspect.js delete mode 100644 lib/internal/js_stream_socket.js delete mode 100644 lib/internal/primordials.js rename lib/{ => ours}/browser.js (93%) create mode 100644 lib/ours/errors.js rename lib/{ => ours}/index.js (96%) create mode 100644 lib/ours/primordials.js create mode 100644 lib/ours/util.js delete mode 100644 lib/util.js create mode 100644 src/errors.js create mode 100644 src/primordials.js delete mode 100644 test/parallel/test-stream-construct-async-error.js create mode 100644 test/parallel/test-stream-end-of-streams.js create mode 100644 test/parallel/test-stream-pipeline-listeners.js delete mode 100644 test/parallel/test-stream-wrap-drain.js delete mode 100644 test/parallel/test-stream-wrap-encoding.js delete mode 100644 test/parallel/test-stream-wrap.js create mode 100644 test/parallel/test-stream-writable-aborted.js diff --git a/.airtap.yml b/.airtap.yml index 6fb5d5e957..6f17246646 100644 --- a/.airtap.yml +++ b/.airtap.yml @@ -1,13 +1,41 @@ -providers: - - airtap-sauce sauce_connect: true -browsers: - - name: chrome - - name: firefox - - name: safari - - name: edge presets: + sauce: + providers: + - airtap-sauce + browsers: + - name: chrome + - name: firefox + # Testing on Safari disabled due to https://github.com/airtap/sauce/issues/11 + # - name: safari + - name: edge + + sauce-chrome: + providers: + - 
airtap-sauce + browsers: + - name: chrome + + sauce-firefox: + providers: + - airtap-sauce + browsers: + - name: firefox + + # Testing on Safari disabled due to https://github.com/airtap/sauce/issues/11 + # sauce-safari: + # providers: + # - airtap-sauce + # browsers: + # - name: safari + + sauce-edge: + providers: + - airtap-sauce + browsers: + - name: edge + local: providers: - airtap-playwright @@ -19,3 +47,30 @@ presets: options: launch: channel: msedge + + local-chrome: + providers: + - airtap-playwright + browsers: + - name: chrome + + local-firefox: + providers: + - airtap-playwright + browsers: + - name: firefox + + local-safari: + providers: + - airtap-playwright + browsers: + - name: webkit + + local-edge: + providers: + - airtap-playwright + browsers: + - name: chromium + options: + launch: + channel: msedge diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 81a27407a1..003edcd6fe 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,6 +4,7 @@ on: [push, pull_request] jobs: build: + name: Node.js Build runs-on: ${{ matrix.os }} strategy: fail-fast: false @@ -11,12 +12,18 @@ jobs: os: [ubuntu-latest, windows-latest, macOS-latest] node-version: [14.x, 16.x, 17.x] steps: - - uses: actions/checkout@v3 + - name: Checkout + uses: actions/checkout@v3 - name: Use Node.js ${{ matrix.node-version }} on ${{ matrix.os }} uses: actions/setup-node@v3 with: node-version: ${{ matrix.node-version }} - - name: npm install + - name: Restore cached dependencies + uses: actions/cache@v3 + with: + path: node_modules + key: node-modules-${{ hashFiles('package.json') }} + - name: Install dependencies run: npm install - - name: npm run test + - name: Run Tests run: npm run test diff --git a/.github/workflows/sauce.yml b/.github/workflows/sauce.yml index 5b63a693d1..a8750eb257 100644 --- a/.github/workflows/sauce.yml +++ b/.github/workflows/sauce.yml @@ -2,23 +2,36 @@ name: Sauce Labs Build on: push jobs: test: + name: Sauce Labs Build runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + browser: ['chrome', 'firefox', 'edge'] steps: - name: Checkout uses: actions/checkout@v3 - - name: Set up node + - name: Use Node.js 16 uses: actions/setup-node@v3 with: node-version: 16 - - name: Install + - name: Restore cached dependencies + uses: actions/cache@v3 + with: + path: node_modules + key: node-modules-${{ hashFiles('package.json') }} + - name: Install dependencies run: npm install env: # Download Sauce Connect binary now instead of on first run SAUCE_CONNECT_DOWNLOAD_ON_INSTALL: true - - name: Add host + - name: Add airtap.local to /etc/hosts run: echo "127.0.0.1 airtap.local" | sudo tee -a /etc/hosts - - name: Test - run: npm run test:browsers + - name: Pause to avoid Sauce Labs timeouts + run: sleep 30s + shell: bash + - name: Run Test + run: ./node_modules/.bin/airtap -p sauce-${{ matrix.browser }} test/browser/test-*.js env: SAUCE_USERNAME: ${{ secrets.SAUCE_USERNAME }} SAUCE_ACCESS_KEY: ${{ secrets.SAUCE_ACCESS_KEY }} diff --git a/README.md b/README.md index f166259574..0db3f64ef8 100644 --- a/README.md +++ b/README.md @@ -13,9 +13,9 @@ npm install --save readable-stream ``` -This package is a mirror of the streams implementations in Node.js 17.9.0. +This package is a mirror of the streams implementations in Node.js 18.0.0. -Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v17.9.0/docs/api/stream.html). +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v18.0.0/docs/api/stream.html). 
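Editor's note, not part of the patch: the README hunk above describes readable-stream as a mirror of Node core's streams, so a brief hedged usage sketch may help. It assumes only what the README states — that the package exposes the same API as Node's `stream` module (e.g. `Readable`, `Writable`, `pipeline`); names such as `source` and `sink` are illustrative.

```js
// Sketch: use readable-stream as a drop-in for require('stream').
const { Readable, Writable, pipeline } = require('readable-stream')

// Build a small readable source and a writable sink.
const source = Readable.from(['a', 'b', 'c'])
const sink = new Writable({
  write(chunk, encoding, callback) {
    console.log('received:', chunk.toString())
    callback()
  }
})

// Wire them together; pipeline handles cleanup and error forwarding.
pipeline(source, sink, (err) => {
  if (err) console.error('pipeline failed:', err)
  else console.log('pipeline finished')
})
```

The same code should behave identically with `require('stream')` on a matching Node release; the point of the package is that it keeps behaving that way regardless of the Node version in use.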
If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** _only_ and avoid the _"stream"_ module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). diff --git a/build/build.mjs b/build/build.mjs index 3080c7ecb0..337f43b23e 100644 --- a/build/build.mjs +++ b/build/build.mjs @@ -157,6 +157,7 @@ async function main() { // Create paths const paths = new Set(contents.map((c) => dirname(c[0]))) paths.delete('.') + paths.add('lib/ours') for (const path of paths.values()) { console.log(`Creating directory ${highlightFile(path, 32)} ...`) @@ -167,14 +168,20 @@ async function main() { await processFiles(contents) // Copy template files - console.log(`Copying template to file ${highlightFile('lib/browser.js', 32)} ...`) - await copyFile('src/browser.js', 'lib/browser.js') + console.log(`Copying template to file ${highlightFile('lib/ours/browser.js', 32)} ...`) + await copyFile('src/browser.js', 'lib/ours/browser.js') - console.log(`Copying template to file ${highlightFile('lib/index.js', 32)} ...`) - await copyFile('src/index.js', 'lib/index.js') + console.log(`Copying template to file ${highlightFile('lib/ours/index.js', 32)} ...`) + await copyFile('src/index.js', 'lib/ours/index.js') - console.log(`Copying template to file ${highlightFile('lib/util.js', 32)} ...`) - await copyFile('src/util.js', 'lib/util.js') + console.log(`Copying template to file ${highlightFile('lib/ours/errors.js', 32)} ...`) + await copyFile('src/errors.js', 'lib/ours/errors.js') + + console.log(`Copying template to file ${highlightFile('lib/ours/primordials.js', 32)} ...`) + await copyFile('src/primordials.js', 'lib/ours/primordials.js') + + console.log(`Copying template to file ${highlightFile('lib/ours/util.js', 32)} ...`) + await copyFile('src/util.js', 'lib/ours/util.js') console.log(`Copying template to file ${highlightFile('test/test-browser.js', 32)} ...`) await copyFile('src/test/test-browser.js', 'test/test-browser.js') @@ -184,10 +191,6 @@ async function main() { console.log(`Copying template to file ${highlightFile('test/ours', 32)} ...`) await cp('src/test/ours', 'test/ours', { recursive: true }) - - // Remove some unwanted directories - await rm('lib/internal/per_context', { recursive: true, force: true }) - await rm('lib/internal/util', { recursive: true, force: true }) } await main() diff --git a/build/files.mjs b/build/files.mjs index 45f9f14ab9..84c17d975c 100644 --- a/build/files.mjs +++ b/build/files.mjs @@ -1,11 +1,7 @@ export const sources = [ 'lib/_stream_.+', - 'lib/internal/errors.js', - 'lib/internal/per_context/primordials.js', 'lib/internal/streams/.+', - 'lib/internal/util/inspect.js', 'lib/internal/validators.js', - 'lib/internal/js_stream_socket.js', 'lib/stream.js', 'lib/stream/promises.js', 'test/common/fixtures.js', @@ -19,14 +15,15 @@ export const sources = [ ] export const skippedSources = [ + 'lib/_stream_wrap.js', 'test/parallel/test-stream-consumers.js', 'test/parallel/test-stream-destroy.js', 'test/parallel/test-stream-map.js', 'test/parallel/test-stream-pipeline.js', - 'test/parallel/test-stream-readable-async-iterators.js' + 'test/parallel/test-stream-readable-async-iterators.js', + 'test/parallel/test-stream-wrap-drain.js', + 'test/parallel/test-stream-wrap-encoding.js', + 'test/parallel/test-stream-wrap.js' ] -export const aliases = { - 'lib/internal/per_context/primordials.js': ['lib/internal/primordials.js'], - 
'lib/internal/util/inspect.js': ['lib/internal/inspect.js', 'lib/internal/inspect-browser.js'] -} +export const aliases = {} diff --git a/build/replacements.mjs b/build/replacements.mjs index b48d97714e..0fa985b7dc 100644 --- a/build/replacements.mjs +++ b/build/replacements.mjs @@ -1,39 +1,6 @@ const legacyStreamsRequireStream = ["require\\('stream'\\)", "require('./stream')"] -const streamsInternalsPrimordials = ['= primordials', "= require('../primordials')"] - -const streamsInternalsInspect = [ - "const { inspect } = require\\('internal/util/inspect'\\);", - "const inspect = { custom: Symbol('nodejs.util.inspect.custom') };" -] - -const streamsInternalsRequireStreams = ["require\\('internal/streams/([^']+)'\\)", "require('./$1')"] - -const streamsInternalsRequireRelativeUtilDebuglog = ["require\\('internal/util/debuglog'\\)", "require('../../util')"] - -const streamsInternalsRequireRelativeInternalUtil = ["require\\('internal/util'\\)", "require('../../util')"] - -const streamsInternalsRequireInternal = ["require\\('internal/([^']+)'\\)", "require('../$1')"] - -const streamsInternalsRequireRelativeDuplex = ['instanceof Stream.Duplex', "instanceof require('./duplex')"] - -const streamsInternalsRequireWebStream = ["require\\('../webstreams/adapters'\\)", '{}'] - -const streamsInternalNoRequireAbortController = [ - 'const \\{ AbortController \\} = .+', - ` - if (typeof AbortController === 'undefined') { - globalThis.AbortController = require('abort-controller').AbortController; - } - ` -] - -const streamsInternalWeakHandler = [ - "const \\{ kWeakHandler \\} = require\\('../event_target'\\);", - "const kWeakHandler = require('../primordials').Symbol('kWeak');" -] - -const streamsInternalBlob = [ +const internalStreamsBlob = [ "require\\('../blob'\\);", ` { @@ -46,77 +13,44 @@ const streamsInternalBlob = [ ` ] -const errorsRequireRelativeInspect = ["require\\('internal/util/inspect'\\)", "require('./inspect')"] +const internalStreamsInspectCustom = ['inspect.custom', "Symbol.for('nodejs.util.inspect.custom')"] -const errorsRequireTty = ["require\\('internal/tty'\\).hasColors\\(\\)", 'false'] - -const errorsRequireCheckCaptureStackTrace = [ - 'ErrorCaptureStackTrace\\(err\\);', +const internalStreamsNoRequireAbortController = [ + 'const \\{ AbortController \\} = .+', ` - if (typeof ErrorCaptureStackTrace === 'function') { - ErrorCaptureStackTrace(err); + if (typeof AbortController === 'undefined') { + globalThis.AbortController = require('abort-controller').AbortController; } ` ] -const inspectSequencesRegExp = [ - 'const strEscapeSequencesRegExp = .+', - 'const strEscapeSequencesRegExp = /[\\x00-\\x1f\\x27\\x5c\\x7f-\\x9f]/;' -] +const internalStreamsRequireInternal = ["require\\('internal/([^']+)'\\)", "require('../$1')"] -const inspectSequencesReplacer = [ - 'const strEscapeSequencesReplacer = .+', - 'const strEscapeSequencesReplacer = /[\\x00-\\x1f\\x27\\x5c\\x7f-\\x9f]/g' -] +const internalStreamsRequireErrors = ["require\\('internal/errors'\\)", "require('../../ours/errors')"] -const inspectSequencesRegExpSingle = [ - 'const strEscapeSequencesRegExpSingle = .+', - 'const strEscapeSequencesRegExpSingle = /[\\x00-\\x1f\\x5c\\x7f-\\x9f]/;' -] +const internalStreamsRequirePrimordials = ['= primordials', "= require('../../ours/primordials')"] -const inspectSequencesReplacerSingle = [ - 'const strEscapeSequencesReplacerSingle = .+', - 'const strEscapeSequencesReplacerSingle = /[\\x00-\\x1f\\x5c\\x7f-\\x9f]/g;' +const internalStreamsRequireRelativeUtil = [ + 'const \\{ 
(once|createDeferredPromise|) \\} = .+;', + "const { $1 } = require('../../ours/util');" ] -const inspectLookBehind = ['\\(\\?[<=]', '(?:'] - -const internalsRequireAssert = ["require\\('internal/assert'\\)", "require('assert')"] - -const inspectNativeModule = [ - "require\\('internal/bootstrap/loaders'\\);", - ` - { - NativeModule: { - exists() { - return false; - } - } - } - ` -] - -const inspectIntl = ["internalBinding\\('config'\\)\\.hasIntl", 'false'] - -const inspectIcuBinding = ["internalBinding\\('icu'\\)", '{}'] - -const streamSocketInspectBinding = ['internalBinding', 'process.binding'] - -const streamSocketDebugLog = ["require\\('internal/util/debuglog'\\)", "require('../util')"] +const internalStreamsRequireRelativeDuplex = ['instanceof Stream.Duplex', "instanceof require('./duplex')"] -const inspectRequireUtil = ["internalBinding\\('util'\\)", "require('../util')"] +const internalStreamsRequireStreams = ["require\\('internal/streams/([^']+)'\\)", "require('./$1')"] -const internalRequireRelativeInternalUtil = ["require\\('internal/util'\\)", "require('../util')"] +const internalStreamsRequireUtil = ["require\\('internal/util(?:/(?:debuglog|inspect))?'\\)", "require('util')"] -const internalRequireRelativeInternal = ["require\\('internal/([^']+)'\\)", "require('./$1')"] +const internalStreamsRequireUtilDebuglog = ["require\\('internal/util/debuglog'\\)", "require('util')"] -const internalRequireAsyncHooks = ["require\\('./async_hooks'\\)", "require('internal/async_hooks')"] +const internalStreamsRequireWebStream = ["require\\('internal/webstreams/adapters'\\)", '{}'] -const internalPrimordials = ['= primordials', "= require('./primordials')"] - -const internalRequireRelativeTypes = ["require\\('internal/util/types'\\)", "require('../util')"] +const internalStreamsWeakHandler = [ + "const \\{ kWeakHandler \\} = require\\('../event_target'\\);", + "const kWeakHandler = require('../../ours/primordials').Symbol('kWeak');" +] -const internalNoCoalesceAssignment = [ +const internalValidatorsNoCoalesceAssignment = [ '\\s*(.+) \\?\\?= (.+)', ` if (typeof $1 === 'undefined') { @@ -125,33 +59,22 @@ const internalNoCoalesceAssignment = [ ` ] -const primordialsDefine = [ - "('use strict';)", - ` - $1 - - const primordials = module.exports = {} - ` +const internalValidatorsNoRequireSignals = [ + "const \\{ signals \\} = internalBinding\\('constants'\\).os;", + 'const signals = {};' ] -const primordialsAggregateError = [ - '(= Reflect;)', - ` - $1 +const internalValidatorsRequireAssert = ["require\\('internal/assert'\\)", "require('assert')"] - if (typeof AggregateError === 'undefined') { - globalThis.AggregateError = require('aggregate-error'); - } - ` -] +const internalValidatorsRequireAsyncHooks = ["require\\('./async_hooks'\\)", "require('internal/async_hooks')"] -const validatorSignals = ["const \\{ signals \\} = internalBinding\\('constants'\\).os;", 'const signals = {};'] +const internalValidatorsRequireErrors = ["require\\('internal/errors'\\)", "require('../ours/errors')"] -const streamIndexPrimordials = ['= primordials', "= require('./internal/primordials')"] +const internalValidatorsRequirePrimordials = ['= primordials', "= require('../ours/primordials')"] -const streamIndexRequireUtil = ["require\\('internal/util'\\)", "require('./util')"] +const internalValidatorsRequireRelativeUtil = ["require\\('internal/util'\\)", "require('../ours/util')"] -const streamIndexRequireInternalBuffer = ["require\\('internal/buffer'\\)", '{}'] +const internalValidatorsRequireUtilTypes = 
["require\\('internal/util/types'\\)", "require('util').types"] const streamIndexIsUint8Array = [ "Stream._isUint8Array = require\\('internal/util/types'\\).isUint8Array;", @@ -164,37 +87,42 @@ const streamIndexIsUint8Array = [ const streamIndexRequireInternal = ["require\\('internal/([^']+)'\\)", "require('./internal/$1')"] -const streamIndexRequirePromises = ["require\\('stream/promises'\\);", "require('./stream/promises');"] - -const streamIndexUint8ArrayToBuffer = ['new internalBuffer.FastBuffer', 'Buffer.from'] - -const streamsPrimordials = ['= primordials', "= require('../internal/primordials')"] - -const streamsRequireInternal = ["require\\('internal/(.+)'\\)", "require('../internal/$1')"] +const streamIndexRequireInternalBuffer = ["require\\('internal/buffer'\\)", '{}'] -const streamsConsumerTextDecoder = ["const \\{\\n\\s+TextDecoder,\\n\\} = require\\('../internal/encoding'\\);\\n", ''] +const streamIndexRequireErrors = ["require\\('internal/errors'\\);", "require('./ours/errors');"] -const streamsConsumerNoRequireBlob = ["const \\{\\n\\s+Blob,\\n\\} = require\\('../internal/blob'\\);\\n", ''] +const streamIndexRequirePrimordials = ['= primordials', "= require('./ours/primordials')"] -const streamsConsumerRequireBlobFromBuffer = ['(\\s+Buffer,)', '$1 Blob,'] +const streamIndexRequirePromises = ["require\\('stream/promises'\\);", "require('./stream/promises');"] -const webstreamPrimordials = ['= primordials', "= require('../primordials')"] +const streamIndexRequireUtil = ["require\\('internal/util'\\)", "require('util')"] -const webstreamsRequireRelative = ["require\\('internal/webstreams/([^']+)'\\)", "require('./$1')"] +const streamIndexUint8ArrayToBuffer = ['new internalBuffer.FastBuffer', 'Buffer.from'] -const webstreamsRequireStreams = ["require\\('internal/streams/([^']+)'\\)", "require('../streams/$1')"] +const streamsRequireErrors = ["require\\('internal/errors'\\)", "require('../ours/errors')"] -const webstreamsRequireStream = ["require\\('stream'\\)", "require('../../stream')"] +const streamsRequireInternal = ["require\\('internal/(.+)'\\)", "require('../internal/$1')"] -const webstreamsRequireUtil = ["require\\('internal/util'\\)", "require('../../util')"] +const streamsRequirePrimordials = ['= primordials', "= require('../ours/primordials')"] -const webstreamsRequireErrorsOrValidators = ["require\\('internal/(errors|validators)'\\)", "require('../$1')"] +const testCommonKnownGlobals = [ + 'let knownGlobals = \\[(\\n\\s+)', + ` + let knownGlobals = [\n + typeof AggregateError !== 'undefined' ? AggregateError : require('aggregate-error'), + typeof AbortController !== 'undefined' ? AbortController : require('abort-controller').AbortController, + typeof AbortSignal !== 'undefined' ? AbortSignal : require('abort-controller').AbortSignal, + typeof EventTarget !== 'undefined' ? 
EventTarget : require('event-target-shim').EventTarget, + ` +] -const webstreamsConsumerNoRequireTextAPI = [ - "const \\{\\n\\s+TextDecoder,\\n\\s+TextEncoder,\\n\\} = require\\('internal/encoding'\\);\\n", - '' +const testParallelBindings = [ + "const \\{ internalBinding \\} = require\\('../../lib/internal/test/binding'\\);", + 'const internalBinding = process.binding' ] +const testParallelHasOwn = ['Object.hasOwn\\(', 'Reflect.has('] + const testParallelIncludeTap = [ "('use strict')", ` @@ -205,30 +133,44 @@ const testParallelIncludeTap = [ ` ] -const testParallelRequireStream = ["require\\('stream'\\)", "require('../../lib')"] +const testParallelImportStreamInMjs = [" from 'stream';", "from '../../lib/ours/index.js';"] -const testParallelRequireStreamPromises = ["require\\('stream/promises'\\)", "require('../../lib/stream/promises')"] +const testParallelImportTapInMjs = ["(from 'assert';)", "$1\nimport tap from 'tap';"] -const testParallelRequireStreamConsumer = ["require\\('stream/consumer'\\)", "require('../../lib/stream/consumer')"] +const testParallelFinishedEvent = ["res.on\\('close", "res.on('finish"] -const testParallelRequireStreamWeb = ["require\\('stream/web'\\)", "require('../../lib/stream/web')"] +const testParallelFlatMapWinLineSeparator = [ + "'xyz\\\\n'\\.repeat\\(5\\)", + "(process.platform === 'win32' ? 'xyz\\r\\n' : 'xyz\\n').repeat(5)" +] -const testParallelRequireStreamInternalsLegacy = ["require\\('(_stream_\\w+)'\\)", "require('../../lib/$1')"] +const testParallelPreprocessWinLineSeparator = [ + 'assert.strictEqual\\(streamedData, modelData\\);', + "assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\\r\\n/g, '\\n') : modelData);" +] -const testParallelRequireStreamInternals = ["require\\('(internal/.+)'\\)", "require('../../lib/$1')"] +const testParallelReadableBufferListInspect = [ + 'assert.strictEqual\\(\\n\\s+util.inspect\\(\\[ list \\], \\{ compact: false \\}\\),\\n\\s+`\\[\\n\\s+BufferList \\{\\n\\s+head: \\[Object\\],\\n\\s+tail: \\[Object\\],\\n\\s+length: 4\\n\\s+\\}\\n\\]`\\);', + ` + assert.strictEqual(typeof list.head, 'object'); + assert.strictEqual(typeof list.tail, 'object'); + assert.strictEqual(list.length, 4); + ` +] -const testParallelImportStreamInMjs = [" from 'stream';", "from '../../lib/index.js';"] +const testParallelRequireStream = ["require\\('stream'\\)", "require('../../lib/ours/index')"] -const testParallelImportTapInMjs = ["(from 'assert';)", "$1\nimport tap from 'tap';"] +const testParallelRequireStreamConsumer = ["require\\('stream/consumer'\\)", "require('../../lib/stream/consumer')"] -const testParallelSilentConsole = ['console.(log|error)', 'silentConsole.$1'] +const testParallelRequireStreamInternals = ["require\\('(internal/.+)'\\)", "require('../../lib/$1')"] -const testParallelHasOwn = ['Object.hasOwn\\(', 'Reflect.has('] +const testParallelRequireStreamInternalsLegacy = ["require\\('(_stream_\\w+)'\\)", "require('../../lib/$1')"] -const testParallelBindings = [ - "const \\{ internalBinding \\} = require\\('../../lib/internal/test/binding'\\);", - 'const internalBinding = process.binding' -] +const testParallelRequireStreamPromises = ["require\\('stream/promises'\\)", "require('../../lib/stream/promises')"] + +const testParallelRequireStreamWeb = ["require\\('stream/web'\\)", "require('../../lib/stream/web')"] + +const testParallelSilentConsole = ['console.(log|error)', 'silentConsole.$1'] const testParallelTimersPromises = [ "const { setTimeout } = require\\('timers/promises'\\);", @@ -243,40 
+185,9 @@ const testParallelTimersPromises = [ ` ] -const testKnownGlobals = [ - 'let knownGlobals = \\[(\\n\\s+)', - ` - let knownGlobals = [\n - typeof AggregateError !== 'undefined' ? AggregateError : require('aggregate-error'), - typeof AbortController !== 'undefined' ? AbortController : require('abort-controller').AbortController, - typeof AbortSignal !== 'undefined' ? AbortSignal : require('abort-controller').AbortSignal, - typeof EventTarget !== 'undefined' ? EventTarget : require('event-target-shim').EventTarget, - ` -] - -const testTicksReenableConsoleLog = ['silentConsole.log\\(i\\);', 'console.log(i);'] - -const testTickSaveHook = ['async_hooks.createHook\\(\\{', 'const hook = async_hooks.createHook({'] +const testParallelTicksReenableConsoleLog = ['silentConsole.log\\(i\\);', 'console.log(i);'] -const testReadableBufferListInspect = [ - 'assert.strictEqual\\(\\n\\s+util.inspect\\(\\[ list \\], \\{ compact: false \\}\\),\\n\\s+`\\[\\n\\s+BufferList \\{\\n\\s+head: \\[Object\\],\\n\\s+tail: \\[Object\\],\\n\\s+length: 4\\n\\s+\\}\\n\\]`\\);', - ` - assert.strictEqual(typeof list.head, 'object'); - assert.strictEqual(typeof list.tail, 'object'); - assert.strictEqual(list.length, 4); - ` -] - -const testFinishedEvent = ["res.on\\('close", "res.on('finish"] -const testPreprocessWinLineSeparator = [ - 'assert.strictEqual\\(streamedData, modelData\\);', - "assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\\r\\n/g, '\\n') : modelData);" -] - -const testFlatMapWinLineSeparator = [ - "'xyz\\\\n'\\.repeat\\(5\\)", - "(process.platform === 'win32' ? 'xyz\\r\\n' : 'xyz\\n').repeat(5)" -] +const testParallelTickSaveHook = ['async_hooks.createHook\\(\\{', 'const hook = async_hooks.createHook({'] const readmeInfo = ['(This package is a mirror of the streams implementations in Node.js) (\\d+.\\d+.\\d+).', '$1 $2.'] @@ -285,88 +196,64 @@ const readmeLink = ['(\\[Node.js website\\]\\(https://nodejs.org/dist/v)(\\d+.\\ export const replacements = { 'lib/_stream.+': [legacyStreamsRequireStream], 'lib/internal/streams/.+': [ - streamsInternalsPrimordials, - streamsInternalsInspect, - streamsInternalsRequireStreams, - streamsInternalsRequireRelativeUtilDebuglog, - streamsInternalsRequireRelativeInternalUtil, - streamsInternalsRequireInternal, - streamsInternalsRequireRelativeDuplex, - streamsInternalsRequireWebStream, - streamsInternalNoRequireAbortController, - streamsInternalWeakHandler, - streamsInternalBlob - ], - 'lib/internal/errors': [errorsRequireRelativeInspect, errorsRequireTty, errorsRequireCheckCaptureStackTrace], - 'lib/internal/inspect.js': [inspectNativeModule, inspectIntl, inspectIcuBinding, inspectRequireUtil], - 'lib/internal/inspect-browser.js': [ - inspectNativeModule, - inspectIntl, - inspectIcuBinding, - inspectRequireUtil, - inspectSequencesRegExp, - inspectSequencesReplacer, - inspectSequencesRegExpSingle, - inspectSequencesReplacerSingle, - inspectLookBehind - ], - 'lib/internal/js_stream_socket.js': [streamSocketInspectBinding, streamSocketDebugLog], - 'lib/internal/primordials.js': [primordialsDefine, primordialsAggregateError], - 'lib/internal/validators.js': [validatorSignals], - 'lib/internal/webstreams/.+': [ - webstreamPrimordials, - webstreamsRequireRelative, - webstreamsRequireStreams, - webstreamsRequireStream, - webstreamsRequireUtil, - webstreamsRequireErrorsOrValidators, - webstreamsConsumerNoRequireTextAPI + internalStreamsNoRequireAbortController, + internalStreamsRequireErrors, + internalStreamsRequirePrimordials, + 
internalStreamsRequireRelativeDuplex, + internalStreamsRequireRelativeUtil, + internalStreamsRequireStreams, + internalStreamsRequireUtil, + internalStreamsRequireUtilDebuglog, + internalStreamsRequireWebStream, + internalStreamsRequireInternal, + internalStreamsWeakHandler, + internalStreamsBlob, + internalStreamsInspectCustom ], - // Keep this after all the rest in the same folder - 'lib/internal/(?:errors|inspect|inspect-browser|js_stream_socket|primordials|validators).js': [ - internalsRequireAssert, - internalRequireRelativeTypes, - internalRequireRelativeInternalUtil, - internalRequireRelativeInternal, - internalRequireAsyncHooks, - internalPrimordials, - internalNoCoalesceAssignment + 'lib/internal/validators.js': [ + internalValidatorsRequireAssert, + internalValidatorsRequireAsyncHooks, + internalValidatorsRequireErrors, + internalValidatorsRequirePrimordials, + internalValidatorsRequireRelativeUtil, + internalValidatorsRequireUtilTypes, + internalValidatorsNoRequireSignals, + internalValidatorsNoCoalesceAssignment ], 'lib/stream.js': [ - streamIndexPrimordials, - streamIndexRequireInternalBuffer, streamIndexIsUint8Array, streamIndexUint8ArrayToBuffer, + streamIndexRequireInternalBuffer, + streamIndexRequireErrors, + streamIndexRequirePrimordials, + streamIndexRequirePromises, streamIndexRequireUtil, - streamIndexRequireInternal, - streamIndexRequirePromises + streamIndexRequireInternal ], - 'lib/stream/.+': [streamsPrimordials, streamsRequireInternal], - 'lib/stream/consumers.js': [ - streamsConsumerTextDecoder, - streamsConsumerNoRequireBlob, - streamsConsumerRequireBlobFromBuffer - ], - 'test/common/index.js': [testKnownGlobals], + 'lib/stream/.+': [streamsRequireErrors, streamsRequirePrimordials, streamsRequireInternal], + 'test/common/index.js': [testCommonKnownGlobals], 'test/parallel/.+': [ testParallelIncludeTap, testParallelRequireStream, - testParallelRequireStreamPromises, testParallelRequireStreamConsumer, - testParallelRequireStreamWeb, - testParallelRequireStreamInternalsLegacy, testParallelRequireStreamInternals, - testParallelImportTapInMjs, + testParallelRequireStreamInternalsLegacy, + testParallelRequireStreamPromises, + testParallelRequireStreamWeb, testParallelImportStreamInMjs, - testParallelSilentConsole, - testParallelHasOwn, + testParallelImportTapInMjs, testParallelBindings, + testParallelHasOwn, + testParallelSilentConsole, testParallelTimersPromises ], - 'test/parallel/test-stream-finished.js': [testFinishedEvent], - 'test/parallel/test-stream-flatMap.js': [testFlatMapWinLineSeparator], - 'test/parallel/test-stream-preprocess.js': [testPreprocessWinLineSeparator], - 'test/parallel/test-stream-writable-samecb-singletick.js': [testTicksReenableConsoleLog, testTickSaveHook], - 'test/parallel/test-stream2-readable-from-list.js': [testReadableBufferListInspect], + 'test/parallel/test-stream-finished.js': [testParallelFinishedEvent], + 'test/parallel/test-stream-flatMap.js': [testParallelFlatMapWinLineSeparator], + 'test/parallel/test-stream-preprocess.js': [testParallelPreprocessWinLineSeparator], + 'test/parallel/test-stream-writable-samecb-singletick.js': [ + testParallelTicksReenableConsoleLog, + testParallelTickSaveHook + ], + 'test/parallel/test-stream2-readable-from-list.js': [testParallelReadableBufferListInspect], 'README.md': [readmeInfo, readmeLink] } diff --git a/lib/_stream_wrap.js b/lib/_stream_wrap.js deleted file mode 100644 index 904128a382..0000000000 --- a/lib/_stream_wrap.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict'; - -module.exports = 
require('internal/js_stream_socket'); -process.emitWarning('The _stream_wrap module is deprecated.', - 'DeprecationWarning', 'DEP0125'); diff --git a/lib/internal/errors.js b/lib/internal/errors.js deleted file mode 100644 index 15e6c88a1e..0000000000 --- a/lib/internal/errors.js +++ /dev/null @@ -1,1663 +0,0 @@ -/* eslint node-core/documented-errors: "error" */ -/* eslint node-core/alphabetize-errors: "error" */ -/* eslint node-core/prefer-util-format-errors: "error" */ - -'use strict'; - -// The whole point behind this internal module is to allow Node.js to no -// longer be forced to treat every error message change as a semver-major -// change. The NodeError classes here all expose a `code` property whose -// value statically and permanently identifies the error. While the error -// message may change, the code should not. - -const { - AggregateError, - ArrayFrom, - ArrayIsArray, - ArrayPrototypeFilter, - ArrayPrototypeIncludes, - ArrayPrototypeIndexOf, - ArrayPrototypeJoin, - ArrayPrototypeMap, - ArrayPrototypePop, - ArrayPrototypePush, - ArrayPrototypeSlice, - ArrayPrototypeSplice, - ArrayPrototypeUnshift, - Error, - ErrorCaptureStackTrace, - ErrorPrototypeToString, - JSONStringify, - MapPrototypeGet, - MathAbs, - MathMax, - Number, - NumberIsInteger, - ObjectAssign, - ObjectDefineProperty, - ObjectDefineProperties, - ObjectIsExtensible, - ObjectGetOwnPropertyDescriptor, - ObjectKeys, - ObjectPrototypeHasOwnProperty, - RangeError, - ReflectApply, - RegExpPrototypeTest, - SafeArrayIterator, - SafeMap, - SafeWeakMap, - String, - StringPrototypeEndsWith, - StringPrototypeIncludes, - StringPrototypeMatch, - StringPrototypeSlice, - StringPrototypeSplit, - StringPrototypeStartsWith, - StringPrototypeToLowerCase, - Symbol, - SymbolFor, - SyntaxError, - TypeError, - URIError, -} = require('./primordials'); - -const kIsNodeError = Symbol('kIsNodeError'); - -const isWindows = process.platform === 'win32'; - -const messages = new SafeMap(); -const codes = {}; - -const classRegExp = /^([A-Z][a-z0-9]*)+$/; -// Sorted by a rough estimate on most frequently used entries. -const kTypes = [ - 'string', - 'function', - 'number', - 'object', - // Accept 'Function' and 'Object' as alternative to the lower cased version. - 'Function', - 'Object', - 'boolean', - 'bigint', - 'symbol', -]; - -const MainContextError = Error; -const overrideStackTrace = new SafeWeakMap(); -const kNoOverride = Symbol('kNoOverride'); -let userStackTraceLimit; -const nodeInternalPrefix = '__node_internal_'; -const prepareStackTrace = (globalThis, error, trace) => { - // API for node internals to override error stack formatting - // without interfering with userland code. - if (overrideStackTrace.has(error)) { - const f = overrideStackTrace.get(error); - overrideStackTrace.delete(error); - return f(error, trace); - } - - const firstFrame = trace[0]?.getFunctionName(); - if (firstFrame && StringPrototypeStartsWith(firstFrame, nodeInternalPrefix)) { - for (let l = trace.length - 1; l >= 0; l--) { - const fn = trace[l]?.getFunctionName(); - if (fn && StringPrototypeStartsWith(fn, nodeInternalPrefix)) { - ArrayPrototypeSplice(trace, 0, l + 1); - break; - } - } - // `userStackTraceLimit` is the user value for `Error.stackTraceLimit`, - // it is updated at every new exception in `captureLargerStackTrace`. 
- if (trace.length > userStackTraceLimit) - ArrayPrototypeSplice(trace, userStackTraceLimit); - } - - const globalOverride = - maybeOverridePrepareStackTrace(globalThis, error, trace); - if (globalOverride !== kNoOverride) return globalOverride; - - // Normal error formatting: - // - // Error: Message - // at function (file) - // at file - let errorString; - if (kIsNodeError in error) { - errorString = `${error.name} [${error.code}]: ${error.message}`; - } else { - errorString = ErrorPrototypeToString(error); - } - if (trace.length === 0) { - return errorString; - } - return `${errorString}\n at ${ArrayPrototypeJoin(trace, '\n at ')}`; -}; - -const maybeOverridePrepareStackTrace = (globalThis, error, trace) => { - // Polyfill of V8's Error.prepareStackTrace API. - // https://crbug.com/v8/7848 - // `globalThis` is the global that contains the constructor which - // created `error`. - if (typeof globalThis.Error?.prepareStackTrace === 'function') { - return globalThis.Error.prepareStackTrace(error, trace); - } - // We still have legacy usage that depends on the main context's `Error` - // being used, even when the error is from a different context. - // TODO(devsnek): evaluate if this can be eventually deprecated/removed. - if (typeof MainContextError.prepareStackTrace === 'function') { - return MainContextError.prepareStackTrace(error, trace); - } - - return kNoOverride; -}; - -const aggregateTwoErrors = hideStackFrames((innerError, outerError) => { - if (innerError && outerError && innerError !== outerError) { - if (ArrayIsArray(outerError.errors)) { - // If `outerError` is already an `AggregateError`. - ArrayPrototypePush(outerError.errors, innerError); - return outerError; - } - // eslint-disable-next-line no-restricted-syntax - const err = new AggregateError(new SafeArrayIterator([ - outerError, - innerError, - ]), outerError.message); - err.code = outerError.code; - return err; - } - return innerError || outerError; -}); - -// Lazily loaded -let util; -let assert; - -let internalUtil = null; -function lazyInternalUtil() { - if (!internalUtil) { - internalUtil = require('../util'); - } - return internalUtil; -} - -let internalUtilInspect = null; -function lazyInternalUtilInspect() { - if (!internalUtilInspect) { - internalUtilInspect = require('./inspect'); - } - return internalUtilInspect; -} - -let buffer; -function lazyBuffer() { - if (buffer === undefined) - buffer = require('buffer').Buffer; - return buffer; -} - -function isErrorStackTraceLimitWritable() { - const desc = ObjectGetOwnPropertyDescriptor(Error, 'stackTraceLimit'); - if (desc === undefined) { - return ObjectIsExtensible(Error); - } - - return ObjectPrototypeHasOwnProperty(desc, 'writable') ? - desc.writable : - desc.set !== undefined; -} - -// A specialized Error that includes an additional info property with -// additional information about the error condition. -// It has the properties present in a UVException but with a custom error -// message followed by the uv error code and uv error message. -// It also has its own error code with the original uv error context put into -// `err.info`. -// The context passed into this error must have .code, .syscall and .message, -// and may have .path and .dest. -class SystemError extends Error { - constructor(key, context) { - const limit = Error.stackTraceLimit; - if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; - super(); - // Reset the limit and setting the name property. 
- if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = limit; - const prefix = getMessage(key, [], this); - let message = `${prefix}: ${context.syscall} returned ` + - `${context.code} (${context.message})`; - - if (context.path !== undefined) - message += ` ${context.path}`; - if (context.dest !== undefined) - message += ` => ${context.dest}`; - - captureLargerStackTrace(this); - - this.code = key; - - ObjectDefineProperties(this, { - [kIsNodeError]: { - value: true, - enumerable: false, - writable: false, - configurable: true, - }, - name: { - value: 'SystemError', - enumerable: false, - writable: true, - configurable: true, - }, - message: { - value: message, - enumerable: false, - writable: true, - configurable: true, - }, - info: { - value: context, - enumerable: true, - configurable: true, - writable: false, - }, - errno: { - get() { - return context.errno; - }, - set: (value) => { - context.errno = value; - }, - enumerable: true, - configurable: true, - }, - syscall: { - get() { - return context.syscall; - }, - set: (value) => { - context.syscall = value; - }, - enumerable: true, - configurable: true, - }, - }); - - if (context.path !== undefined) { - // TODO(BridgeAR): Investigate why and when the `.toString()` was - // introduced. The `path` and `dest` properties in the context seem to - // always be of type string. We should probably just remove the - // `.toString()` and `Buffer.from()` operations and set the value on the - // context as the user did. - ObjectDefineProperty(this, 'path', { - get() { - return context.path != null ? - context.path.toString() : context.path; - }, - set: (value) => { - context.path = value ? - lazyBuffer().from(value.toString()) : undefined; - }, - enumerable: true, - configurable: true - }); - } - - if (context.dest !== undefined) { - ObjectDefineProperty(this, 'dest', { - get() { - return context.dest != null ? - context.dest.toString() : context.dest; - }, - set: (value) => { - context.dest = value ? - lazyBuffer().from(value.toString()) : undefined; - }, - enumerable: true, - configurable: true - }); - } - } - - toString() { - return `${this.name} [${this.code}]: ${this.message}`; - } - - [SymbolFor('nodejs.util.inspect.custom')](recurseTimes, ctx) { - return lazyInternalUtilInspect().inspect(this, { - ...ctx, - getters: true, - customInspect: false - }); - } -} - -function makeSystemErrorWithCode(key) { - return class NodeError extends SystemError { - constructor(ctx) { - super(key, ctx); - } - }; -} - -function makeNodeErrorWithCode(Base, key) { - return function NodeError(...args) { - const limit = Error.stackTraceLimit; - if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; - const error = new Base(); - // Reset the limit and setting the name property. - if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = limit; - const message = getMessage(key, args, error); - ObjectDefineProperties(error, { - [kIsNodeError]: { - value: true, - enumerable: false, - writable: false, - configurable: true, - }, - message: { - value: message, - enumerable: false, - writable: true, - configurable: true, - }, - toString: { - value() { - return `${this.name} [${key}]: ${this.message}`; - }, - enumerable: false, - writable: true, - configurable: true, - }, - }); - captureLargerStackTrace(error); - error.code = key; - return error; - }; -} - -/** - * This function removes unnecessary frames from Node.js core errors. 
- * @template {(...args: any[]) => any} T - * @type {(fn: T) => T} - */ -function hideStackFrames(fn) { - // We rename the functions that will be hidden to cut off the stacktrace - // at the outermost one - const hidden = nodeInternalPrefix + fn.name; - ObjectDefineProperty(fn, 'name', { value: hidden }); - return fn; -} - -// Utility function for registering the error codes. Only used here. Exported -// *only* to allow for testing. -function E(sym, val, def, ...otherClasses) { - // Special case for SystemError that formats the error message differently - // The SystemErrors only have SystemError as their base classes. - messages.set(sym, val); - if (def === SystemError) { - def = makeSystemErrorWithCode(sym); - } else { - def = makeNodeErrorWithCode(def, sym); - } - - if (otherClasses.length !== 0) { - otherClasses.forEach((clazz) => { - def[clazz.name] = makeNodeErrorWithCode(clazz, sym); - }); - } - codes[sym] = def; -} - -function getMessage(key, args, self) { - const msg = messages.get(key); - - if (assert === undefined) assert = require('assert'); - - if (typeof msg === 'function') { - assert( - msg.length <= args.length, // Default options do not count. - `Code: ${key}; The provided arguments length (${args.length}) does not ` + - `match the required ones (${msg.length}).` - ); - return ReflectApply(msg, self, args); - } - - const expectedLength = - (StringPrototypeMatch(msg, /%[dfijoOs]/g) || []).length; - assert( - expectedLength === args.length, - `Code: ${key}; The provided arguments length (${args.length}) does not ` + - `match the required ones (${expectedLength}).` - ); - if (args.length === 0) - return msg; - - ArrayPrototypeUnshift(args, msg); - return ReflectApply(lazyInternalUtilInspect().format, null, args); -} - -let uvBinding; - -function lazyUv() { - if (!uvBinding) { - uvBinding = internalBinding('uv'); - } - return uvBinding; -} - -const uvUnmappedError = ['UNKNOWN', 'unknown error']; - -function uvErrmapGet(name) { - uvBinding = lazyUv(); - if (!uvBinding.errmap) { - uvBinding.errmap = uvBinding.getErrorMap(); - } - return MapPrototypeGet(uvBinding.errmap, name); -} - -const captureLargerStackTrace = hideStackFrames( - function captureLargerStackTrace(err) { - const stackTraceLimitIsWritable = isErrorStackTraceLimitWritable(); - if (stackTraceLimitIsWritable) { - userStackTraceLimit = Error.stackTraceLimit; - Error.stackTraceLimit = Infinity; - } - - if (typeof ErrorCaptureStackTrace === 'function') { - ErrorCaptureStackTrace(err); - } - - // Reset the limit - if (stackTraceLimitIsWritable) Error.stackTraceLimit = userStackTraceLimit; - - return err; - }); - -/** - * This creates an error compatible with errors produced in the C++ - * function UVException using a context object with data assembled in C++. - * The goal is to migrate them to ERR_* errors later when compatibility is - * not a concern. - * - * @param {object} ctx - * @returns {Error} - */ -const uvException = hideStackFrames(function uvException(ctx) { - const { 0: code, 1: uvmsg } = uvErrmapGet(ctx.errno) || uvUnmappedError; - let message = `${code}: ${ctx.message || uvmsg}, ${ctx.syscall}`; - - let path; - let dest; - if (ctx.path) { - path = ctx.path.toString(); - message += ` '${path}'`; - } - if (ctx.dest) { - dest = ctx.dest.toString(); - message += ` -> '${dest}'`; - } - - // Reducing the limit improves the performance significantly. We do not lose - // the stack frames due to the `captureStackTrace()` function that is called - // later. 
- const tmpLimit = Error.stackTraceLimit; - if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; - // Pass the message to the constructor instead of setting it on the object - // to make sure it is the same as the one created in C++ - // eslint-disable-next-line no-restricted-syntax - const err = new Error(message); - if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = tmpLimit; - - for (const prop of ObjectKeys(ctx)) { - if (prop === 'message' || prop === 'path' || prop === 'dest') { - continue; - } - err[prop] = ctx[prop]; - } - - err.code = code; - if (path) { - err.path = path; - } - if (dest) { - err.dest = dest; - } - - return captureLargerStackTrace(err); -}); - -/** - * This creates an error compatible with errors produced in the C++ - * This function should replace the deprecated - * `exceptionWithHostPort()` function. - * - * @param {number} err - A libuv error number - * @param {string} syscall - * @param {string} address - * @param {number} [port] - * @returns {Error} - */ -const uvExceptionWithHostPort = hideStackFrames( - function uvExceptionWithHostPort(err, syscall, address, port) { - const { 0: code, 1: uvmsg } = uvErrmapGet(err) || uvUnmappedError; - const message = `${syscall} ${code}: ${uvmsg}`; - let details = ''; - - if (port && port > 0) { - details = ` ${address}:${port}`; - } else if (address) { - details = ` ${address}`; - } - - // Reducing the limit improves the performance significantly. We do not - // lose the stack frames due to the `captureStackTrace()` function that - // is called later. - const tmpLimit = Error.stackTraceLimit; - if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; - // eslint-disable-next-line no-restricted-syntax - const ex = new Error(`${message}${details}`); - if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = tmpLimit; - ex.code = code; - ex.errno = err; - ex.syscall = syscall; - ex.address = address; - if (port) { - ex.port = port; - } - - return captureLargerStackTrace(ex); - }); - -/** - * This used to be util._errnoException(). - * - * @param {number} err - A libuv error number - * @param {string} syscall - * @param {string} [original] - * @returns {Error} - */ -const errnoException = hideStackFrames( - function errnoException(err, syscall, original) { - // TODO(joyeecheung): We have to use the type-checked - // getSystemErrorName(err) to guard against invalid arguments from users. - // This can be replaced with [ code ] = errmap.get(err) when this method - // is no longer exposed to user land. - if (util === undefined) util = require('util'); - const code = util.getSystemErrorName(err); - const message = original ? - `${syscall} ${code} ${original}` : `${syscall} ${code}`; - - const tmpLimit = Error.stackTraceLimit; - if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; - // eslint-disable-next-line no-restricted-syntax - const ex = new Error(message); - if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = tmpLimit; - ex.errno = err; - ex.code = code; - ex.syscall = syscall; - - return captureLargerStackTrace(ex); - }); - -/** - * Deprecated, new function is `uvExceptionWithHostPort()` - * New function added the error description directly - * from C++. 
this method for backwards compatibility - * @param {number} err - A libuv error number - * @param {string} syscall - * @param {string} address - * @param {number} [port] - * @param {string} [additional] - * @returns {Error} - */ -const exceptionWithHostPort = hideStackFrames( - function exceptionWithHostPort(err, syscall, address, port, additional) { - // TODO(joyeecheung): We have to use the type-checked - // getSystemErrorName(err) to guard against invalid arguments from users. - // This can be replaced with [ code ] = errmap.get(err) when this method - // is no longer exposed to user land. - if (util === undefined) util = require('util'); - const code = util.getSystemErrorName(err); - let details = ''; - if (port && port > 0) { - details = ` ${address}:${port}`; - } else if (address) { - details = ` ${address}`; - } - if (additional) { - details += ` - Local (${additional})`; - } - - // Reducing the limit improves the performance significantly. We do not - // lose the stack frames due to the `captureStackTrace()` function that - // is called later. - const tmpLimit = Error.stackTraceLimit; - if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; - // eslint-disable-next-line no-restricted-syntax - const ex = new Error(`${syscall} ${code}${details}`); - if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = tmpLimit; - ex.errno = err; - ex.code = code; - ex.syscall = syscall; - ex.address = address; - if (port) { - ex.port = port; - } - - return captureLargerStackTrace(ex); - }); - -/** - * @param {number|string} code - A libuv error number or a c-ares error code - * @param {string} syscall - * @param {string} [hostname] - * @returns {Error} - */ -const dnsException = hideStackFrames(function(code, syscall, hostname) { - let errno; - // If `code` is of type number, it is a libuv error number, else it is a - // c-ares error code. - // TODO(joyeecheung): translate c-ares error codes into numeric ones and - // make them available in a property that's not error.errno (since they - // can be in conflict with libuv error codes). Also make sure - // util.getSystemErrorName() can understand them when an being informed that - // the number is a c-ares error code. - if (typeof code === 'number') { - errno = code; - // ENOTFOUND is not a proper POSIX error, but this error has been in place - // long enough that it's not practical to remove it. - if (code === lazyUv().UV_EAI_NODATA || code === lazyUv().UV_EAI_NONAME) { - code = 'ENOTFOUND'; // Fabricated error name. - } else { - code = lazyInternalUtil().getSystemErrorName(code); - } - } - const message = `${syscall} ${code}${hostname ? ` ${hostname}` : ''}`; - // Reducing the limit improves the performance significantly. We do not lose - // the stack frames due to the `captureStackTrace()` function that is called - // later. 
- const tmpLimit = Error.stackTraceLimit; - if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; - // eslint-disable-next-line no-restricted-syntax - const ex = new Error(message); - if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = tmpLimit; - ex.errno = errno; - ex.code = code; - ex.syscall = syscall; - if (hostname) { - ex.hostname = hostname; - } - - return captureLargerStackTrace(ex); -}); - -function connResetException(msg) { - // eslint-disable-next-line no-restricted-syntax - const ex = new Error(msg); - ex.code = 'ECONNRESET'; - return ex; -} - -let maxStack_ErrorName; -let maxStack_ErrorMessage; -/** - * Returns true if `err.name` and `err.message` are equal to engine-specific - * values indicating max call stack size has been exceeded. - * "Maximum call stack size exceeded" in V8. - * - * @param {Error} err - * @returns {boolean} - */ -function isStackOverflowError(err) { - if (maxStack_ErrorMessage === undefined) { - try { - function overflowStack() { overflowStack(); } - overflowStack(); - } catch (err) { - maxStack_ErrorMessage = err.message; - maxStack_ErrorName = err.name; - } - } - - return err && err.name === maxStack_ErrorName && - err.message === maxStack_ErrorMessage; -} - -// Only use this for integers! Decimal numbers do not work with this function. -function addNumericalSeparator(val) { - let res = ''; - let i = val.length; - const start = val[0] === '-' ? 1 : 0; - for (; i >= start + 4; i -= 3) { - res = `_${StringPrototypeSlice(val, i - 3, i)}${res}`; - } - return `${StringPrototypeSlice(val, 0, i)}${res}`; -} - -// Used to enhance the stack that will be picked up by the inspector -const kEnhanceStackBeforeInspector = Symbol('kEnhanceStackBeforeInspector'); - -// These are supposed to be called only on fatal exceptions before -// the process exits. -const fatalExceptionStackEnhancers = { - beforeInspector(error) { - if (typeof error[kEnhanceStackBeforeInspector] !== 'function') { - return error.stack; - } - - try { - // Set the error.stack here so it gets picked up by the - // inspector. - error.stack = error[kEnhanceStackBeforeInspector](); - } catch { - // We are just enhancing the error. If it fails, ignore it. - } - return error.stack; - }, - afterInspector(error) { - const originalStack = error.stack; - let useColors = true; - // Some consoles do not convert ANSI escape sequences to colors, - // rather display them directly to the stdout. On those consoles, - // libuv emulates colors by intercepting stdout stream and calling - // corresponding Windows API functions for setting console colors. - // However, fatal error are handled differently and we cannot easily - // highlight them. On Windows, detecting whether a console supports - // ANSI escape sequences is not reliable. - if (process.platform === 'win32') { - const info = internalBinding('os').getOSInformation(); - const ver = ArrayPrototypeMap(StringPrototypeSplit(info[2], '.'), - Number); - if (ver[0] !== 10 || ver[2] < 14393) { - useColors = false; - } - } - const { - inspect, - inspectDefaultOptions: { - colors: defaultColors - } - } = lazyInternalUtilInspect(); - const colors = useColors && - ((internalBinding('util').guessHandleType(2) === 'TTY' && - false) || - defaultColors); - try { - return inspect(error, { - colors, - customInspect: false, - depth: MathMax(inspect.defaultOptions.depth, 5) - }); - } catch { - return originalStack; - } - } -}; - -// Ensures the printed error line is from user code. 
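// Illustrative sketch, not part of the deleted file: what the integer-only
// addNumericalSeparator() above produces. A plain-JS equivalent of the
// primordials-based helper, with the grouping made explicit:
function addNumericalSeparatorSketch(val) {
  let res = '';
  let i = val.length;
  const start = val[0] === '-' ? 1 : 0;
  // Walk backwards in groups of three digits, skipping a leading '-'.
  for (; i >= start + 4; i -= 3) {
    res = `_${val.slice(i - 3, i)}${res}`;
  }
  return `${val.slice(0, i)}${res}`;
}
addNumericalSeparatorSketch('1234567');      // '1_234_567'
addNumericalSeparatorSketch('-4294967296');  // '-4_294_967_296'
// As the comment above warns, decimal strings group incorrectly:
addNumericalSeparatorSketch('1234.5678');    // '123_4.5_678'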
-let _kArrowMessagePrivateSymbol, _setHiddenValue; -function setArrowMessage(err, arrowMessage) { - if (!_kArrowMessagePrivateSymbol) { - ({ - arrow_message_private_symbol: _kArrowMessagePrivateSymbol, - setHiddenValue: _setHiddenValue, - } = internalBinding('util')); - } - _setHiddenValue(err, _kArrowMessagePrivateSymbol, arrowMessage); -} - -// Hide stack lines before the first user code line. -function hideInternalStackFrames(error) { - overrideStackTrace.set(error, (error, stackFrames) => { - let frames = stackFrames; - if (typeof stackFrames === 'object') { - frames = ArrayPrototypeFilter( - stackFrames, - (frm) => !StringPrototypeStartsWith(frm.getFileName() || '', - 'node:internal') - ); - } - ArrayPrototypeUnshift(frames, error); - return ArrayPrototypeJoin(frames, '\n at '); - }); -} - -// Node uses an AbortError that isn't exactly the same as the DOMException -// to make usage of the error in userland and readable-stream easier. -// It is a regular error with `.code` and `.name`. -class AbortError extends Error { - constructor(message = 'The operation was aborted', options = undefined) { - if (options !== undefined && typeof options !== 'object') { - throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options); - } - super(message, options); - this.code = 'ABORT_ERR'; - this.name = 'AbortError'; - } -} - -/** - * This creates a generic Node.js error. - * - * @param {string} message The error message. - * @param {object} errorProperties Object with additional properties to be added to the error. - * @returns {Error} - */ -const genericNodeError = hideStackFrames(function genericNodeError(message, errorProperties) { - // eslint-disable-next-line no-restricted-syntax - const err = new Error(message); - ObjectAssign(err, errorProperties); - return err; -}); - -module.exports = { - AbortError, - aggregateTwoErrors, - captureLargerStackTrace, - codes, - connResetException, - dnsException, - // This is exported only to facilitate testing. - E, - errnoException, - exceptionWithHostPort, - fatalExceptionStackEnhancers, - genericNodeError, - getMessage, - hideInternalStackFrames, - hideStackFrames, - isErrorStackTraceLimitWritable, - isStackOverflowError, - kEnhanceStackBeforeInspector, - kIsNodeError, - kNoOverride, - maybeOverridePrepareStackTrace, - overrideStackTrace, - prepareStackTrace, - setArrowMessage, - SystemError, - uvErrmapGet, - uvException, - uvExceptionWithHostPort, -}; - -// To declare an error message, use the E(sym, val, def) function above. The sym -// must be an upper case string. The val can be either a function or a string. -// The def must be an error class. -// The return value of the function must be a string. -// Examples: -// E('EXAMPLE_KEY1', 'This is the error value', Error); -// E('EXAMPLE_KEY2', (a, b) => return `${a} ${b}`, RangeError); -// -// Once an error code has been assigned, the code itself MUST NOT change and -// any given error code must never be reused to identify a different error. -// -// Any error code added here should also be added to the documentation -// -// Note: Please try to keep these in alphabetical order -// -// Note: Node.js specific errors must begin with the prefix ERR_ - -E('ERR_AMBIGUOUS_ARGUMENT', 'The "%s" argument is ambiguous. 
%s', TypeError); -E('ERR_ARG_NOT_ITERABLE', '%s must be iterable', TypeError); -E('ERR_ASSERTION', '%s', Error); -E('ERR_ASYNC_CALLBACK', '%s must be a function', TypeError); -E('ERR_ASYNC_TYPE', 'Invalid name for async "type": %s', TypeError); -E('ERR_BROTLI_INVALID_PARAM', '%s is not a valid Brotli parameter', RangeError); -E('ERR_BUFFER_OUT_OF_BOUNDS', - // Using a default argument here is important so the argument is not counted - // towards `Function#length`. - (name = undefined) => { - if (name) { - return `"${name}" is outside of buffer bounds`; - } - return 'Attempt to access memory outside buffer bounds'; - }, RangeError); -E('ERR_BUFFER_TOO_LARGE', - 'Cannot create a Buffer larger than %s bytes', - RangeError); -E('ERR_CANNOT_WATCH_SIGINT', 'Cannot watch for SIGINT signals', Error); -E('ERR_CHILD_CLOSED_BEFORE_REPLY', - 'Child closed before reply received', Error); -E('ERR_CHILD_PROCESS_IPC_REQUIRED', - "Forked processes must have an IPC channel, missing value 'ipc' in %s", - Error); -E('ERR_CHILD_PROCESS_STDIO_MAXBUFFER', '%s maxBuffer length exceeded', - RangeError); -E('ERR_CONSOLE_WRITABLE_STREAM', - 'Console expects a writable stream instance for %s', TypeError); -E('ERR_CONTEXT_NOT_INITIALIZED', 'context used is not initialized', Error); -E('ERR_CRYPTO_CUSTOM_ENGINE_NOT_SUPPORTED', - 'Custom engines not supported by this OpenSSL', Error); -E('ERR_CRYPTO_ECDH_INVALID_FORMAT', 'Invalid ECDH format: %s', TypeError); -E('ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY', - 'Public key is not valid for specified curve', Error); -E('ERR_CRYPTO_ENGINE_UNKNOWN', 'Engine "%s" was not found', Error); -E('ERR_CRYPTO_FIPS_FORCED', - 'Cannot set FIPS mode, it was forced with --force-fips at startup.', Error); -E('ERR_CRYPTO_FIPS_UNAVAILABLE', 'Cannot set FIPS mode in a non-FIPS build.', - Error); -E('ERR_CRYPTO_HASH_FINALIZED', 'Digest already called', Error); -E('ERR_CRYPTO_HASH_UPDATE_FAILED', 'Hash update failed', Error); -E('ERR_CRYPTO_INCOMPATIBLE_KEY', 'Incompatible %s: %s', Error); -E('ERR_CRYPTO_INCOMPATIBLE_KEY_OPTIONS', 'The selected key encoding %s %s.', - Error); -E('ERR_CRYPTO_INVALID_DIGEST', 'Invalid digest: %s', TypeError); -E('ERR_CRYPTO_INVALID_JWK', 'Invalid JWK data', TypeError); -E('ERR_CRYPTO_INVALID_KEY_OBJECT_TYPE', - 'Invalid key object type %s, expected %s.', TypeError); -E('ERR_CRYPTO_INVALID_STATE', 'Invalid state for operation %s', Error); -E('ERR_CRYPTO_PBKDF2_ERROR', 'PBKDF2 error', Error); -E('ERR_CRYPTO_SCRYPT_INVALID_PARAMETER', 'Invalid scrypt parameter', Error); -E('ERR_CRYPTO_SCRYPT_NOT_SUPPORTED', 'Scrypt algorithm not supported', Error); -// Switch to TypeError. The current implementation does not seem right. 
-E('ERR_CRYPTO_SIGN_KEY_REQUIRED', 'No key provided to sign', Error); -E('ERR_DEBUGGER_ERROR', '%s', Error); -E('ERR_DEBUGGER_STARTUP_ERROR', '%s', Error); -E('ERR_DIR_CLOSED', 'Directory handle was closed', Error); -E('ERR_DIR_CONCURRENT_OPERATION', - 'Cannot do synchronous work on directory handle with concurrent ' + - 'asynchronous operations', Error); -E('ERR_DNS_SET_SERVERS_FAILED', 'c-ares failed to set servers: "%s" [%s]', - Error); -E('ERR_DOMAIN_CALLBACK_NOT_AVAILABLE', - 'A callback was registered through ' + - 'process.setUncaughtExceptionCaptureCallback(), which is mutually ' + - 'exclusive with using the `domain` module', - Error); -E('ERR_DOMAIN_CANNOT_SET_UNCAUGHT_EXCEPTION_CAPTURE', - 'The `domain` module is in use, which is mutually exclusive with calling ' + - 'process.setUncaughtExceptionCaptureCallback()', - Error); -E('ERR_ENCODING_INVALID_ENCODED_DATA', function(encoding, ret) { - this.errno = ret; - return `The encoded data was not valid for encoding ${encoding}`; -}, TypeError); -E('ERR_ENCODING_NOT_SUPPORTED', 'The "%s" encoding is not supported', - RangeError); -E('ERR_EVAL_ESM_CANNOT_PRINT', '--print cannot be used with ESM input', Error); -E('ERR_EVENT_RECURSION', 'The event "%s" is already being dispatched', Error); -E('ERR_FALSY_VALUE_REJECTION', function(reason) { - this.reason = reason; - return 'Promise was rejected with falsy value'; -}, Error); -E('ERR_FEATURE_UNAVAILABLE_ON_PLATFORM', - 'The feature %s is unavailable on the current platform' + - ', which is being used to run Node.js', - TypeError); -E('ERR_FS_CP_DIR_TO_NON_DIR', - 'Cannot overwrite directory with non-directory', SystemError); -E('ERR_FS_CP_EEXIST', 'Target already exists', SystemError); -E('ERR_FS_CP_EINVAL', 'Invalid src or dest', SystemError); -E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe', SystemError); -E('ERR_FS_CP_NON_DIR_TO_DIR', - 'Cannot overwrite non-directory with directory', SystemError); -E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file', SystemError); -E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', - 'Cannot overwrite symlink in subdirectory of self', SystemError); -E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type', SystemError); -E('ERR_FS_EISDIR', 'Path is a directory', SystemError); -E('ERR_FS_FILE_TOO_LARGE', 'File size (%s) is greater than 2 GB', RangeError); -E('ERR_FS_INVALID_SYMLINK_TYPE', - 'Symlink type must be one of "dir", "file", or "junction". Received "%s"', - Error); // Switch to TypeError. 
The current implementation does not seem right -E('ERR_HTTP2_ALTSVC_INVALID_ORIGIN', - 'HTTP/2 ALTSVC frames require a valid origin', TypeError); -E('ERR_HTTP2_ALTSVC_LENGTH', - 'HTTP/2 ALTSVC frames are limited to 16382 bytes', TypeError); -E('ERR_HTTP2_CONNECT_AUTHORITY', - ':authority header is required for CONNECT requests', Error); -E('ERR_HTTP2_CONNECT_PATH', - 'The :path header is forbidden for CONNECT requests', Error); -E('ERR_HTTP2_CONNECT_SCHEME', - 'The :scheme header is forbidden for CONNECT requests', Error); -E('ERR_HTTP2_GOAWAY_SESSION', - 'New streams cannot be created after receiving a GOAWAY', Error); -E('ERR_HTTP2_HEADERS_AFTER_RESPOND', - 'Cannot specify additional headers after response initiated', Error); -E('ERR_HTTP2_HEADERS_SENT', 'Response has already been initiated.', Error); -E('ERR_HTTP2_HEADER_SINGLE_VALUE', - 'Header field "%s" must only have a single value', TypeError); -E('ERR_HTTP2_INFO_STATUS_NOT_ALLOWED', - 'Informational status codes cannot be used', RangeError); -E('ERR_HTTP2_INVALID_CONNECTION_HEADERS', - 'HTTP/1 Connection specific headers are forbidden: "%s"', TypeError); -E('ERR_HTTP2_INVALID_HEADER_VALUE', - 'Invalid value "%s" for header "%s"', TypeError); -E('ERR_HTTP2_INVALID_INFO_STATUS', - 'Invalid informational status code: %s', RangeError); -E('ERR_HTTP2_INVALID_ORIGIN', - 'HTTP/2 ORIGIN frames require a valid origin', TypeError); -E('ERR_HTTP2_INVALID_PACKED_SETTINGS_LENGTH', - 'Packed settings length must be a multiple of six', RangeError); -E('ERR_HTTP2_INVALID_PSEUDOHEADER', - '"%s" is an invalid pseudoheader or is used incorrectly', TypeError); -E('ERR_HTTP2_INVALID_SESSION', 'The session has been destroyed', Error); -E('ERR_HTTP2_INVALID_SETTING_VALUE', - // Using default arguments here is important so the arguments are not counted - // towards `Function#length`. - function(name, actual, min = undefined, max = undefined) { - this.actual = actual; - if (min !== undefined) { - this.min = min; - this.max = max; - } - return `Invalid value for setting "${name}": ${actual}`; - }, TypeError, RangeError); -E('ERR_HTTP2_INVALID_STREAM', 'The stream has been destroyed', Error); -E('ERR_HTTP2_MAX_PENDING_SETTINGS_ACK', - 'Maximum number of pending settings acknowledgements', Error); -E('ERR_HTTP2_NESTED_PUSH', - 'A push stream cannot initiate another push stream.', Error); -E('ERR_HTTP2_NO_MEM', 'Out of memory', Error); -E('ERR_HTTP2_NO_SOCKET_MANIPULATION', - 'HTTP/2 sockets should not be directly manipulated (e.g. 
read and written)', - Error); -E('ERR_HTTP2_ORIGIN_LENGTH', - 'HTTP/2 ORIGIN frames are limited to 16382 bytes', TypeError); -E('ERR_HTTP2_OUT_OF_STREAMS', - 'No stream ID is available because maximum stream ID has been reached', - Error); -E('ERR_HTTP2_PAYLOAD_FORBIDDEN', - 'Responses with %s status must not have a payload', Error); -E('ERR_HTTP2_PING_CANCEL', 'HTTP2 ping cancelled', Error); -E('ERR_HTTP2_PING_LENGTH', 'HTTP2 ping payload must be 8 bytes', RangeError); -E('ERR_HTTP2_PSEUDOHEADER_NOT_ALLOWED', - 'Cannot set HTTP/2 pseudo-headers', TypeError); -E('ERR_HTTP2_PUSH_DISABLED', 'HTTP/2 client has disabled push streams', Error); -E('ERR_HTTP2_SEND_FILE', 'Directories cannot be sent', Error); -E('ERR_HTTP2_SEND_FILE_NOSEEK', - 'Offset or length can only be specified for regular files', Error); -E('ERR_HTTP2_SESSION_ERROR', 'Session closed with error code %s', Error); -E('ERR_HTTP2_SETTINGS_CANCEL', 'HTTP2 session settings canceled', Error); -E('ERR_HTTP2_SOCKET_BOUND', - 'The socket is already bound to an Http2Session', Error); -E('ERR_HTTP2_SOCKET_UNBOUND', - 'The socket has been disconnected from the Http2Session', Error); -E('ERR_HTTP2_STATUS_101', - 'HTTP status code 101 (Switching Protocols) is forbidden in HTTP/2', Error); -E('ERR_HTTP2_STATUS_INVALID', 'Invalid status code: %s', RangeError); -E('ERR_HTTP2_STREAM_CANCEL', function(error) { - let msg = 'The pending stream has been canceled'; - if (error) { - this.cause = error; - if (typeof error.message === 'string') - msg += ` (caused by: ${error.message})`; - } - return msg; -}, Error); -E('ERR_HTTP2_STREAM_ERROR', 'Stream closed with error code %s', Error); -E('ERR_HTTP2_STREAM_SELF_DEPENDENCY', - 'A stream cannot depend on itself', Error); -E('ERR_HTTP2_TOO_MANY_INVALID_FRAMES', 'Too many invalid HTTP/2 frames', Error); -E('ERR_HTTP2_TRAILERS_ALREADY_SENT', - 'Trailing headers have already been sent', Error); -E('ERR_HTTP2_TRAILERS_NOT_READY', - 'Trailing headers cannot be sent until after the wantTrailers event is ' + - 'emitted', Error); -E('ERR_HTTP2_UNSUPPORTED_PROTOCOL', 'protocol "%s" is unsupported.', Error); -E('ERR_HTTP_HEADERS_SENT', - 'Cannot %s headers after they are sent to the client', Error); -E('ERR_HTTP_INVALID_HEADER_VALUE', - 'Invalid value "%s" for header "%s"', TypeError); -E('ERR_HTTP_INVALID_STATUS_CODE', 'Invalid status code: %s', RangeError); -E('ERR_HTTP_REQUEST_TIMEOUT', 'Request timeout', Error); -E('ERR_HTTP_SOCKET_ENCODING', - 'Changing the socket encoding is not allowed per RFC7230 Section 3.', Error); -E('ERR_HTTP_TRAILER_INVALID', - 'Trailers are invalid with this transfer encoding', Error); -E('ERR_ILLEGAL_CONSTRUCTOR', 'Illegal constructor', TypeError); -E('ERR_IMPORT_ASSERTION_TYPE_FAILED', - 'Module "%s" is not of type "%s"', TypeError); -E('ERR_IMPORT_ASSERTION_TYPE_MISSING', - 'Module "%s" needs an import assertion of type "%s"', TypeError); -E('ERR_IMPORT_ASSERTION_TYPE_UNSUPPORTED', - 'Import assertion type "%s" is unsupported', TypeError); -E('ERR_INCOMPATIBLE_OPTION_PAIR', - 'Option "%s" cannot be used in combination with option "%s"', TypeError); -E('ERR_INPUT_TYPE_NOT_ALLOWED', '--input-type can only be used with string ' + - 'input via --eval, --print, or STDIN', Error); -E('ERR_INSPECTOR_ALREADY_ACTIVATED', - 'Inspector is already activated. 
Close it with inspector.close() ' + - 'before activating it again.', - Error); -E('ERR_INSPECTOR_ALREADY_CONNECTED', '%s is already connected', Error); -E('ERR_INSPECTOR_CLOSED', 'Session was closed', Error); -E('ERR_INSPECTOR_COMMAND', 'Inspector error %d: %s', Error); -E('ERR_INSPECTOR_NOT_ACTIVE', 'Inspector is not active', Error); -E('ERR_INSPECTOR_NOT_AVAILABLE', 'Inspector is not available', Error); -E('ERR_INSPECTOR_NOT_CONNECTED', 'Session is not connected', Error); -E('ERR_INSPECTOR_NOT_WORKER', 'Current thread is not a worker', Error); -E('ERR_INTERNAL_ASSERTION', (message) => { - const suffix = 'This is caused by either a bug in Node.js ' + - 'or incorrect usage of Node.js internals.\n' + - 'Please open an issue with this stack trace at ' + - 'https://github.com/nodejs/node/issues\n'; - return message === undefined ? suffix : `${message}\n${suffix}`; -}, Error); -E('ERR_INVALID_ADDRESS_FAMILY', function(addressType, host, port) { - this.host = host; - this.port = port; - return `Invalid address family: ${addressType} ${host}:${port}`; -}, RangeError); -E('ERR_INVALID_ARG_TYPE', - (name, expected, actual) => { - assert(typeof name === 'string', "'name' must be a string"); - if (!ArrayIsArray(expected)) { - expected = [expected]; - } - - let msg = 'The '; - if (StringPrototypeEndsWith(name, ' argument')) { - // For cases like 'first argument' - msg += `${name} `; - } else { - const type = StringPrototypeIncludes(name, '.') ? 'property' : 'argument'; - msg += `"${name}" ${type} `; - } - msg += 'must be '; - - const types = []; - const instances = []; - const other = []; - - for (const value of expected) { - assert(typeof value === 'string', - 'All expected entries have to be of type string'); - if (ArrayPrototypeIncludes(kTypes, value)) { - ArrayPrototypePush(types, StringPrototypeToLowerCase(value)); - } else if (RegExpPrototypeTest(classRegExp, value)) { - ArrayPrototypePush(instances, value); - } else { - assert(value !== 'object', - 'The value "object" should be written as "Object"'); - ArrayPrototypePush(other, value); - } - } - - // Special handle `object` in case other instances are allowed to outline - // the differences between each other. - if (instances.length > 0) { - const pos = ArrayPrototypeIndexOf(types, 'object'); - if (pos !== -1) { - ArrayPrototypeSplice(types, pos, 1); - ArrayPrototypePush(instances, 'Object'); - } - } - - if (types.length > 0) { - if (types.length > 2) { - const last = ArrayPrototypePop(types); - msg += `one of type ${ArrayPrototypeJoin(types, ', ')}, or ${last}`; - } else if (types.length === 2) { - msg += `one of type ${types[0]} or ${types[1]}`; - } else { - msg += `of type ${types[0]}`; - } - if (instances.length > 0 || other.length > 0) - msg += ' or '; - } - - if (instances.length > 0) { - if (instances.length > 2) { - const last = ArrayPrototypePop(instances); - msg += - `an instance of ${ArrayPrototypeJoin(instances, ', ')}, or ${last}`; - } else { - msg += `an instance of ${instances[0]}`; - if (instances.length === 2) { - msg += ` or ${instances[1]}`; - } - } - if (other.length > 0) - msg += ' or '; - } - - if (other.length > 0) { - if (other.length > 2) { - const last = ArrayPrototypePop(other); - msg += `one of ${ArrayPrototypeJoin(other, ', ')}, or ${last}`; - } else if (other.length === 2) { - msg += `one of ${other[0]} or ${other[1]}`; - } else { - if (StringPrototypeToLowerCase(other[0]) !== other[0]) - msg += 'an '; - msg += `${other[0]}`; - } - } - - if (actual == null) { - msg += `. 
Received ${actual}`; - } else if (typeof actual === 'function' && actual.name) { - msg += `. Received function ${actual.name}`; - } else if (typeof actual === 'object') { - if (actual.constructor && actual.constructor.name) { - msg += `. Received an instance of ${actual.constructor.name}`; - } else { - const inspected = lazyInternalUtilInspect() - .inspect(actual, { depth: -1 }); - msg += `. Received ${inspected}`; - } - } else { - let inspected = lazyInternalUtilInspect() - .inspect(actual, { colors: false }); - if (inspected.length > 25) - inspected = `${StringPrototypeSlice(inspected, 0, 25)}...`; - msg += `. Received type ${typeof actual} (${inspected})`; - } - return msg; - }, TypeError); -E('ERR_INVALID_ARG_VALUE', (name, value, reason = 'is invalid') => { - let inspected = lazyInternalUtilInspect().inspect(value); - if (inspected.length > 128) { - inspected = `${StringPrototypeSlice(inspected, 0, 128)}...`; - } - const type = StringPrototypeIncludes(name, '.') ? 'property' : 'argument'; - return `The ${type} '${name}' ${reason}. Received ${inspected}`; -}, TypeError, RangeError); -E('ERR_INVALID_ASYNC_ID', 'Invalid %s value: %s', RangeError); -E('ERR_INVALID_BUFFER_SIZE', - 'Buffer size must be a multiple of %s', RangeError); -E('ERR_INVALID_CALLBACK', - 'Callback must be a function. Received %O', TypeError); -E('ERR_INVALID_CHAR', - // Using a default argument here is important so the argument is not counted - // towards `Function#length`. - (name, field = undefined) => { - let msg = `Invalid character in ${name}`; - if (field !== undefined) { - msg += ` ["${field}"]`; - } - return msg; - }, TypeError); -E('ERR_INVALID_CURSOR_POS', - 'Cannot set cursor row without setting its column', TypeError); -E('ERR_INVALID_FD', - '"fd" must be a positive integer: %s', RangeError); -E('ERR_INVALID_FD_TYPE', 'Unsupported fd type: %s', TypeError); -E('ERR_INVALID_FILE_URL_HOST', - 'File URL host must be "localhost" or empty on %s', TypeError); -E('ERR_INVALID_FILE_URL_PATH', 'File URL path %s', TypeError); -E('ERR_INVALID_HANDLE_TYPE', 'This handle type cannot be sent', TypeError); -E('ERR_INVALID_HTTP_TOKEN', '%s must be a valid HTTP token ["%s"]', TypeError); -E('ERR_INVALID_IP_ADDRESS', 'Invalid IP address: %s', TypeError); -E('ERR_INVALID_MODULE_SPECIFIER', (request, reason, base = undefined) => { - return `Invalid module "${request}" ${reason}${base ? - ` imported from ${base}` : ''}`; -}, TypeError); -E('ERR_INVALID_PACKAGE_CONFIG', (path, base, message) => { - return `Invalid package config ${path}${base ? ` while importing ${base}` : - ''}${message ? `. ${message}` : ''}`; -}, Error); -E('ERR_INVALID_PACKAGE_TARGET', - (pkgPath, key, target, isImport = false, base = undefined) => { - const relError = typeof target === 'string' && !isImport && - target.length && !StringPrototypeStartsWith(target, './'); - if (key === '.') { - assert(isImport === false); - return `Invalid "exports" main target ${JSONStringify(target)} defined ` + - `in the package config ${pkgPath}package.json${base ? - ` imported from ${base}` : ''}${relError ? - '; targets must start with "./"' : ''}`; - } - return `Invalid "${isImport ? 'imports' : 'exports'}" target ${ - JSONStringify(target)} defined for '${key}' in the package config ${ - pkgPath}package.json${base ? ` imported from ${base}` : ''}${relError ? - '; targets must start with "./"' : ''}`; - }, Error); -E('ERR_INVALID_PROTOCOL', - 'Protocol "%s" not supported. 
Expected "%s"', - TypeError); -E('ERR_INVALID_REPL_EVAL_CONFIG', - 'Cannot specify both "breakEvalOnSigint" and "eval" for REPL', TypeError); -E('ERR_INVALID_REPL_INPUT', '%s', TypeError); -E('ERR_INVALID_RETURN_PROPERTY', (input, name, prop, value) => { - return `Expected a valid ${input} to be returned for the "${prop}" from the` + - ` "${name}" function but got ${value}.`; -}, TypeError); -E('ERR_INVALID_RETURN_PROPERTY_VALUE', (input, name, prop, value) => { - let type; - if (value && value.constructor && value.constructor.name) { - type = `instance of ${value.constructor.name}`; - } else { - type = `type ${typeof value}`; - } - return `Expected ${input} to be returned for the "${prop}" from the` + - ` "${name}" function but got ${type}.`; -}, TypeError); -E('ERR_INVALID_RETURN_VALUE', (input, name, value) => { - let type; - if (value && value.constructor && value.constructor.name) { - type = `instance of ${value.constructor.name}`; - } else { - type = `type ${typeof value}`; - } - return `Expected ${input} to be returned from the "${name}"` + - ` function but got ${type}.`; -}, TypeError, RangeError); -E('ERR_INVALID_STATE', 'Invalid state: %s', Error, TypeError, RangeError); -E('ERR_INVALID_SYNC_FORK_INPUT', - 'Asynchronous forks do not support ' + - 'Buffer, TypedArray, DataView or string input: %s', - TypeError); -E('ERR_INVALID_THIS', 'Value of "this" must be of type %s', TypeError); -E('ERR_INVALID_TUPLE', '%s must be an iterable %s tuple', TypeError); -E('ERR_INVALID_URI', 'URI malformed', URIError); -E('ERR_INVALID_URL', function(input) { - this.input = input; - // Don't include URL in message. - // (See https://github.com/nodejs/node/pull/38614) - return 'Invalid URL'; -}, TypeError); -E('ERR_INVALID_URL_SCHEME', - (expected) => { - if (typeof expected === 'string') - expected = [expected]; - assert(expected.length <= 2); - const res = expected.length === 2 ? 
- `one of scheme ${expected[0]} or ${expected[1]}` : - `of scheme ${expected[0]}`; - return `The URL must be ${res}`; - }, TypeError); -E('ERR_IPC_CHANNEL_CLOSED', 'Channel closed', Error); -E('ERR_IPC_DISCONNECTED', 'IPC channel is already disconnected', Error); -E('ERR_IPC_ONE_PIPE', 'Child process can have only one IPC pipe', Error); -E('ERR_IPC_SYNC_FORK', 'IPC cannot be used with synchronous forks', Error); -E('ERR_MANIFEST_ASSERT_INTEGRITY', - (moduleURL, realIntegrities) => { - let msg = `The content of "${ - moduleURL - }" does not match the expected integrity.`; - if (realIntegrities.size) { - const sri = ArrayPrototypeJoin( - ArrayFrom(realIntegrities.entries(), - ({ 0: alg, 1: dgs }) => `${alg}-${dgs}`), - ' ' - ); - msg += ` Integrities found are: ${sri}`; - } else { - msg += ' The resource was not found in the policy.'; - } - return msg; - }, Error); -E('ERR_MANIFEST_DEPENDENCY_MISSING', - 'Manifest resource %s does not list %s as a dependency specifier for ' + - 'conditions: %s', - Error); -E('ERR_MANIFEST_INTEGRITY_MISMATCH', - 'Manifest resource %s has multiple entries but integrity lists do not match', - SyntaxError); -E('ERR_MANIFEST_INVALID_RESOURCE_FIELD', - 'Manifest resource %s has invalid property value for %s', - TypeError); -E('ERR_MANIFEST_INVALID_SPECIFIER', - 'Manifest resource %s has invalid dependency mapping %s', - TypeError); -E('ERR_MANIFEST_TDZ', 'Manifest initialization has not yet run', Error); -E('ERR_MANIFEST_UNKNOWN_ONERROR', - 'Manifest specified unknown error behavior "%s".', - SyntaxError); -E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error); -E('ERR_MISSING_ARGS', - (...args) => { - assert(args.length > 0, 'At least one arg needs to be specified'); - let msg = 'The '; - const len = args.length; - const wrap = (a) => `"${a}"`; - args = ArrayPrototypeMap( - args, - (a) => (ArrayIsArray(a) ? 
- ArrayPrototypeJoin(ArrayPrototypeMap(a, wrap), ' or ') : - wrap(a)) - ); - switch (len) { - case 1: - msg += `${args[0]} argument`; - break; - case 2: - msg += `${args[0]} and ${args[1]} arguments`; - break; - default: - msg += ArrayPrototypeJoin(ArrayPrototypeSlice(args, 0, len - 1), ', '); - msg += `, and ${args[len - 1]} arguments`; - break; - } - return `${msg} must be specified`; - }, TypeError); -E('ERR_MISSING_OPTION', '%s is required', TypeError); -E('ERR_MODULE_NOT_FOUND', (path, base, type = 'package') => { - return `Cannot find ${type} '${path}' imported from ${base}`; -}, Error); -E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error); -E('ERR_NAPI_CONS_FUNCTION', 'Constructor must be a function', TypeError); -E('ERR_NAPI_INVALID_DATAVIEW_ARGS', - 'byte_offset + byte_length should be less than or equal to the size in ' + - 'bytes of the array passed in', - RangeError); -E('ERR_NAPI_INVALID_TYPEDARRAY_ALIGNMENT', - 'start offset of %s should be a multiple of %s', RangeError); -E('ERR_NAPI_INVALID_TYPEDARRAY_LENGTH', - 'Invalid typed array length', RangeError); -E('ERR_NETWORK_IMPORT_BAD_RESPONSE', - "import '%s' received a bad response: %s", Error); -E('ERR_NETWORK_IMPORT_DISALLOWED', - "import of '%s' by %s is not supported: %s", Error); -E('ERR_NO_CRYPTO', - 'Node.js is not compiled with OpenSSL crypto support', Error); -E('ERR_NO_ICU', - '%s is not supported on Node.js compiled without ICU', TypeError); -E('ERR_OPERATION_FAILED', 'Operation failed: %s', Error, TypeError); -E('ERR_OUT_OF_RANGE', - (str, range, input, replaceDefaultBoolean = false) => { - assert(range, 'Missing "range" argument'); - let msg = replaceDefaultBoolean ? str : - `The value of "${str}" is out of range.`; - let received; - if (NumberIsInteger(input) && MathAbs(input) > 2 ** 32) { - received = addNumericalSeparator(String(input)); - } else if (typeof input === 'bigint') { - received = String(input); - if (input > 2n ** 32n || input < -(2n ** 32n)) { - received = addNumericalSeparator(received); - } - received += 'n'; - } else { - received = lazyInternalUtilInspect().inspect(input); - } - msg += ` It must be ${range}. Received ${received}`; - return msg; - }, RangeError); -E('ERR_PACKAGE_IMPORT_NOT_DEFINED', (specifier, packagePath, base) => { - return `Package import specifier "${specifier}" is not defined${packagePath ? - ` in package ${packagePath}package.json` : ''} imported from ${base}`; -}, TypeError); -E('ERR_PACKAGE_PATH_NOT_EXPORTED', (pkgPath, subpath, base = undefined) => { - if (subpath === '.') - return `No "exports" main defined in ${pkgPath}package.json${base ? - ` imported from ${base}` : ''}`; - return `Package subpath '${subpath}' is not defined by "exports" in ${ - pkgPath}package.json${base ? ` imported from ${base}` : ''}`; -}, Error); -E('ERR_PERFORMANCE_INVALID_TIMESTAMP', - '%d is not a valid timestamp', TypeError); -E('ERR_PERFORMANCE_MEASURE_INVALID_OPTIONS', '%s', TypeError); -E('ERR_REQUIRE_ESM', - function(filename, hasEsmSyntax, parentPath = null, packageJsonPath = null) { - hideInternalStackFrames(this); - let msg = `require() of ES Module ${filename}${parentPath ? ` from ${ - parentPath}` : ''} not supported.`; - if (!packageJsonPath) { - if (StringPrototypeEndsWith(filename, '.mjs')) - msg += `\nInstead change the require of ${filename} to a dynamic ` + - 'import() which is available in all CommonJS modules.'; - return msg; - } - const path = require('path'); - const basename = parentPath && path.basename(filename) === - path.basename(parentPath) ? 
filename : path.basename(filename); - if (hasEsmSyntax) { - msg += `\nInstead change the require of ${basename} in ${parentPath} to` + - ' a dynamic import() which is available in all CommonJS modules.'; - return msg; - } - msg += `\n${basename} is treated as an ES module file as it is a .js ` + - 'file whose nearest parent package.json contains "type": "module" ' + - 'which declares all .js files in that package scope as ES modules.' + - `\nInstead rename ${basename} to end in .cjs, change the requiring ` + - 'code to use dynamic import() which is available in all CommonJS ' + - 'modules, or change "type": "module" to "type": "commonjs" in ' + - `${packageJsonPath} to treat all .js files as CommonJS (using .mjs for ` + - 'all ES modules instead).\n'; - return msg; - }, Error); -E('ERR_SCRIPT_EXECUTION_INTERRUPTED', - 'Script execution was interrupted by `SIGINT`', Error); -E('ERR_SERVER_ALREADY_LISTEN', - 'Listen method has been called more than once without closing.', Error); -E('ERR_SERVER_NOT_RUNNING', 'Server is not running.', Error); -E('ERR_SOCKET_ALREADY_BOUND', 'Socket is already bound', Error); -E('ERR_SOCKET_BAD_BUFFER_SIZE', - 'Buffer size must be a positive integer', TypeError); -E('ERR_SOCKET_BAD_PORT', (name, port, allowZero = true) => { - assert(typeof allowZero === 'boolean', - "The 'allowZero' argument must be of type boolean."); - const operator = allowZero ? '>=' : '>'; - return `${name} should be ${operator} 0 and < 65536. Received ${port}.`; -}, RangeError); -E('ERR_SOCKET_BAD_TYPE', - 'Bad socket type specified. Valid types are: udp4, udp6', TypeError); -E('ERR_SOCKET_BUFFER_SIZE', - 'Could not get or set buffer size', - SystemError); -E('ERR_SOCKET_CLOSED', 'Socket is closed', Error); -E('ERR_SOCKET_DGRAM_IS_CONNECTED', 'Already connected', Error); -E('ERR_SOCKET_DGRAM_NOT_CONNECTED', 'Not connected', Error); -E('ERR_SOCKET_DGRAM_NOT_RUNNING', 'Not running', Error); -E('ERR_SRI_PARSE', - 'Subresource Integrity string %j had an unexpected %j at position %d', - SyntaxError); -E('ERR_STREAM_ALREADY_FINISHED', - 'Cannot call %s after a stream was finished', - Error); -E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error); -E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error); -E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); -E('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error); -E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error); -E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', - 'stream.unshift() after end event', Error); -E('ERR_STREAM_WRAP', 'Stream has StringDecoder set or is in objectMode', Error); -E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error); -E('ERR_SYNTHETIC', 'JavaScript Callstack', Error); -E('ERR_SYSTEM_ERROR', 'A system error occurred', SystemError); -E('ERR_TLS_CERT_ALTNAME_FORMAT', 'Invalid subject alternative name string', - SyntaxError); -E('ERR_TLS_CERT_ALTNAME_INVALID', function(reason, host, cert) { - this.reason = reason; - this.host = host; - this.cert = cert; - return `Hostname/IP does not match certificate's altnames: ${reason}`; -}, Error); -E('ERR_TLS_DH_PARAM_SIZE', 'DH parameter size %s is less than 2048', Error); -E('ERR_TLS_HANDSHAKE_TIMEOUT', 'TLS handshake timeout', Error); -E('ERR_TLS_INVALID_CONTEXT', '%s must be a SecureContext', TypeError); -E('ERR_TLS_INVALID_PROTOCOL_VERSION', - '%j is not a valid %s TLS protocol version', TypeError); -E('ERR_TLS_INVALID_STATE', 'TLS socket connection must be securely established', - Error); 
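// Illustrative sketch, not part of the deleted file: once a code is registered
// through E() above it is exposed on `codes`, and getMessage() fills in the
// printf-style placeholders of its message template. For the stream errors
// defined above, usage inside readable-stream looks roughly like this:
const { ERR_STREAM_DESTROYED, ERR_STREAM_WRITE_AFTER_END } = codes;

const destroyedErr = new ERR_STREAM_DESTROYED('write');
// destroyedErr instanceof Error  -> true
// destroyedErr.code              -> 'ERR_STREAM_DESTROYED'
// destroyedErr.message           -> 'Cannot call write after a stream was destroyed'

const endErr = new ERR_STREAM_WRITE_AFTER_END();
// endErr.code                    -> 'ERR_STREAM_WRITE_AFTER_END'
// endErr.message                 -> 'write after end'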
-E('ERR_TLS_PROTOCOL_VERSION_CONFLICT', - 'TLS protocol version %j conflicts with secureProtocol %j', TypeError); -E('ERR_TLS_RENEGOTIATION_DISABLED', - 'TLS session renegotiation disabled for this socket', Error); - -// This should probably be a `TypeError`. -E('ERR_TLS_REQUIRED_SERVER_NAME', - '"servername" is required parameter for Server.addContext', Error); -E('ERR_TLS_SESSION_ATTACK', 'TLS session renegotiation attack detected', Error); -E('ERR_TLS_SNI_FROM_SERVER', - 'Cannot issue SNI from a TLS server-side socket', Error); -E('ERR_TRACE_EVENTS_CATEGORY_REQUIRED', - 'At least one category is required', TypeError); -E('ERR_TRACE_EVENTS_UNAVAILABLE', 'Trace events are unavailable', Error); - -// This should probably be a `RangeError`. -E('ERR_TTY_INIT_FAILED', 'TTY initialization failed', SystemError); -E('ERR_UNAVAILABLE_DURING_EXIT', 'Cannot call function in process exit ' + - 'handler', Error); -E('ERR_UNCAUGHT_EXCEPTION_CAPTURE_ALREADY_SET', - '`process.setupUncaughtExceptionCapture()` was called while a capture ' + - 'callback was already active', - Error); -E('ERR_UNESCAPED_CHARACTERS', '%s contains unescaped characters', TypeError); -E('ERR_UNHANDLED_ERROR', - // Using a default argument here is important so the argument is not counted - // towards `Function#length`. - (err = undefined) => { - const msg = 'Unhandled error.'; - if (err === undefined) return msg; - return `${msg} (${err})`; - }, Error); -E('ERR_UNKNOWN_BUILTIN_MODULE', 'No such built-in module: %s', Error); -E('ERR_UNKNOWN_CREDENTIAL', '%s identifier does not exist: %s', Error); -E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError); -E('ERR_UNKNOWN_FILE_EXTENSION', (ext, path, suggestion) => { - let msg = `Unknown file extension "${ext}" for ${path}`; - if (suggestion) { - msg += `. ${suggestion}`; - } - return msg; -}, TypeError); -E('ERR_UNKNOWN_MODULE_FORMAT', 'Unknown module format: %s for URL %s', - RangeError); -E('ERR_UNKNOWN_SIGNAL', 'Unknown signal: %s', TypeError); -E('ERR_UNSUPPORTED_DIR_IMPORT', "Directory import '%s' is not supported " + -'resolving ES modules imported from %s', Error); -E('ERR_UNSUPPORTED_ESM_URL_SCHEME', (url, supported) => { - let msg = `Only URLs with a scheme in: ${ArrayPrototypeJoin(supported, ', ')} are supported by the default ESM loader`; - if (isWindows && url.protocol.length === 2) { - msg += - '. On Windows, absolute paths must be valid file:// URLs'; - } - msg += `. Received protocol '${url.protocol}'`; - return msg; -}, Error); - -// This should probably be a `TypeError`. 
-E('ERR_VALID_PERFORMANCE_ENTRY_TYPE', - 'At least one valid performance entry type is required', Error); -E('ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING', - 'A dynamic import callback was not specified.', TypeError); -E('ERR_VM_MODULE_ALREADY_LINKED', 'Module has already been linked', Error); -E('ERR_VM_MODULE_CANNOT_CREATE_CACHED_DATA', - 'Cached data cannot be created for a module which has been evaluated', Error); -E('ERR_VM_MODULE_DIFFERENT_CONTEXT', - 'Linked modules must use the same context', Error); -E('ERR_VM_MODULE_LINKING_ERRORED', - 'Linking has already failed for the provided module', Error); -E('ERR_VM_MODULE_NOT_MODULE', - 'Provided module is not an instance of Module', Error); -E('ERR_VM_MODULE_STATUS', 'Module status %s', Error); -E('ERR_WASI_ALREADY_STARTED', 'WASI instance has already started', Error); -E('ERR_WORKER_INIT_FAILED', 'Worker initialization failure: %s', Error); -E('ERR_WORKER_INVALID_EXEC_ARGV', (errors, msg = 'invalid execArgv flags') => - `Initiated Worker with ${msg}: ${ArrayPrototypeJoin(errors, ', ')}`, - Error); -E('ERR_WORKER_NOT_RUNNING', 'Worker instance not running', Error); -E('ERR_WORKER_OUT_OF_MEMORY', - 'Worker terminated due to reaching memory limit: %s', Error); -E('ERR_WORKER_PATH', (filename) => - 'The worker script or module filename must be an absolute path or a ' + - 'relative path starting with \'./\' or \'../\'.' + - (StringPrototypeStartsWith(filename, 'file://') ? - ' Wrap file:// URLs with `new URL`.' : '' - ) + - (StringPrototypeStartsWith(filename, 'data:text/javascript') ? - ' Wrap data: URLs with `new URL`.' : '' - ) + - ` Received "${filename}"`, - TypeError); -E('ERR_WORKER_UNSERIALIZABLE_ERROR', - 'Serializing an uncaught exception failed', Error); -E('ERR_WORKER_UNSUPPORTED_OPERATION', - '%s is not supported in workers', TypeError); -E('ERR_ZLIB_INITIALIZATION_FAILED', 'Initialization failed', Error); diff --git a/lib/internal/inspect-browser.js b/lib/internal/inspect-browser.js deleted file mode 100644 index 7eb1bb42c4..0000000000 --- a/lib/internal/inspect-browser.js +++ /dev/null @@ -1,2299 +0,0 @@ -'use strict'; - -const { - Array, - ArrayIsArray, - ArrayPrototypeFilter, - ArrayPrototypeForEach, - ArrayPrototypePop, - ArrayPrototypePush, - ArrayPrototypePushApply, - ArrayPrototypeSort, - ArrayPrototypeUnshift, - BigIntPrototypeValueOf, - BooleanPrototypeValueOf, - DatePrototypeGetTime, - DatePrototypeToISOString, - DatePrototypeToString, - ErrorPrototypeToString, - FunctionPrototypeCall, - FunctionPrototypeToString, - JSONStringify, - MapPrototypeGetSize, - MapPrototypeEntries, - MathFloor, - MathMax, - MathMin, - MathRound, - MathSqrt, - MathTrunc, - Number, - NumberIsFinite, - NumberIsNaN, - NumberParseFloat, - NumberParseInt, - NumberPrototypeValueOf, - Object, - ObjectAssign, - ObjectCreate, - ObjectDefineProperty, - ObjectGetOwnPropertyDescriptor, - ObjectGetOwnPropertyNames, - ObjectGetOwnPropertySymbols, - ObjectGetPrototypeOf, - ObjectIs, - ObjectKeys, - ObjectPrototypeHasOwnProperty, - ObjectPrototypePropertyIsEnumerable, - ObjectSeal, - ObjectSetPrototypeOf, - ReflectOwnKeys, - RegExp, - RegExpPrototypeTest, - RegExpPrototypeToString, - SafeStringIterator, - SafeMap, - SafeSet, - SetPrototypeGetSize, - SetPrototypeValues, - String, - StringPrototypeCharCodeAt, - StringPrototypeCodePointAt, - StringPrototypeIncludes, - StringPrototypeNormalize, - StringPrototypePadEnd, - StringPrototypePadStart, - StringPrototypeRepeat, - StringPrototypeReplace, - StringPrototypeSlice, - StringPrototypeSplit, - 
StringPrototypeToLowerCase, - StringPrototypeTrim, - StringPrototypeValueOf, - SymbolPrototypeToString, - SymbolPrototypeValueOf, - SymbolIterator, - SymbolToStringTag, - TypedArrayPrototypeGetLength, - TypedArrayPrototypeGetSymbolToStringTag, - Uint8Array, - globalThis, - uncurryThis, -} = require('./primordials'); - -const { - getOwnNonIndexProperties, - getPromiseDetails, - getProxyDetails, - kPending, - kRejected, - previewEntries, - getConstructorName: internalGetConstructorName, - getExternalValue, - propertyFilter: { - ALL_PROPERTIES, - ONLY_ENUMERABLE - } -} = require('../util'); - -const { - customInspectSymbol, - isError, - join, - removeColors -} = require('../util'); - -const { - codes: { - ERR_INVALID_ARG_TYPE - }, - isStackOverflowError -} = require('./errors'); - -const { - isAsyncFunction, - isGeneratorFunction, - isAnyArrayBuffer, - isArrayBuffer, - isArgumentsObject, - isBoxedPrimitive, - isDataView, - isExternal, - isMap, - isMapIterator, - isModuleNamespaceObject, - isNativeError, - isPromise, - isSet, - isSetIterator, - isWeakMap, - isWeakSet, - isRegExp, - isDate, - isTypedArray, - isStringObject, - isNumberObject, - isBooleanObject, - isBigIntObject, -} = require('../util'); - -const assert = require('assert'); - -const { NativeModule } = - { - NativeModule: { - exists() { - return false; - } - } - } - -const { - validateObject, - validateString, -} = require('./validators'); - -let hexSlice; - -const builtInObjects = new SafeSet( - ArrayPrototypeFilter( - ObjectGetOwnPropertyNames(globalThis), - (e) => RegExpPrototypeTest(/^[A-Z][a-zA-Z0-9]+$/, e) - ) -); - -// https://tc39.es/ecma262/#sec-IsHTMLDDA-internal-slot -const isUndetectableObject = (v) => typeof v === 'undefined' && v !== undefined; - -// These options must stay in sync with `getUserOptions`. So if any option will -// be added or removed, `getUserOptions` must also be updated accordingly. -const inspectDefaultOptions = ObjectSeal({ - showHidden: false, - depth: 2, - colors: false, - customInspect: true, - showProxy: false, - maxArrayLength: 100, - maxStringLength: 10000, - breakLength: 80, - compact: 3, - sorted: false, - getters: false, - numericSeparator: false, -}); - -const kObjectType = 0; -const kArrayType = 1; -const kArrayExtrasType = 2; - -/* eslint-disable no-control-regex */ -const strEscapeSequencesRegExp = /[\x00-\x1f\x27\x5c\x7f-\x9f]/; -const strEscapeSequencesReplacer = /[\x00-\x1f\x27\x5c\x7f-\x9f]/g -const strEscapeSequencesRegExpSingle = /[\x00-\x1f\x5c\x7f-\x9f]/; -const strEscapeSequencesReplacerSingle = /[\x00-\x1f\x5c\x7f-\x9f]/g; -/* eslint-enable no-control-regex */ - -const keyStrRegExp = /^[a-zA-Z_][a-zA-Z_0-9]*$/; -const numberRegExp = /^(0|[1-9][0-9]*)$/; - -const coreModuleRegExp = /^ {4}at (?:[^/\\(]+ \(|)node:(.+):\d+:\d+\)?$/; -const nodeModulesRegExp = /[/\\]node_modules[/\\](.+?)(?:[/\\])/g; - -const classRegExp = /^(\s+[^(]*?)\s*{/; -// eslint-disable-next-line node-core/no-unescaped-regexp-dot -const stripCommentsRegExp = /(\/\/.*?\n)|(\/\*(.|\n)*?\*\/)/g; - -const kMinLineLength = 16; - -// Constants to map the iterator state. -const kWeak = 0; -const kIterator = 1; -const kMapEntries = 2; - -// Escaped control characters (plus the single quote and the backslash). Use -// empty strings to fill up unused entries. 
-const meta = [ - '\\x00', '\\x01', '\\x02', '\\x03', '\\x04', '\\x05', '\\x06', '\\x07', // x07 - '\\b', '\\t', '\\n', '\\x0B', '\\f', '\\r', '\\x0E', '\\x0F', // x0F - '\\x10', '\\x11', '\\x12', '\\x13', '\\x14', '\\x15', '\\x16', '\\x17', // x17 - '\\x18', '\\x19', '\\x1A', '\\x1B', '\\x1C', '\\x1D', '\\x1E', '\\x1F', // x1F - '', '', '', '', '', '', '', "\\'", '', '', '', '', '', '', '', '', // x2F - '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x3F - '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x4F - '', '', '', '', '', '', '', '', '', '', '', '', '\\\\', '', '', '', // x5F - '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', // x6F - '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '\\x7F', // x7F - '\\x80', '\\x81', '\\x82', '\\x83', '\\x84', '\\x85', '\\x86', '\\x87', // x87 - '\\x88', '\\x89', '\\x8A', '\\x8B', '\\x8C', '\\x8D', '\\x8E', '\\x8F', // x8F - '\\x90', '\\x91', '\\x92', '\\x93', '\\x94', '\\x95', '\\x96', '\\x97', // x97 - '\\x98', '\\x99', '\\x9A', '\\x9B', '\\x9C', '\\x9D', '\\x9E', '\\x9F', // x9F -]; - -// Regex used for ansi escape code splitting -// Adopted from https://github.com/chalk/ansi-regex/blob/HEAD/index.js -// License: MIT, authors: @sindresorhus, Qix-, arjunmehta and LitoMore -// Matches all ansi escape code sequences in a string -const ansiPattern = '[\\u001B\\u009B][[\\]()#;?]*' + - '(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*' + - '|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)' + - '|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'; -const ansi = new RegExp(ansiPattern, 'g'); - -let getStringWidth; - -function getUserOptions(ctx, isCrossContext) { - const ret = { - stylize: ctx.stylize, - showHidden: ctx.showHidden, - depth: ctx.depth, - colors: ctx.colors, - customInspect: ctx.customInspect, - showProxy: ctx.showProxy, - maxArrayLength: ctx.maxArrayLength, - maxStringLength: ctx.maxStringLength, - breakLength: ctx.breakLength, - compact: ctx.compact, - sorted: ctx.sorted, - getters: ctx.getters, - numericSeparator: ctx.numericSeparator, - ...ctx.userOptions - }; - - // Typically, the target value will be an instance of `Object`. If that is - // *not* the case, the object may come from another vm.Context, and we want - // to avoid passing it objects from this Context in that case, so we remove - // the prototype from the returned object itself + the `stylize()` function, - // and remove all other non-primitives, including non-primitive user options. - if (isCrossContext) { - ObjectSetPrototypeOf(ret, null); - for (const key of ObjectKeys(ret)) { - if ((typeof ret[key] === 'object' || typeof ret[key] === 'function') && - ret[key] !== null) { - delete ret[key]; - } - } - ret.stylize = ObjectSetPrototypeOf((value, flavour) => { - let stylized; - try { - stylized = `${ctx.stylize(value, flavour)}`; - } catch { - // Continue regardless of error. - } - - if (typeof stylized !== 'string') return value; - // `stylized` is a string as it should be, which is safe to pass along. - return stylized; - }, null); - } - - return ret; -} - -/** - * Echos the value of any input. Tries to print the value out - * in the best way possible given the different types. - * - * @param {any} value The value to print out. - * @param {object} opts Optional options object that alters the output. 
- */ -/* Legacy: value, showHidden, depth, colors */ -function inspect(value, opts) { - // Default options - const ctx = { - budget: {}, - indentationLvl: 0, - seen: [], - currentDepth: 0, - stylize: stylizeNoColor, - showHidden: inspectDefaultOptions.showHidden, - depth: inspectDefaultOptions.depth, - colors: inspectDefaultOptions.colors, - customInspect: inspectDefaultOptions.customInspect, - showProxy: inspectDefaultOptions.showProxy, - maxArrayLength: inspectDefaultOptions.maxArrayLength, - maxStringLength: inspectDefaultOptions.maxStringLength, - breakLength: inspectDefaultOptions.breakLength, - compact: inspectDefaultOptions.compact, - sorted: inspectDefaultOptions.sorted, - getters: inspectDefaultOptions.getters, - numericSeparator: inspectDefaultOptions.numericSeparator, - }; - if (arguments.length > 1) { - // Legacy... - if (arguments.length > 2) { - if (arguments[2] !== undefined) { - ctx.depth = arguments[2]; - } - if (arguments.length > 3 && arguments[3] !== undefined) { - ctx.colors = arguments[3]; - } - } - // Set user-specified options - if (typeof opts === 'boolean') { - ctx.showHidden = opts; - } else if (opts) { - const optKeys = ObjectKeys(opts); - for (let i = 0; i < optKeys.length; ++i) { - const key = optKeys[i]; - // TODO(BridgeAR): Find a solution what to do about stylize. Either make - // this function public or add a new API with a similar or better - // functionality. - if ( - ObjectPrototypeHasOwnProperty(inspectDefaultOptions, key) || - key === 'stylize') { - ctx[key] = opts[key]; - } else if (ctx.userOptions === undefined) { - // This is required to pass through the actual user input. - ctx.userOptions = opts; - } - } - } - } - if (ctx.colors) ctx.stylize = stylizeWithColor; - if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; - if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; - return formatValue(ctx, value, 0); -} -inspect.custom = customInspectSymbol; - -ObjectDefineProperty(inspect, 'defaultOptions', { - get() { - return inspectDefaultOptions; - }, - set(options) { - validateObject(options, 'options'); - return ObjectAssign(inspectDefaultOptions, options); - } -}); - -// Set Graphics Rendition https://en.wikipedia.org/wiki/ANSI_escape_code#graphics -// Each color consists of an array with the color code as first entry and the -// reset code as second entry. 
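// Illustrative sketch, not part of the deleted file: each entry below is an
// [enter, reset] pair of SGR codes, and stylizeWithColor() (defined further
// down) wraps the text in the matching escape sequences. With green = [32, 39]:
//
//   `\u001b[32m${str}\u001b[39m`  // render str in green, then reset to the default foreground
//
// This is the wrapping that inspect(value, { colors: true }) emits around
// string output, since inspect.styles maps the 'string' type to 'green'.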
-const defaultFG = 39; -const defaultBG = 49; -inspect.colors = ObjectAssign(ObjectCreate(null), { - reset: [0, 0], - bold: [1, 22], - dim: [2, 22], // Alias: faint - italic: [3, 23], - underline: [4, 24], - blink: [5, 25], - // Swap foreground and background colors - inverse: [7, 27], // Alias: swapcolors, swapColors - hidden: [8, 28], // Alias: conceal - strikethrough: [9, 29], // Alias: strikeThrough, crossedout, crossedOut - doubleunderline: [21, 24], // Alias: doubleUnderline - black: [30, defaultFG], - red: [31, defaultFG], - green: [32, defaultFG], - yellow: [33, defaultFG], - blue: [34, defaultFG], - magenta: [35, defaultFG], - cyan: [36, defaultFG], - white: [37, defaultFG], - bgBlack: [40, defaultBG], - bgRed: [41, defaultBG], - bgGreen: [42, defaultBG], - bgYellow: [43, defaultBG], - bgBlue: [44, defaultBG], - bgMagenta: [45, defaultBG], - bgCyan: [46, defaultBG], - bgWhite: [47, defaultBG], - framed: [51, 54], - overlined: [53, 55], - gray: [90, defaultFG], // Alias: grey, blackBright - redBright: [91, defaultFG], - greenBright: [92, defaultFG], - yellowBright: [93, defaultFG], - blueBright: [94, defaultFG], - magentaBright: [95, defaultFG], - cyanBright: [96, defaultFG], - whiteBright: [97, defaultFG], - bgGray: [100, defaultBG], // Alias: bgGrey, bgBlackBright - bgRedBright: [101, defaultBG], - bgGreenBright: [102, defaultBG], - bgYellowBright: [103, defaultBG], - bgBlueBright: [104, defaultBG], - bgMagentaBright: [105, defaultBG], - bgCyanBright: [106, defaultBG], - bgWhiteBright: [107, defaultBG], -}); - -function defineColorAlias(target, alias) { - ObjectDefineProperty(inspect.colors, alias, { - get() { - return this[target]; - }, - set(value) { - this[target] = value; - }, - configurable: true, - enumerable: false - }); -} - -defineColorAlias('gray', 'grey'); -defineColorAlias('gray', 'blackBright'); -defineColorAlias('bgGray', 'bgGrey'); -defineColorAlias('bgGray', 'bgBlackBright'); -defineColorAlias('dim', 'faint'); -defineColorAlias('strikethrough', 'crossedout'); -defineColorAlias('strikethrough', 'strikeThrough'); -defineColorAlias('strikethrough', 'crossedOut'); -defineColorAlias('hidden', 'conceal'); -defineColorAlias('inverse', 'swapColors'); -defineColorAlias('inverse', 'swapcolors'); -defineColorAlias('doubleunderline', 'doubleUnderline'); - -// TODO(BridgeAR): Add function style support for more complex styles. -// Don't use 'blue' not visible on cmd.exe -inspect.styles = ObjectAssign(ObjectCreate(null), { - special: 'cyan', - number: 'yellow', - bigint: 'yellow', - boolean: 'yellow', - undefined: 'grey', - null: 'bold', - string: 'green', - symbol: 'green', - date: 'magenta', - // "name": intentionally not styling - // TODO(BridgeAR): Highlight regular expressions properly. - regexp: 'red', - module: 'underline' -}); - -function addQuotes(str, quotes) { - if (quotes === -1) { - return `"${str}"`; - } - if (quotes === -2) { - return `\`${str}\``; - } - return `'${str}'`; -} - -function escapeFn(str) { - const charCode = StringPrototypeCharCodeAt(str); - return meta.length > charCode ? meta[charCode] : `\\u${charCode.toString(16)}`; -} - -// Escape control characters, single quotes and the backslash. -// This is similar to JSON stringify escaping. -function strEscape(str) { - let escapeTest = strEscapeSequencesRegExp; - let escapeReplace = strEscapeSequencesReplacer; - let singleQuote = 39; - - // Check for double quotes. If not present, do not escape single quotes and - // instead wrap the text in double quotes. If double quotes exist, check for - // backticks. 
If they do not exist, use those as fallback instead of the - // double quotes. - if (StringPrototypeIncludes(str, "'")) { - // This invalidates the charCode and therefore can not be matched for - // anymore. - if (!StringPrototypeIncludes(str, '"')) { - singleQuote = -1; - } else if (!StringPrototypeIncludes(str, '`') && - !StringPrototypeIncludes(str, '${')) { - singleQuote = -2; - } - if (singleQuote !== 39) { - escapeTest = strEscapeSequencesRegExpSingle; - escapeReplace = strEscapeSequencesReplacerSingle; - } - } - - // Some magic numbers that worked out fine while benchmarking with v8 6.0 - if (str.length < 5000 && !RegExpPrototypeTest(escapeTest, str)) - return addQuotes(str, singleQuote); - if (str.length > 100) { - str = StringPrototypeReplace(str, escapeReplace, escapeFn); - return addQuotes(str, singleQuote); - } - - let result = ''; - let last = 0; - for (let i = 0; i < str.length; i++) { - const point = StringPrototypeCharCodeAt(str, i); - if (point === singleQuote || - point === 92 || - point < 32 || - (point > 126 && point < 160)) { - if (last === i) { - result += meta[point]; - } else { - result += `${StringPrototypeSlice(str, last, i)}${meta[point]}`; - } - last = i + 1; - } else if (point >= 0xd800 && point <= 0xdfff) { - if (point <= 0xdbff && i + 1 < str.length) { - const point = StringPrototypeCharCodeAt(str, i + 1); - if (point >= 0xdc00 && point <= 0xdfff) { - i++; - continue; - } - } - result += `${StringPrototypeSlice(str, last, i)}${`\\u${point.toString(16)}`}`; - last = i + 1; - } - } - - if (last !== str.length) { - result += StringPrototypeSlice(str, last); - } - return addQuotes(result, singleQuote); -} - -function stylizeWithColor(str, styleType) { - const style = inspect.styles[styleType]; - if (style !== undefined) { - const color = inspect.colors[style]; - if (color !== undefined) - return `\u001b[${color[0]}m${str}\u001b[${color[1]}m`; - } - return str; -} - -function stylizeNoColor(str) { - return str; -} - -// Return a new empty array to push in the results of the default formatter. -function getEmptyFormatArray() { - return []; -} - -function isInstanceof(object, proto) { - try { - return object instanceof proto; - } catch { - return false; - } -} - -function getConstructorName(obj, ctx, recurseTimes, protoProps) { - let firstProto; - const tmp = obj; - while (obj || isUndetectableObject(obj)) { - const descriptor = ObjectGetOwnPropertyDescriptor(obj, 'constructor'); - if (descriptor !== undefined && - typeof descriptor.value === 'function' && - descriptor.value.name !== '' && - isInstanceof(tmp, descriptor.value)) { - if (protoProps !== undefined && - (firstProto !== obj || - !builtInObjects.has(descriptor.value.name))) { - addPrototypeProperties( - ctx, tmp, firstProto || tmp, recurseTimes, protoProps); - } - return descriptor.value.name; - } - - obj = ObjectGetPrototypeOf(obj); - if (firstProto === undefined) { - firstProto = obj; - } - } - - if (firstProto === null) { - return null; - } - - const res = internalGetConstructorName(tmp); - - if (recurseTimes > ctx.depth && ctx.depth !== null) { - return `${res} `; - } - - const protoConstr = getConstructorName( - firstProto, ctx, recurseTimes + 1, protoProps); - - if (protoConstr === null) { - return `${res} <${inspect(firstProto, { - ...ctx, - customInspect: false, - depth: -1 - })}>`; - } - - return `${res} <${protoConstr}>`; -} - -// This function has the side effect of adding prototype properties to the -// `output` argument (which is an array). 
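The quote-selection rule in `strEscape` above can be condensed into a small sketch. This is a simplification with an invented name (`chooseQuotes`): it ignores the escaping fast paths and the control-character handling.

function chooseQuotes(str) {
  // Prefer single quotes; switch to double quotes if the string contains a
  // single quote; use backticks only if both quote kinds occur and the string
  // has no backtick or "${"; otherwise escape the single quotes.
  if (!str.includes("'")) return `'${str}'`;
  if (!str.includes('"')) return `"${str}"`;
  if (!str.includes('`') && !str.includes('${')) return `\`${str}\``;
  return `'${str.replace(/'/g, "\\'")}'`;
}

console.log(chooseQuotes('plain'));       // 'plain'
console.log(chooseQuotes("it's"));        // "it's"
console.log(chooseQuotes(`mix 'a' "b"`)); // `mix 'a' "b"`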
This is intended to highlight user -// defined prototype properties. -function addPrototypeProperties(ctx, main, obj, recurseTimes, output) { - let depth = 0; - let keys; - let keySet; - do { - if (depth !== 0 || main === obj) { - obj = ObjectGetPrototypeOf(obj); - // Stop as soon as a null prototype is encountered. - if (obj === null) { - return; - } - // Stop as soon as a built-in object type is detected. - const descriptor = ObjectGetOwnPropertyDescriptor(obj, 'constructor'); - if (descriptor !== undefined && - typeof descriptor.value === 'function' && - builtInObjects.has(descriptor.value.name)) { - return; - } - } - - if (depth === 0) { - keySet = new SafeSet(); - } else { - ArrayPrototypeForEach(keys, (key) => keySet.add(key)); - } - // Get all own property names and symbols. - keys = ReflectOwnKeys(obj); - ArrayPrototypePush(ctx.seen, main); - for (const key of keys) { - // Ignore the `constructor` property and keys that exist on layers above. - if (key === 'constructor' || - ObjectPrototypeHasOwnProperty(main, key) || - (depth !== 0 && keySet.has(key))) { - continue; - } - const desc = ObjectGetOwnPropertyDescriptor(obj, key); - if (typeof desc.value === 'function') { - continue; - } - const value = formatProperty( - ctx, obj, recurseTimes, key, kObjectType, desc, main); - if (ctx.colors) { - // Faint! - ArrayPrototypePush(output, `\u001b[2m${value}\u001b[22m`); - } else { - ArrayPrototypePush(output, value); - } - } - ArrayPrototypePop(ctx.seen); - // Limit the inspection to up to three prototype layers. Using `recurseTimes` - // is not a good choice here, because it's as if the properties are declared - // on the current object from the users perspective. - } while (++depth !== 3); -} - -function getPrefix(constructor, tag, fallback, size = '') { - if (constructor === null) { - if (tag !== '' && fallback !== tag) { - return `[${fallback}${size}: null prototype] [${tag}] `; - } - return `[${fallback}${size}: null prototype] `; - } - - if (tag !== '' && constructor !== tag) { - return `${constructor}${size} [${tag}] `; - } - return `${constructor}${size} `; -} - -// Look up the keys of the object. -function getKeys(value, showHidden) { - let keys; - const symbols = ObjectGetOwnPropertySymbols(value); - if (showHidden) { - keys = ObjectGetOwnPropertyNames(value); - if (symbols.length !== 0) - ArrayPrototypePushApply(keys, symbols); - } else { - // This might throw if `value` is a Module Namespace Object from an - // unevaluated module, but we don't want to perform the actual type - // check because it's expensive. - // TODO(devsnek): track https://github.com/tc39/ecma262/issues/1209 - // and modify this logic as needed. 
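The prefixes built by `getPrefix` are what produce the familiar constructor/tag headers in inspect output. For reference, assuming a Node.js runtime where `util.inspect` is available (expected output shown in comments):

const util = require('util'); // assumes a Node.js runtime for the demo
console.log(util.inspect(Object.create(null))); // [Object: null prototype] {}
console.log(util.inspect(new Map([['a', 1]]))); // Map(1) { 'a' => 1 }
class Tagged { get [Symbol.toStringTag]() { return 'Custom'; } }
console.log(util.inspect(new Tagged()));        // Tagged [Custom] {}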
- try { - keys = ObjectKeys(value); - } catch (err) { - assert(isNativeError(err) && err.name === 'ReferenceError' && - isModuleNamespaceObject(value)); - keys = ObjectGetOwnPropertyNames(value); - } - if (symbols.length !== 0) { - const filter = (key) => ObjectPrototypePropertyIsEnumerable(value, key); - ArrayPrototypePushApply(keys, ArrayPrototypeFilter(symbols, filter)); - } - } - return keys; -} - -function getCtxStyle(value, constructor, tag) { - let fallback = ''; - if (constructor === null) { - fallback = internalGetConstructorName(value); - if (fallback === tag) { - fallback = 'Object'; - } - } - return getPrefix(constructor, tag, fallback); -} - -function formatProxy(ctx, proxy, recurseTimes) { - if (recurseTimes > ctx.depth && ctx.depth !== null) { - return ctx.stylize('Proxy [Array]', 'special'); - } - recurseTimes += 1; - ctx.indentationLvl += 2; - const res = [ - formatValue(ctx, proxy[0], recurseTimes), - formatValue(ctx, proxy[1], recurseTimes), - ]; - ctx.indentationLvl -= 2; - return reduceToSingleString( - ctx, res, '', ['Proxy [', ']'], kArrayExtrasType, recurseTimes); -} - -// Note: using `formatValue` directly requires the indentation level to be -// corrected by setting `ctx.indentationLvL += diff` and then to decrease the -// value afterwards again. -function formatValue(ctx, value, recurseTimes, typedArray) { - // Primitive types cannot have properties. - if (typeof value !== 'object' && - typeof value !== 'function' && - !isUndetectableObject(value)) { - return formatPrimitive(ctx.stylize, value, ctx); - } - if (value === null) { - return ctx.stylize('null', 'null'); - } - - // Memorize the context for custom inspection on proxies. - const context = value; - // Always check for proxies to prevent side effects and to prevent triggering - // any proxy handlers. - const proxy = getProxyDetails(value, !!ctx.showProxy); - if (proxy !== undefined) { - if (ctx.showProxy) { - return formatProxy(ctx, proxy, recurseTimes); - } - value = proxy; - } - - // Provide a hook for user-specified inspect functions. - // Check that value is an object with an inspect function on it. - if (ctx.customInspect) { - const maybeCustom = value[customInspectSymbol]; - if (typeof maybeCustom === 'function' && - // Filter out the util module, its inspect function is special. - maybeCustom !== inspect && - // Also filter out any prototype objects using the circular check. - !(value.constructor && value.constructor.prototype === value)) { - // This makes sure the recurseTimes are reported as before while using - // a counter internally. - const depth = ctx.depth === null ? null : ctx.depth - recurseTimes; - const isCrossContext = - proxy !== undefined || !(context instanceof Object); - const ret = FunctionPrototypeCall( - maybeCustom, - context, - depth, - getUserOptions(ctx, isCrossContext), - inspect - ); - // If the custom inspection method returned `this`, don't go into - // infinite recursion. - if (ret !== context) { - if (typeof ret !== 'string') { - return formatValue(ctx, ret, recurseTimes); - } - return ret.replace(/\n/g, `\n${' '.repeat(ctx.indentationLvl)}`); - } - } - } - - // Using an array here is actually better for the average case than using - // a Set. `seen` will only check for the depth and will never grow too large. 
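The custom-inspection hook checked above is the one exposed publicly as `util.inspect.custom`. A minimal usage sketch, assuming a Node.js runtime (the `Box` class is invented for the example):

const util = require('util'); // assumes a Node.js runtime for the demo
class Box {
  constructor(value) { this.value = value; }
  // The hook receives the remaining depth, the effective options and the
  // inspect function itself; whatever it returns replaces the default output.
  [util.inspect.custom](depth, options, inspect) {
    return `Box<${inspect(this.value, options)}>`;
  }
}
console.log(util.inspect(new Box([1, 2, 3]))); // Box<[ 1, 2, 3 ]>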
- if (ctx.seen.includes(value)) { - let index = 1; - if (ctx.circular === undefined) { - ctx.circular = new SafeMap(); - ctx.circular.set(value, index); - } else { - index = ctx.circular.get(value); - if (index === undefined) { - index = ctx.circular.size + 1; - ctx.circular.set(value, index); - } - } - return ctx.stylize(`[Circular *${index}]`, 'special'); - } - - return formatRaw(ctx, value, recurseTimes, typedArray); -} - -function formatRaw(ctx, value, recurseTimes, typedArray) { - let keys; - let protoProps; - if (ctx.showHidden && (recurseTimes <= ctx.depth || ctx.depth === null)) { - protoProps = []; - } - - const constructor = getConstructorName(value, ctx, recurseTimes, protoProps); - // Reset the variable to check for this later on. - if (protoProps !== undefined && protoProps.length === 0) { - protoProps = undefined; - } - - let tag = value[SymbolToStringTag]; - // Only list the tag in case it's non-enumerable / not an own property. - // Otherwise we'd print this twice. - if (typeof tag !== 'string' || - (tag !== '' && - (ctx.showHidden ? - ObjectPrototypeHasOwnProperty : - ObjectPrototypePropertyIsEnumerable)( - value, SymbolToStringTag - ))) { - tag = ''; - } - let base = ''; - let formatter = getEmptyFormatArray; - let braces; - let noIterator = true; - let i = 0; - const filter = ctx.showHidden ? ALL_PROPERTIES : ONLY_ENUMERABLE; - - let extrasType = kObjectType; - - // Iterators and the rest are split to reduce checks. - // We have to check all values in case the constructor is set to null. - // Otherwise it would not possible to identify all types properly. - if (value[SymbolIterator] || constructor === null) { - noIterator = false; - if (ArrayIsArray(value)) { - // Only set the constructor for non ordinary ("Array [...]") arrays. - const prefix = (constructor !== 'Array' || tag !== '') ? - getPrefix(constructor, tag, 'Array', `(${value.length})`) : - ''; - keys = getOwnNonIndexProperties(value, filter); - braces = [`${prefix}[`, ']']; - if (value.length === 0 && keys.length === 0 && protoProps === undefined) - return `${braces[0]}]`; - extrasType = kArrayExtrasType; - formatter = formatArray; - } else if (isSet(value)) { - const size = SetPrototypeGetSize(value); - const prefix = getPrefix(constructor, tag, 'Set', `(${size})`); - keys = getKeys(value, ctx.showHidden); - formatter = constructor !== null ? - formatSet.bind(null, value) : - formatSet.bind(null, SetPrototypeValues(value)); - if (size === 0 && keys.length === 0 && protoProps === undefined) - return `${prefix}{}`; - braces = [`${prefix}{`, '}']; - } else if (isMap(value)) { - const size = MapPrototypeGetSize(value); - const prefix = getPrefix(constructor, tag, 'Map', `(${size})`); - keys = getKeys(value, ctx.showHidden); - formatter = constructor !== null ? - formatMap.bind(null, value) : - formatMap.bind(null, MapPrototypeEntries(value)); - if (size === 0 && keys.length === 0 && protoProps === undefined) - return `${prefix}{}`; - braces = [`${prefix}{`, '}']; - } else if (isTypedArray(value)) { - keys = getOwnNonIndexProperties(value, filter); - let bound = value; - let fallback = ''; - if (constructor === null) { - fallback = TypedArrayPrototypeGetSymbolToStringTag(value); - // Reconstruct the array information. 
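The circular-tracking logic above is what produces the numbered back-references in output. A quick demonstration, assuming Node's `util.inspect`:

const util = require('util'); // assumes a Node.js runtime for the demo
const node = { name: 'root' };
node.self = node; // create a cycle
console.log(util.inspect(node));
// <ref *1> { name: 'root', self: [Circular *1] }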
- bound = new primordials[fallback](value); - } - const size = TypedArrayPrototypeGetLength(value); - const prefix = getPrefix(constructor, tag, fallback, `(${size})`); - braces = [`${prefix}[`, ']']; - if (value.length === 0 && keys.length === 0 && !ctx.showHidden) - return `${braces[0]}]`; - // Special handle the value. The original value is required below. The - // bound function is required to reconstruct missing information. - formatter = formatTypedArray.bind(null, bound, size); - extrasType = kArrayExtrasType; - } else if (isMapIterator(value)) { - keys = getKeys(value, ctx.showHidden); - braces = getIteratorBraces('Map', tag); - // Add braces to the formatter parameters. - formatter = formatIterator.bind(null, braces); - } else if (isSetIterator(value)) { - keys = getKeys(value, ctx.showHidden); - braces = getIteratorBraces('Set', tag); - // Add braces to the formatter parameters. - formatter = formatIterator.bind(null, braces); - } else { - noIterator = true; - } - } - if (noIterator) { - keys = getKeys(value, ctx.showHidden); - braces = ['{', '}']; - if (constructor === 'Object') { - if (isArgumentsObject(value)) { - braces[0] = '[Arguments] {'; - } else if (tag !== '') { - braces[0] = `${getPrefix(constructor, tag, 'Object')}{`; - } - if (keys.length === 0 && protoProps === undefined) { - return `${braces[0]}}`; - } - } else if (typeof value === 'function') { - base = getFunctionBase(value, constructor, tag); - if (keys.length === 0 && protoProps === undefined) - return ctx.stylize(base, 'special'); - } else if (isRegExp(value)) { - // Make RegExps say that they are RegExps - base = RegExpPrototypeToString( - constructor !== null ? value : new RegExp(value) - ); - const prefix = getPrefix(constructor, tag, 'RegExp'); - if (prefix !== 'RegExp ') - base = `${prefix}${base}`; - if ((keys.length === 0 && protoProps === undefined) || - (recurseTimes > ctx.depth && ctx.depth !== null)) { - return ctx.stylize(base, 'regexp'); - } - } else if (isDate(value)) { - // Make dates with properties first say the date - base = NumberIsNaN(DatePrototypeGetTime(value)) ? - DatePrototypeToString(value) : - DatePrototypeToISOString(value); - const prefix = getPrefix(constructor, tag, 'Date'); - if (prefix !== 'Date ') - base = `${prefix}${base}`; - if (keys.length === 0 && protoProps === undefined) { - return ctx.stylize(base, 'date'); - } - } else if (isError(value)) { - base = formatError(value, constructor, tag, ctx, keys); - if (keys.length === 0 && protoProps === undefined) - return base; - } else if (isAnyArrayBuffer(value)) { - // Fast path for ArrayBuffer and SharedArrayBuffer. - // Can't do the same for DataView because it has a non-primitive - // .buffer property that we need to recurse for. - const arrayType = isArrayBuffer(value) ? 'ArrayBuffer' : - 'SharedArrayBuffer'; - const prefix = getPrefix(constructor, tag, arrayType); - if (typedArray === undefined) { - formatter = formatArrayBuffer; - } else if (keys.length === 0 && protoProps === undefined) { - return prefix + - `{ byteLength: ${formatNumber(ctx.stylize, value.byteLength, false)} }`; - } - braces[0] = `${prefix}{`; - ArrayPrototypeUnshift(keys, 'byteLength'); - } else if (isDataView(value)) { - braces[0] = `${getPrefix(constructor, tag, 'DataView')}{`; - // .buffer goes last, it's not a primitive like the others. 
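The type-specific branches above (typed arrays, sets, dates, array buffers, and so on) correspond to output like the following, assuming Node's `util.inspect` (expected output shown in comments and may vary slightly between Node versions):

const util = require('util'); // assumes a Node.js runtime for the demo
console.log(util.inspect(new Uint8Array([1, 2, 3]))); // Uint8Array(3) [ 1, 2, 3 ]
console.log(util.inspect(new Set([1, 2, 3])));        // Set(3) { 1, 2, 3 }
console.log(util.inspect(new Date(0)));               // 1970-01-01T00:00:00.000Z
console.log(util.inspect(new ArrayBuffer(4)));
// ArrayBuffer { [Uint8Contents]: <00 00 00 00>, byteLength: 4 }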
- ArrayPrototypeUnshift(keys, 'byteLength', 'byteOffset', 'buffer'); - } else if (isPromise(value)) { - braces[0] = `${getPrefix(constructor, tag, 'Promise')}{`; - formatter = formatPromise; - } else if (isWeakSet(value)) { - braces[0] = `${getPrefix(constructor, tag, 'WeakSet')}{`; - formatter = ctx.showHidden ? formatWeakSet : formatWeakCollection; - } else if (isWeakMap(value)) { - braces[0] = `${getPrefix(constructor, tag, 'WeakMap')}{`; - formatter = ctx.showHidden ? formatWeakMap : formatWeakCollection; - } else if (isModuleNamespaceObject(value)) { - braces[0] = `${getPrefix(constructor, tag, 'Module')}{`; - // Special handle keys for namespace objects. - formatter = formatNamespaceObject.bind(null, keys); - } else if (isBoxedPrimitive(value)) { - base = getBoxedBase(value, ctx, keys, constructor, tag); - if (keys.length === 0 && protoProps === undefined) { - return base; - } - } else { - if (keys.length === 0 && protoProps === undefined) { - if (isExternal(value)) { - const address = getExternalValue(value).toString(16); - return ctx.stylize(`[External: ${address}]`, 'special'); - } - return `${getCtxStyle(value, constructor, tag)}{}`; - } - braces[0] = `${getCtxStyle(value, constructor, tag)}{`; - } - } - - if (recurseTimes > ctx.depth && ctx.depth !== null) { - let constructorName = getCtxStyle(value, constructor, tag).slice(0, -1); - if (constructor !== null) - constructorName = `[${constructorName}]`; - return ctx.stylize(constructorName, 'special'); - } - recurseTimes += 1; - - ctx.seen.push(value); - ctx.currentDepth = recurseTimes; - let output; - const indentationLvl = ctx.indentationLvl; - try { - output = formatter(ctx, value, recurseTimes); - for (i = 0; i < keys.length; i++) { - output.push( - formatProperty(ctx, value, recurseTimes, keys[i], extrasType)); - } - if (protoProps !== undefined) { - output.push(...protoProps); - } - } catch (err) { - const constructorName = getCtxStyle(value, constructor, tag).slice(0, -1); - return handleMaxCallStackSize(ctx, err, constructorName, indentationLvl); - } - if (ctx.circular !== undefined) { - const index = ctx.circular.get(value); - if (index !== undefined) { - const reference = ctx.stylize(``, 'special'); - // Add reference always to the very beginning of the output. - if (ctx.compact !== true) { - base = base === '' ? reference : `${reference} ${base}`; - } else { - braces[0] = `${reference} ${braces[0]}`; - } - } - } - ctx.seen.pop(); - - if (ctx.sorted) { - const comparator = ctx.sorted === true ? undefined : ctx.sorted; - if (extrasType === kObjectType) { - output = output.sort(comparator); - } else if (keys.length > 1) { - const sorted = output.slice(output.length - keys.length).sort(comparator); - output.splice(output.length - keys.length, keys.length, ...sorted); - } - } - - const res = reduceToSingleString( - ctx, output, base, braces, extrasType, recurseTimes, value); - const budget = ctx.budget[ctx.indentationLvl] || 0; - const newLength = budget + res.length; - ctx.budget[ctx.indentationLvl] = newLength; - // If any indentationLvl exceeds this limit, limit further inspecting to the - // minimum. Otherwise the recursive algorithm might continue inspecting the - // object even though the maximum string size (~2 ** 28 on 32 bit systems and - // ~2 ** 30 on 64 bit systems) exceeded. The actual output is not limited at - // exactly 2 ** 27 but a bit higher. This depends on the object shape. - // This limit also makes sure that huge objects don't block the event loop - // significantly. 
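The depth cut-off and the `sorted` handling above are user-visible through the corresponding options. A short demonstration, assuming Node's `util.inspect`:

const util = require('util'); // assumes a Node.js runtime for the demo
const nested = { a: { b: { c: { d: 1 } } } };
console.log(util.inspect(nested, { depth: 1 }));
// { a: { b: [Object] } }
console.log(util.inspect({ b: 2, a: 1, c: 3 }, { sorted: true }));
// { a: 1, b: 2, c: 3 }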
- if (newLength > 2 ** 27) { - ctx.depth = -1; - } - return res; -} - -function getIteratorBraces(type, tag) { - if (tag !== `${type} Iterator`) { - if (tag !== '') - tag += '] ['; - tag += `${type} Iterator`; - } - return [`[${tag}] {`, '}']; -} - -function getBoxedBase(value, ctx, keys, constructor, tag) { - let fn; - let type; - if (isNumberObject(value)) { - fn = NumberPrototypeValueOf; - type = 'Number'; - } else if (isStringObject(value)) { - fn = StringPrototypeValueOf; - type = 'String'; - // For boxed Strings, we have to remove the 0-n indexed entries, - // since they just noisy up the output and are redundant - // Make boxed primitive Strings look like such - keys.splice(0, value.length); - } else if (isBooleanObject(value)) { - fn = BooleanPrototypeValueOf; - type = 'Boolean'; - } else if (isBigIntObject(value)) { - fn = BigIntPrototypeValueOf; - type = 'BigInt'; - } else { - fn = SymbolPrototypeValueOf; - type = 'Symbol'; - } - let base = `[${type}`; - if (type !== constructor) { - if (constructor === null) { - base += ' (null prototype)'; - } else { - base += ` (${constructor})`; - } - } - base += `: ${formatPrimitive(stylizeNoColor, fn(value), ctx)}]`; - if (tag !== '' && tag !== constructor) { - base += ` [${tag}]`; - } - if (keys.length !== 0 || ctx.stylize === stylizeNoColor) - return base; - return ctx.stylize(base, StringPrototypeToLowerCase(type)); -} - -function getClassBase(value, constructor, tag) { - const hasName = ObjectPrototypeHasOwnProperty(value, 'name'); - const name = (hasName && value.name) || '(anonymous)'; - let base = `class ${name}`; - if (constructor !== 'Function' && constructor !== null) { - base += ` [${constructor}]`; - } - if (tag !== '' && constructor !== tag) { - base += ` [${tag}]`; - } - if (constructor !== null) { - const superName = ObjectGetPrototypeOf(value).name; - if (superName) { - base += ` extends ${superName}`; - } - } else { - base += ' extends [null prototype]'; - } - return `[${base}]`; -} - -function getFunctionBase(value, constructor, tag) { - const stringified = FunctionPrototypeToString(value); - if (stringified.startsWith('class') && stringified.endsWith('}')) { - const slice = stringified.slice(5, -1); - const bracketIndex = slice.indexOf('{'); - if (bracketIndex !== -1 && - (!slice.slice(0, bracketIndex).includes('(') || - // Slow path to guarantee that it's indeed a class. - classRegExp.test(slice.replace(stripCommentsRegExp)))) { - return getClassBase(value, constructor, tag); - } - } - let type = 'Function'; - if (isGeneratorFunction(value)) { - type = `Generator${type}`; - } - if (isAsyncFunction(value)) { - type = `Async${type}`; - } - let base = `[${type}`; - if (constructor === null) { - base += ' (null prototype)'; - } - if (value.name === '') { - base += ' (anonymous)'; - } else { - base += `: ${value.name}`; - } - base += ']'; - if (constructor !== type && constructor !== null) { - base += ` ${constructor}`; - } - if (tag !== '' && constructor !== tag) { - base += ` [${tag}]`; - } - return base; -} - -function identicalSequenceRange(a, b) { - for (let i = 0; i < a.length - 3; i++) { - // Find the first entry of b that matches the current entry of a. - const pos = b.indexOf(a[i]); - if (pos !== -1) { - const rest = b.length - pos; - if (rest > 3) { - let len = 1; - const maxLen = MathMin(a.length - i, rest); - // Count the number of consecutive entries. 
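The `getBoxedBase`, `getClassBase` and `getFunctionBase` helpers above produce the bracketed "base" strings seen in output such as the following, assuming Node's `util.inspect`:

const util = require('util'); // assumes a Node.js runtime for the demo
console.log(util.inspect(new Number(42)));      // [Number: 42]
console.log(util.inspect(Object('abc')));       // [String: 'abc']
console.log(util.inspect(function named() {})); // [Function: named]
console.log(util.inspect(class Foo extends Array {}));
// [class Foo extends Array]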
- while (maxLen > len && a[i + len] === b[pos + len]) { - len++; - } - if (len > 3) { - return { len, offset: i }; - } - } - } - } - - return { len: 0, offset: 0 }; -} - -function getStackString(error) { - return error.stack ? String(error.stack) : ErrorPrototypeToString(error); -} - -function getStackFrames(ctx, err, stack) { - const frames = stack.split('\n'); - - // Remove stack frames identical to frames in cause. - if (err.cause && isError(err.cause)) { - const causeStack = getStackString(err.cause); - const causeStackStart = causeStack.indexOf('\n at'); - if (causeStackStart !== -1) { - const causeFrames = causeStack.slice(causeStackStart + 1).split('\n'); - const { len, offset } = identicalSequenceRange(frames, causeFrames); - if (len > 0) { - const skipped = len - 2; - const msg = ` ... ${skipped} lines matching cause stack trace ...`; - frames.splice(offset + 1, skipped, ctx.stylize(msg, 'undefined')); - } - } - } - return frames; -} - -function improveStack(stack, constructor, name, tag) { - // A stack trace may contain arbitrary data. Only manipulate the output - // for "regular errors" (errors that "look normal") for now. - let len = name.length; - - if (constructor === null || - (name.endsWith('Error') && - stack.startsWith(name) && - (stack.length === len || stack[len] === ':' || stack[len] === '\n'))) { - let fallback = 'Error'; - if (constructor === null) { - const start = stack.match(/^([A-Z][a-z_ A-Z0-9[\]()-]+)(?::|\n {4}at)/) || - stack.match(/^([a-z_A-Z0-9-]*Error)$/); - fallback = (start && start[1]) || ''; - len = fallback.length; - fallback = fallback || 'Error'; - } - const prefix = getPrefix(constructor, tag, fallback).slice(0, -1); - if (name !== prefix) { - if (prefix.includes(name)) { - if (len === 0) { - stack = `${prefix}: ${stack}`; - } else { - stack = `${prefix}${stack.slice(len)}`; - } - } else { - stack = `${prefix} [${name}]${stack.slice(len)}`; - } - } - } - return stack; -} - -function removeDuplicateErrorKeys(ctx, keys, err, stack) { - if (!ctx.showHidden && keys.length !== 0) { - for (const name of ['name', 'message', 'stack']) { - const index = keys.indexOf(name); - // Only hide the property in case it's part of the original stack - if (index !== -1 && stack.includes(err[name])) { - keys.splice(index, 1); - } - } - } -} - -function formatError(err, constructor, tag, ctx, keys) { - const name = err.name != null ? String(err.name) : 'Error'; - let stack = getStackString(err); - - removeDuplicateErrorKeys(ctx, keys, err, stack); - - if ('cause' in err && - (keys.length === 0 || !keys.includes('cause'))) { - keys.push('cause'); - } - - stack = improveStack(stack, constructor, name, tag); - - // Ignore the error message if it's contained in the stack. - let pos = (err.message && stack.indexOf(err.message)) || -1; - if (pos !== -1) - pos += err.message.length; - // Wrap the error in brackets in case it has no stack trace. - const stackStart = stack.indexOf('\n at', pos); - if (stackStart === -1) { - stack = `[${stack}]`; - } else { - let newStack = stack.slice(0, stackStart); - const lines = getStackFrames(ctx, err, stack.slice(stackStart + 1)); - if (ctx.colors) { - // Highlight userland code and node modules. - for (const line of lines) { - const core = line.match(coreModuleRegExp); - if (core !== null && NativeModule.exists(core[1])) { - newStack += `\n${ctx.stylize(line, 'undefined')}`; - } else { - // This adds underscores to all node_modules to quickly identify them. 
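The error formatting above prints the stack as-is, hides `name`/`message`/`stack` keys that are already part of it, and appends any remaining own properties after the trace. A small check of that behaviour, assuming Node's `util.inspect`:

const util = require('util'); // assumes a Node.js runtime for the demo
const err = new Error('boom');
err.code = 'E_BOOM'; // an extra own property, not part of the stack text
console.log(util.inspect(err).split('\n')[0]);              // Error: boom
console.log(util.inspect(err).includes("code: 'E_BOOM'"));  // true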
- let nodeModule; - newStack += '\n'; - let pos = 0; - while ((nodeModule = nodeModulesRegExp.exec(line)) !== null) { - // '/node_modules/'.length === 14 - newStack += line.slice(pos, nodeModule.index + 14); - newStack += ctx.stylize(nodeModule[1], 'module'); - pos = nodeModule.index + nodeModule[0].length; - } - newStack += pos === 0 ? line : line.slice(pos); - } - } - } else { - newStack += `\n${lines.join('\n')}`; - } - stack = newStack; - } - // The message and the stack have to be indented as well! - if (ctx.indentationLvl !== 0) { - const indentation = ' '.repeat(ctx.indentationLvl); - stack = stack.replace(/\n/g, `\n${indentation}`); - } - return stack; -} - -function groupArrayElements(ctx, output, value) { - let totalLength = 0; - let maxLength = 0; - let i = 0; - let outputLength = output.length; - if (ctx.maxArrayLength < output.length) { - // This makes sure the "... n more items" part is not taken into account. - outputLength--; - } - const separatorSpace = 2; // Add 1 for the space and 1 for the separator. - const dataLen = new Array(outputLength); - // Calculate the total length of all output entries and the individual max - // entries length of all output entries. We have to remove colors first, - // otherwise the length would not be calculated properly. - for (; i < outputLength; i++) { - const len = getStringWidth(output[i], ctx.colors); - dataLen[i] = len; - totalLength += len + separatorSpace; - if (maxLength < len) - maxLength = len; - } - // Add two to `maxLength` as we add a single whitespace character plus a comma - // in-between two entries. - const actualMax = maxLength + separatorSpace; - // Check if at least three entries fit next to each other and prevent grouping - // of arrays that contains entries of very different length (i.e., if a single - // entry is longer than 1/5 of all other entries combined). Otherwise the - // space in-between small entries would be enormous. - if (actualMax * 3 + ctx.indentationLvl < ctx.breakLength && - (totalLength / actualMax > 5 || maxLength <= 6)) { - - const approxCharHeights = 2.5; - const averageBias = MathSqrt(actualMax - totalLength / output.length); - const biasedMax = MathMax(actualMax - 3 - averageBias, 1); - // Dynamically check how many columns seem possible. - const columns = MathMin( - // Ideally a square should be drawn. We expect a character to be about 2.5 - // times as high as wide. This is the area formula to calculate a square - // which contains n rectangles of size `actualMax * approxCharHeights`. - // Divide that by `actualMax` to receive the correct number of columns. - // The added bias increases the columns for short entries. - MathRound( - MathSqrt( - approxCharHeights * biasedMax * outputLength - ) / biasedMax - ), - // Do not exceed the breakLength. - MathFloor((ctx.breakLength - ctx.indentationLvl) / actualMax), - // Limit array grouping for small `compact` modes as the user requested - // minimal grouping. - ctx.compact * 4, - // Limit the columns to a maximum of fifteen. - 15 - ); - // Return with the original output if no grouping should happen. 
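The column computation above is what groups long arrays of short entries into aligned columns. A quick way to see it, assuming Node's `util.inspect` with its default `compact: 3` (the exact column count depends on entry widths and `breakLength`, so no literal output is shown):

const util = require('util'); // assumes a Node.js runtime for the demo
// 26 short entries: printed grouped several per line with padded columns,
// rather than one entry per line or all on a single line.
console.log(util.inspect(Array.from({ length: 26 }, (_, i) => i * 11)));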
- if (columns <= 1) { - return output; - } - const tmp = []; - const maxLineLength = []; - for (let i = 0; i < columns; i++) { - let lineMaxLength = 0; - for (let j = i; j < output.length; j += columns) { - if (dataLen[j] > lineMaxLength) - lineMaxLength = dataLen[j]; - } - lineMaxLength += separatorSpace; - maxLineLength[i] = lineMaxLength; - } - let order = StringPrototypePadStart; - if (value !== undefined) { - for (let i = 0; i < output.length; i++) { - if (typeof value[i] !== 'number' && typeof value[i] !== 'bigint') { - order = StringPrototypePadEnd; - break; - } - } - } - // Each iteration creates a single line of grouped entries. - for (let i = 0; i < outputLength; i += columns) { - // The last lines may contain less entries than columns. - const max = MathMin(i + columns, outputLength); - let str = ''; - let j = i; - for (; j < max - 1; j++) { - // Calculate extra color padding in case it's active. This has to be - // done line by line as some lines might contain more colors than - // others. - const padding = maxLineLength[j - i] + output[j].length - dataLen[j]; - str += order(`${output[j]}, `, padding, ' '); - } - if (order === StringPrototypePadStart) { - const padding = maxLineLength[j - i] + - output[j].length - - dataLen[j] - - separatorSpace; - str += StringPrototypePadStart(output[j], padding, ' '); - } else { - str += output[j]; - } - ArrayPrototypePush(tmp, str); - } - if (ctx.maxArrayLength < output.length) { - ArrayPrototypePush(tmp, output[outputLength]); - } - output = tmp; - } - return output; -} - -function handleMaxCallStackSize(ctx, err, constructorName, indentationLvl) { - if (isStackOverflowError(err)) { - ctx.seen.pop(); - ctx.indentationLvl = indentationLvl; - return ctx.stylize( - `[${constructorName}: Inspection interrupted ` + - 'prematurely. Maximum call stack size exceeded.]', - 'special' - ); - } - /* c8 ignore next */ - assert.fail(err.stack); -} - -function addNumericSeparator(integerString) { - let result = ''; - let i = integerString.length; - const start = integerString.startsWith('-') ? 1 : 0; - for (; i >= start + 4; i -= 3) { - result = `_${integerString.slice(i - 3, i)}${result}`; - } - return i === integerString.length ? - integerString : - `${integerString.slice(0, i)}${result}`; -} - -function addNumericSeparatorEnd(integerString) { - let result = ''; - let i = 0; - for (; i < integerString.length - 3; i += 3) { - result += `${integerString.slice(i, i + 3)}_`; - } - return i === 0 ? - integerString : - `${result}${integerString.slice(i)}`; -} - -function formatNumber(fn, number, numericSeparator) { - if (!numericSeparator) { - // Format -0 as '-0'. Checking `number === -0` won't distinguish 0 from -0. 
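The `addNumericSeparator` helpers and the `-0` special case above surface through the `numericSeparator` option and plain number formatting. For reference, assuming a reasonably recent Node.js runtime where that option exists:

const util = require('util'); // assumes a Node.js runtime for the demo
console.log(util.inspect(1234567.891, { numericSeparator: true })); // 1_234_567.891
console.log(util.inspect(123456789n, { numericSeparator: true }));  // 123_456_789n
console.log(util.inspect(-0));                                      // -0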
- if (ObjectIs(number, -0)) { - return fn('-0', 'number'); - } - return fn(`${number}`, 'number'); - } - const integer = MathTrunc(number); - const string = String(integer); - if (integer === number) { - if (!NumberIsFinite(number) || string.includes('e')) { - return fn(string, 'number'); - } - return fn(`${addNumericSeparator(string)}`, 'number'); - } - if (NumberIsNaN(number)) { - return fn(string, 'number'); - } - return fn(`${ - addNumericSeparator(string) - }.${ - addNumericSeparatorEnd(String(number).slice(string.length + 1)) - }`, 'number'); -} - -function formatBigInt(fn, bigint, numericSeparator) { - const string = String(bigint); - if (!numericSeparator) { - return fn(`${string}n`, 'bigint'); - } - return fn(`${addNumericSeparator(string)}n`, 'bigint'); -} - -function formatPrimitive(fn, value, ctx) { - if (typeof value === 'string') { - let trailer = ''; - if (value.length > ctx.maxStringLength) { - const remaining = value.length - ctx.maxStringLength; - value = value.slice(0, ctx.maxStringLength); - trailer = `... ${remaining} more character${remaining > 1 ? 's' : ''}`; - } - if (ctx.compact !== true && - // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth - // function. - value.length > kMinLineLength && - value.length > ctx.breakLength - ctx.indentationLvl - 4) { - return value - .split(/(?:=\n)/) - .map((line) => fn(strEscape(line), 'string')) - .join(` +\n${' '.repeat(ctx.indentationLvl + 2)}`) + trailer; - } - return fn(strEscape(value), 'string') + trailer; - } - if (typeof value === 'number') - return formatNumber(fn, value, ctx.numericSeparator); - if (typeof value === 'bigint') - return formatBigInt(fn, value, ctx.numericSeparator); - if (typeof value === 'boolean') - return fn(`${value}`, 'boolean'); - if (typeof value === 'undefined') - return fn('undefined', 'undefined'); - // es6 symbol primitive - return fn(SymbolPrototypeToString(value), 'symbol'); -} - -function formatNamespaceObject(keys, ctx, value, recurseTimes) { - const output = new Array(keys.length); - for (let i = 0; i < keys.length; i++) { - try { - output[i] = formatProperty(ctx, value, recurseTimes, keys[i], - kObjectType); - } catch (err) { - assert(isNativeError(err) && err.name === 'ReferenceError'); - // Use the existing functionality. This makes sure the indentation and - // line breaks are always correct. Otherwise it is very difficult to keep - // this aligned, even though this is a hacky way of dealing with this. - const tmp = { [keys[i]]: '' }; - output[i] = formatProperty(ctx, tmp, recurseTimes, keys[i], kObjectType); - const pos = output[i].lastIndexOf(' '); - // We have to find the last whitespace and have to replace that value as - // it will be visualized as a regular string. - output[i] = output[i].slice(0, pos + 1) + - ctx.stylize('', 'special'); - } - } - // Reset the keys to an empty array. This prevents duplicated inspection. - keys.length = 0; - return output; -} - -// The array is sparse and/or has extra keys -function formatSpecialArray(ctx, value, recurseTimes, maxLength, output, i) { - const keys = ObjectKeys(value); - let index = i; - for (; i < keys.length && output.length < maxLength; i++) { - const key = keys[i]; - const tmp = +key; - // Arrays can only have up to 2^32 - 1 entries - if (tmp > 2 ** 32 - 2) { - break; - } - if (`${index}` !== key) { - if (!numberRegExp.test(key)) { - break; - } - const emptyItems = tmp - index; - const ending = emptyItems > 1 ? 
's' : ''; - const message = `<${emptyItems} empty item${ending}>`; - output.push(ctx.stylize(message, 'undefined')); - index = tmp; - if (output.length === maxLength) { - break; - } - } - output.push(formatProperty(ctx, value, recurseTimes, key, kArrayType)); - index++; - } - const remaining = value.length - index; - if (output.length !== maxLength) { - if (remaining > 0) { - const ending = remaining > 1 ? 's' : ''; - const message = `<${remaining} empty item${ending}>`; - output.push(ctx.stylize(message, 'undefined')); - } - } else if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? 's' : ''}`); - } - return output; -} - -function formatArrayBuffer(ctx, value) { - let buffer; - try { - buffer = new Uint8Array(value); - } catch { - return [ctx.stylize('(detached)', 'special')]; - } - if (hexSlice === undefined) - hexSlice = uncurryThis(require('buffer').Buffer.prototype.hexSlice); - let str = StringPrototypeTrim(StringPrototypeReplace( - hexSlice(buffer, 0, MathMin(ctx.maxArrayLength, buffer.length)), - /(.{2})/g, '$1 ')); - const remaining = buffer.length - ctx.maxArrayLength; - if (remaining > 0) - str += ` ... ${remaining} more byte${remaining > 1 ? 's' : ''}`; - return [`${ctx.stylize('[Uint8Contents]', 'special')}: <${str}>`]; -} - -function formatArray(ctx, value, recurseTimes) { - const valLen = value.length; - const len = MathMin(MathMax(0, ctx.maxArrayLength), valLen); - - const remaining = valLen - len; - const output = []; - for (let i = 0; i < len; i++) { - // Special handle sparse arrays. - if (!ObjectPrototypeHasOwnProperty(value, i)) { - return formatSpecialArray(ctx, value, recurseTimes, len, output, i); - } - output.push(formatProperty(ctx, value, recurseTimes, i, kArrayType)); - } - if (remaining > 0) - output.push(`... ${remaining} more item${remaining > 1 ? 's' : ''}`); - return output; -} - -function formatTypedArray(value, length, ctx, ignored, recurseTimes) { - const maxLength = MathMin(MathMax(0, ctx.maxArrayLength), length); - const remaining = value.length - maxLength; - const output = new Array(maxLength); - const elementFormatter = value.length > 0 && typeof value[0] === 'number' ? - formatNumber : - formatBigInt; - for (let i = 0; i < maxLength; ++i) { - output[i] = elementFormatter(ctx.stylize, value[i], ctx.numericSeparator); - } - if (remaining > 0) { - output[maxLength] = `... ${remaining} more item${remaining > 1 ? 's' : ''}`; - } - if (ctx.showHidden) { - // .buffer goes last, it's not a primitive like the others. - // All besides `BYTES_PER_ELEMENT` are actually getters. 
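The sparse-array handling and the `maxArrayLength` truncation above look like this from the outside, assuming Node's `util.inspect`:

const util = require('util'); // assumes a Node.js runtime for the demo
const sparse = [1, , , 4];         // two holes in the middle
console.log(util.inspect(sparse)); // [ 1, <2 empty items>, 4 ]
console.log(util.inspect(new Array(100).fill(0), { maxArrayLength: 3 }));
// [ 0, 0, 0, ... 97 more items ]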
- ctx.indentationLvl += 2; - for (const key of [ - 'BYTES_PER_ELEMENT', - 'length', - 'byteLength', - 'byteOffset', - 'buffer', - ]) { - const str = formatValue(ctx, value[key], recurseTimes, true); - ArrayPrototypePush(output, `[${key}]: ${str}`); - } - ctx.indentationLvl -= 2; - } - return output; -} - -function formatSet(value, ctx, ignored, recurseTimes) { - const output = []; - ctx.indentationLvl += 2; - for (const v of value) { - ArrayPrototypePush(output, formatValue(ctx, v, recurseTimes)); - } - ctx.indentationLvl -= 2; - return output; -} - -function formatMap(value, ctx, ignored, recurseTimes) { - const output = []; - ctx.indentationLvl += 2; - for (const { 0: k, 1: v } of value) { - output.push( - `${formatValue(ctx, k, recurseTimes)} => ${formatValue(ctx, v, recurseTimes)}` - ); - } - ctx.indentationLvl -= 2; - return output; -} - -function formatSetIterInner(ctx, recurseTimes, entries, state) { - const maxArrayLength = MathMax(ctx.maxArrayLength, 0); - const maxLength = MathMin(maxArrayLength, entries.length); - const output = new Array(maxLength); - ctx.indentationLvl += 2; - for (let i = 0; i < maxLength; i++) { - output[i] = formatValue(ctx, entries[i], recurseTimes); - } - ctx.indentationLvl -= 2; - if (state === kWeak && !ctx.sorted) { - // Sort all entries to have a halfway reliable output (if more entries than - // retrieved ones exist, we can not reliably return the same output) if the - // output is not sorted anyway. - ArrayPrototypeSort(output); - } - const remaining = entries.length - maxLength; - if (remaining > 0) { - ArrayPrototypePush(output, - `... ${remaining} more item${remaining > 1 ? 's' : ''}`); - } - return output; -} - -function formatMapIterInner(ctx, recurseTimes, entries, state) { - const maxArrayLength = MathMax(ctx.maxArrayLength, 0); - // Entries exist as [key1, val1, key2, val2, ...] - const len = entries.length / 2; - const remaining = len - maxArrayLength; - const maxLength = MathMin(maxArrayLength, len); - let output = new Array(maxLength); - let i = 0; - ctx.indentationLvl += 2; - if (state === kWeak) { - for (; i < maxLength; i++) { - const pos = i * 2; - output[i] = - `${formatValue(ctx, entries[pos], recurseTimes)} => ${formatValue(ctx, entries[pos + 1], recurseTimes)}`; - } - // Sort all entries to have a halfway reliable output (if more entries than - // retrieved ones exist, we can not reliably return the same output) if the - // output is not sorted anyway. - if (!ctx.sorted) - output = output.sort(); - } else { - for (; i < maxLength; i++) { - const pos = i * 2; - const res = [ - formatValue(ctx, entries[pos], recurseTimes), - formatValue(ctx, entries[pos + 1], recurseTimes), - ]; - output[i] = reduceToSingleString( - ctx, res, '', ['[', ']'], kArrayExtrasType, recurseTimes); - } - } - ctx.indentationLvl -= 2; - if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? 's' : ''}`); - } - return output; -} - -function formatWeakCollection(ctx) { - return [ctx.stylize('', 'special')]; -} - -function formatWeakSet(ctx, value, recurseTimes) { - const entries = previewEntries(value); - return formatSetIterInner(ctx, recurseTimes, entries, kWeak); -} - -function formatWeakMap(ctx, value, recurseTimes) { - const entries = previewEntries(value); - return formatMapIterInner(ctx, recurseTimes, entries, kWeak); -} - -function formatIterator(braces, ctx, value, recurseTimes) { - const { 0: entries, 1: isKeyValue } = previewEntries(value, true); - if (isKeyValue) { - // Mark entry iterators as such. 
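The weak-collection formatters above only reveal entries when `showHidden` is set (otherwise the entries cannot be enumerated reliably). A short demonstration, assuming Node's own `util.inspect`:

const util = require('util'); // assumes a Node.js runtime for the demo
const ws = new WeakSet([{ a: 1 }]);
console.log(util.inspect(ws));                       // WeakSet { <items unknown> }
console.log(util.inspect(ws, { showHidden: true })); // WeakSet { { a: 1 } }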
- braces[0] = braces[0].replace(/ Iterator] {$/, ' Entries] {'); - return formatMapIterInner(ctx, recurseTimes, entries, kMapEntries); - } - - return formatSetIterInner(ctx, recurseTimes, entries, kIterator); -} - -function formatPromise(ctx, value, recurseTimes) { - let output; - const { 0: state, 1: result } = getPromiseDetails(value); - if (state === kPending) { - output = [ctx.stylize('', 'special')]; - } else { - ctx.indentationLvl += 2; - const str = formatValue(ctx, result, recurseTimes); - ctx.indentationLvl -= 2; - output = [ - state === kRejected ? - `${ctx.stylize('', 'special')} ${str}` : - str, - ]; - } - return output; -} - -function formatProperty(ctx, value, recurseTimes, key, type, desc, - original = value) { - let name, str; - let extra = ' '; - desc = desc || ObjectGetOwnPropertyDescriptor(value, key) || - { value: value[key], enumerable: true }; - if (desc.value !== undefined) { - const diff = (ctx.compact !== true || type !== kObjectType) ? 2 : 3; - ctx.indentationLvl += diff; - str = formatValue(ctx, desc.value, recurseTimes); - if (diff === 3 && ctx.breakLength < getStringWidth(str, ctx.colors)) { - extra = `\n${' '.repeat(ctx.indentationLvl)}`; - } - ctx.indentationLvl -= diff; - } else if (desc.get !== undefined) { - const label = desc.set !== undefined ? 'Getter/Setter' : 'Getter'; - const s = ctx.stylize; - const sp = 'special'; - if (ctx.getters && (ctx.getters === true || - (ctx.getters === 'get' && desc.set === undefined) || - (ctx.getters === 'set' && desc.set !== undefined))) { - try { - const tmp = FunctionPrototypeCall(desc.get, original); - ctx.indentationLvl += 2; - if (tmp === null) { - str = `${s(`[${label}:`, sp)} ${s('null', 'null')}${s(']', sp)}`; - } else if (typeof tmp === 'object') { - str = `${s(`[${label}]`, sp)} ${formatValue(ctx, tmp, recurseTimes)}`; - } else { - const primitive = formatPrimitive(s, tmp, ctx); - str = `${s(`[${label}:`, sp)} ${primitive}${s(']', sp)}`; - } - ctx.indentationLvl -= 2; - } catch (err) { - const message = ``; - str = `${s(`[${label}:`, sp)} ${message}${s(']', sp)}`; - } - } else { - str = ctx.stylize(`[${label}]`, sp); - } - } else if (desc.set !== undefined) { - str = ctx.stylize('[Setter]', 'special'); - } else { - str = ctx.stylize('undefined', 'undefined'); - } - if (type === kArrayType) { - return str; - } - if (typeof key === 'symbol') { - const tmp = StringPrototypeReplace( - SymbolPrototypeToString(key), - strEscapeSequencesReplacer, escapeFn - ); - name = `[${ctx.stylize(tmp, 'symbol')}]`; - } else if (key === '__proto__') { - name = "['__proto__']"; - } else if (desc.enumerable === false) { - const tmp = StringPrototypeReplace(key, - strEscapeSequencesReplacer, escapeFn); - name = `[${tmp}]`; - } else if (RegExpPrototypeTest(keyStrRegExp, key)) { - name = ctx.stylize(key, 'name'); - } else { - name = ctx.stylize(strEscape(key), 'string'); - } - return `${name}:${extra}${str}`; -} - -function isBelowBreakLength(ctx, output, start, base) { - // Each entry is separated by at least a comma. Thus, we start with a total - // length of at least `output.length`. In addition, some cases have a - // whitespace in-between each other that is added to the total as well. - // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth - // function. Check the performance overhead and make it an opt-in in case it's - // significant. 
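The getter handling in `formatProperty` above is controlled by the `getters` option: by default accessors are only labelled, and with `getters: true` they are actually invoked. For reference, assuming Node's `util.inspect`:

const util = require('util'); // assumes a Node.js runtime for the demo
const obj = { get answer() { return 42; } };
console.log(util.inspect(obj));                    // { answer: [Getter] }
console.log(util.inspect(obj, { getters: true })); // { answer: [Getter: 42] }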
- let totalLength = output.length + start; - if (totalLength + output.length > ctx.breakLength) - return false; - for (let i = 0; i < output.length; i++) { - if (ctx.colors) { - totalLength += removeColors(output[i]).length; - } else { - totalLength += output[i].length; - } - if (totalLength > ctx.breakLength) { - return false; - } - } - // Do not line up properties on the same line if `base` contains line breaks. - return base === '' || !StringPrototypeIncludes(base, '\n'); -} - -function reduceToSingleString( - ctx, output, base, braces, extrasType, recurseTimes, value) { - if (ctx.compact !== true) { - if (typeof ctx.compact === 'number' && ctx.compact >= 1) { - // Memorize the original output length. In case the output is grouped, - // prevent lining up the entries on a single line. - const entries = output.length; - // Group array elements together if the array contains at least six - // separate entries. - if (extrasType === kArrayExtrasType && entries > 6) { - output = groupArrayElements(ctx, output, value); - } - // `ctx.currentDepth` is set to the most inner depth of the currently - // inspected object part while `recurseTimes` is the actual current depth - // that is inspected. - // - // Example: - // - // const a = { first: [ 1, 2, 3 ], second: { inner: [ 1, 2, 3 ] } } - // - // The deepest depth of `a` is 2 (a.second.inner) and `a.first` has a max - // depth of 1. - // - // Consolidate all entries of the local most inner depth up to - // `ctx.compact`, as long as the properties are smaller than - // `ctx.breakLength`. - if (ctx.currentDepth - recurseTimes < ctx.compact && - entries === output.length) { - // Line up all entries on a single line in case the entries do not - // exceed `breakLength`. Add 10 as constant to start next to all other - // factors that may reduce `breakLength`. - const start = output.length + ctx.indentationLvl + - braces[0].length + base.length + 10; - if (isBelowBreakLength(ctx, output, start, base)) { - const joinedOutput = join(output, ', '); - if (!joinedOutput.includes('\n')) { - return `${base ? `${base} ` : ''}${braces[0]} ${joinedOutput}` + - ` ${braces[1]}`; - } - } - } - } - // Line up each entry on an individual line. - const indentation = `\n${StringPrototypeRepeat(' ', ctx.indentationLvl)}`; - return `${base ? `${base} ` : ''}${braces[0]}${indentation} ` + - `${join(output, `,${indentation} `)}${indentation}${braces[1]}`; - } - // Line up all entries on a single line in case the entries do not exceed - // `breakLength`. - if (isBelowBreakLength(ctx, output, 0, base)) { - return `${braces[0]}${base ? ` ${base}` : ''} ${join(output, ', ')} ` + - braces[1]; - } - const indentation = StringPrototypeRepeat(' ', ctx.indentationLvl); - // If the opening "brace" is too large, like in the case of "Set {", - // we need to force the first item to be on the next line or the - // items will not line up correctly. - const ln = base === '' && braces[0].length === 1 ? - ' ' : `${base ? ` ${base}` : ''}\n${indentation} `; - // Line up each entry on an individual line. - return `${braces[0]}${ln}${join(output, `,\n${indentation} `)} ${braces[1]}`; -} - -function hasBuiltInToString(value) { - // Prevent triggering proxy traps. - const getFullProxy = false; - const proxyTarget = getProxyDetails(value, getFullProxy); - if (proxyTarget !== undefined) { - value = proxyTarget; - } - - // Count objects that have no `toString` function as built-in. - if (typeof value.toString !== 'function') { - return true; - } - - // The object has a own `toString` property. 
Thus it's not not a built-in one. - if (ObjectPrototypeHasOwnProperty(value, 'toString')) { - return false; - } - - // Find the object that has the `toString` property as own property in the - // prototype chain. - let pointer = value; - do { - pointer = ObjectGetPrototypeOf(pointer); - } while (!ObjectPrototypeHasOwnProperty(pointer, 'toString')); - - // Check closer if the object is a built-in. - const descriptor = ObjectGetOwnPropertyDescriptor(pointer, 'constructor'); - return descriptor !== undefined && - typeof descriptor.value === 'function' && - builtInObjects.has(descriptor.value.name); -} - -const firstErrorLine = (error) => - StringPrototypeSplit(error.message, '\n', 1)[0]; -let CIRCULAR_ERROR_MESSAGE; -function tryStringify(arg) { - try { - return JSONStringify(arg); - } catch (err) { - // Populate the circular error message lazily - if (!CIRCULAR_ERROR_MESSAGE) { - try { - const a = {}; a.a = a; JSONStringify(a); - } catch (circularError) { - CIRCULAR_ERROR_MESSAGE = firstErrorLine(circularError); - } - } - if (err.name === 'TypeError' && - firstErrorLine(err) === CIRCULAR_ERROR_MESSAGE) { - return '[Circular]'; - } - throw err; - } -} - -function format(...args) { - return formatWithOptionsInternal(undefined, args); -} - -function formatWithOptions(inspectOptions, ...args) { - if (typeof inspectOptions !== 'object' || inspectOptions === null) { - throw new ERR_INVALID_ARG_TYPE( - 'inspectOptions', 'object', inspectOptions); - } - return formatWithOptionsInternal(inspectOptions, args); -} - -function formatNumberNoColor(number, options) { - return formatNumber( - stylizeNoColor, - number, - options?.numericSeparator ?? inspectDefaultOptions.numericSeparator - ); -} - -function formatBigIntNoColor(bigint, options) { - return formatBigInt( - stylizeNoColor, - bigint, - options?.numericSeparator ?? 
inspectDefaultOptions.numericSeparator - ); -} - -function formatWithOptionsInternal(inspectOptions, args) { - const first = args[0]; - let a = 0; - let str = ''; - let join = ''; - - if (typeof first === 'string') { - if (args.length === 1) { - return first; - } - let tempStr; - let lastPos = 0; - - for (let i = 0; i < first.length - 1; i++) { - if (StringPrototypeCharCodeAt(first, i) === 37) { // '%' - const nextChar = StringPrototypeCharCodeAt(first, ++i); - if (a + 1 !== args.length) { - switch (nextChar) { - case 115: { // 's' - const tempArg = args[++a]; - if (typeof tempArg === 'number') { - tempStr = formatNumberNoColor(tempArg, inspectOptions); - } else if (typeof tempArg === 'bigint') { - tempStr = formatBigIntNoColor(tempArg, inspectOptions); - } else if (typeof tempArg !== 'object' || - tempArg === null || - !hasBuiltInToString(tempArg)) { - tempStr = String(tempArg); - } else { - tempStr = inspect(tempArg, { - ...inspectOptions, - compact: 3, - colors: false, - depth: 0 - }); - } - break; - } - case 106: // 'j' - tempStr = tryStringify(args[++a]); - break; - case 100: { // 'd' - const tempNum = args[++a]; - if (typeof tempNum === 'bigint') { - tempStr = formatBigIntNoColor(tempNum, inspectOptions); - } else if (typeof tempNum === 'symbol') { - tempStr = 'NaN'; - } else { - tempStr = formatNumberNoColor(Number(tempNum), inspectOptions); - } - break; - } - case 79: // 'O' - tempStr = inspect(args[++a], inspectOptions); - break; - case 111: // 'o' - tempStr = inspect(args[++a], { - ...inspectOptions, - showHidden: true, - showProxy: true, - depth: 4 - }); - break; - case 105: { // 'i' - const tempInteger = args[++a]; - if (typeof tempInteger === 'bigint') { - tempStr = formatBigIntNoColor(tempInteger, inspectOptions); - } else if (typeof tempInteger === 'symbol') { - tempStr = 'NaN'; - } else { - tempStr = formatNumberNoColor( - NumberParseInt(tempInteger), inspectOptions); - } - break; - } - case 102: { // 'f' - const tempFloat = args[++a]; - if (typeof tempFloat === 'symbol') { - tempStr = 'NaN'; - } else { - tempStr = formatNumberNoColor( - NumberParseFloat(tempFloat), inspectOptions); - } - break; - } - case 99: // 'c' - a += 1; - tempStr = ''; - break; - case 37: // '%' - str += StringPrototypeSlice(first, lastPos, i); - lastPos = i + 1; - continue; - default: // Any other character is not a correct placeholder - continue; - } - if (lastPos !== i - 1) { - str += StringPrototypeSlice(first, lastPos, i - 1); - } - str += tempStr; - lastPos = i + 1; - } else if (nextChar === 37) { - str += StringPrototypeSlice(first, lastPos, i); - lastPos = i + 1; - } - } - } - if (lastPos !== 0) { - a++; - join = ' '; - if (lastPos < first.length) { - str += StringPrototypeSlice(first, lastPos); - } - } - } - - while (a < args.length) { - const value = args[a]; - str += join; - str += typeof value !== 'string' ? inspect(value, inspectOptions) : value; - join = ' '; - a++; - } - return str; -} - -if (false) { - const icu = {}; - // icu.getStringWidth(string, ambiguousAsFullWidth, expandEmojiSequence) - // Defaults: ambiguousAsFullWidth = false; expandEmojiSequence = true; - // TODO(BridgeAR): Expose the options to the user. That is probably the - // best thing possible at the moment, since it's difficult to know what - // the receiving end supports. 
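The placeholder switch in `formatWithOptionsInternal` above is what backs the public `util.format`. A few representative cases, assuming Node's `util`:

const util = require('util'); // assumes a Node.js runtime for the demo
console.log(util.format('%s has %d items (%j)', 'cart', 3, { total: 9.5 }));
// cart has 3 items ({"total":9.5})
console.log(util.format('%i of %f', '42.9px', '2.5kg')); // 42 of 2.5
console.log(util.format('100%% done'));                  // 100% done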
- getStringWidth = function getStringWidth(str, removeControlChars = true) { - let width = 0; - - if (removeControlChars) - str = stripVTControlCharacters(str); - for (let i = 0; i < str.length; i++) { - // Try to avoid calling into C++ by first handling the ASCII portion of - // the string. If it is fully ASCII, we skip the C++ part. - const code = str.charCodeAt(i); - if (code >= 127) { - width += icu.getStringWidth(str.slice(i).normalize('NFC')); - break; - } - width += code >= 32 ? 1 : 0; - } - return width; - }; -} else { - /** - * Returns the number of columns required to display the given string. - */ - getStringWidth = function getStringWidth(str, removeControlChars = true) { - let width = 0; - - if (removeControlChars) - str = stripVTControlCharacters(str); - str = StringPrototypeNormalize(str, 'NFC'); - for (const char of new SafeStringIterator(str)) { - const code = StringPrototypeCodePointAt(char, 0); - if (isFullWidthCodePoint(code)) { - width += 2; - } else if (!isZeroWidthCodePoint(code)) { - width++; - } - } - - return width; - }; - - /** - * Returns true if the character represented by a given - * Unicode code point is full-width. Otherwise returns false. - */ - const isFullWidthCodePoint = (code) => { - // Code points are partially derived from: - // https://www.unicode.org/Public/UNIDATA/EastAsianWidth.txt - return code >= 0x1100 && ( - code <= 0x115f || // Hangul Jamo - code === 0x2329 || // LEFT-POINTING ANGLE BRACKET - code === 0x232a || // RIGHT-POINTING ANGLE BRACKET - // CJK Radicals Supplement .. Enclosed CJK Letters and Months - (code >= 0x2e80 && code <= 0x3247 && code !== 0x303f) || - // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A - (code >= 0x3250 && code <= 0x4dbf) || - // CJK Unified Ideographs .. Yi Radicals - (code >= 0x4e00 && code <= 0xa4c6) || - // Hangul Jamo Extended-A - (code >= 0xa960 && code <= 0xa97c) || - // Hangul Syllables - (code >= 0xac00 && code <= 0xd7a3) || - // CJK Compatibility Ideographs - (code >= 0xf900 && code <= 0xfaff) || - // Vertical Forms - (code >= 0xfe10 && code <= 0xfe19) || - // CJK Compatibility Forms .. Small Form Variants - (code >= 0xfe30 && code <= 0xfe6b) || - // Halfwidth and Fullwidth Forms - (code >= 0xff01 && code <= 0xff60) || - (code >= 0xffe0 && code <= 0xffe6) || - // Kana Supplement - (code >= 0x1b000 && code <= 0x1b001) || - // Enclosed Ideographic Supplement - (code >= 0x1f200 && code <= 0x1f251) || - // Miscellaneous Symbols and Pictographs 0x1f300 - 0x1f5ff - // Emoticons 0x1f600 - 0x1f64f - (code >= 0x1f300 && code <= 0x1f64f) || - // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane - (code >= 0x20000 && code <= 0x3fffd) - ); - }; - - const isZeroWidthCodePoint = (code) => { - return code <= 0x1F || // C0 control codes - (code >= 0x7F && code <= 0x9F) || // C1 control codes - (code >= 0x300 && code <= 0x36F) || // Combining Diacritical Marks - (code >= 0x200B && code <= 0x200F) || // Modifying Invisible Characters - // Combining Diacritical Marks for Symbols - (code >= 0x20D0 && code <= 0x20FF) || - (code >= 0xFE00 && code <= 0xFE0F) || // Variation Selectors - (code >= 0xFE20 && code <= 0xFE2F) || // Combining Half Marks - (code >= 0xE0100 && code <= 0xE01EF); // Variation Selectors - }; -} - -/** - * Remove all VT control characters. Use to estimate displayed string width. 
- */ -function stripVTControlCharacters(str) { - validateString(str, 'str'); - - return str.replace(ansi, ''); -} - -module.exports = { - inspect, - format, - formatWithOptions, - getStringWidth, - inspectDefaultOptions, - stripVTControlCharacters -}; diff --git a/lib/internal/inspect.js b/lib/internal/inspect.js deleted file mode 100644 index bbf1224a0e..0000000000 --- a/lib/internal/inspect.js +++ /dev/null @@ -1,2299 +0,0 @@ -'use strict'; - -const { - Array, - ArrayIsArray, - ArrayPrototypeFilter, - ArrayPrototypeForEach, - ArrayPrototypePop, - ArrayPrototypePush, - ArrayPrototypePushApply, - ArrayPrototypeSort, - ArrayPrototypeUnshift, - BigIntPrototypeValueOf, - BooleanPrototypeValueOf, - DatePrototypeGetTime, - DatePrototypeToISOString, - DatePrototypeToString, - ErrorPrototypeToString, - FunctionPrototypeCall, - FunctionPrototypeToString, - JSONStringify, - MapPrototypeGetSize, - MapPrototypeEntries, - MathFloor, - MathMax, - MathMin, - MathRound, - MathSqrt, - MathTrunc, - Number, - NumberIsFinite, - NumberIsNaN, - NumberParseFloat, - NumberParseInt, - NumberPrototypeValueOf, - Object, - ObjectAssign, - ObjectCreate, - ObjectDefineProperty, - ObjectGetOwnPropertyDescriptor, - ObjectGetOwnPropertyNames, - ObjectGetOwnPropertySymbols, - ObjectGetPrototypeOf, - ObjectIs, - ObjectKeys, - ObjectPrototypeHasOwnProperty, - ObjectPrototypePropertyIsEnumerable, - ObjectSeal, - ObjectSetPrototypeOf, - ReflectOwnKeys, - RegExp, - RegExpPrototypeTest, - RegExpPrototypeToString, - SafeStringIterator, - SafeMap, - SafeSet, - SetPrototypeGetSize, - SetPrototypeValues, - String, - StringPrototypeCharCodeAt, - StringPrototypeCodePointAt, - StringPrototypeIncludes, - StringPrototypeNormalize, - StringPrototypePadEnd, - StringPrototypePadStart, - StringPrototypeRepeat, - StringPrototypeReplace, - StringPrototypeSlice, - StringPrototypeSplit, - StringPrototypeToLowerCase, - StringPrototypeTrim, - StringPrototypeValueOf, - SymbolPrototypeToString, - SymbolPrototypeValueOf, - SymbolIterator, - SymbolToStringTag, - TypedArrayPrototypeGetLength, - TypedArrayPrototypeGetSymbolToStringTag, - Uint8Array, - globalThis, - uncurryThis, -} = require('./primordials'); - -const { - getOwnNonIndexProperties, - getPromiseDetails, - getProxyDetails, - kPending, - kRejected, - previewEntries, - getConstructorName: internalGetConstructorName, - getExternalValue, - propertyFilter: { - ALL_PROPERTIES, - ONLY_ENUMERABLE - } -} = require('../util'); - -const { - customInspectSymbol, - isError, - join, - removeColors -} = require('../util'); - -const { - codes: { - ERR_INVALID_ARG_TYPE - }, - isStackOverflowError -} = require('./errors'); - -const { - isAsyncFunction, - isGeneratorFunction, - isAnyArrayBuffer, - isArrayBuffer, - isArgumentsObject, - isBoxedPrimitive, - isDataView, - isExternal, - isMap, - isMapIterator, - isModuleNamespaceObject, - isNativeError, - isPromise, - isSet, - isSetIterator, - isWeakMap, - isWeakSet, - isRegExp, - isDate, - isTypedArray, - isStringObject, - isNumberObject, - isBooleanObject, - isBigIntObject, -} = require('../util'); - -const assert = require('assert'); - -const { NativeModule } = - { - NativeModule: { - exists() { - return false; - } - } - } - -const { - validateObject, - validateString, -} = require('./validators'); - -let hexSlice; - -const builtInObjects = new SafeSet( - ArrayPrototypeFilter( - ObjectGetOwnPropertyNames(globalThis), - (e) => RegExpPrototypeTest(/^[A-Z][a-zA-Z0-9]+$/, e) - ) -); - -// https://tc39.es/ecma262/#sec-IsHTMLDDA-internal-slot -const 
isUndetectableObject = (v) => typeof v === 'undefined' && v !== undefined; - -// These options must stay in sync with `getUserOptions`. So if any option will -// be added or removed, `getUserOptions` must also be updated accordingly. -const inspectDefaultOptions = ObjectSeal({ - showHidden: false, - depth: 2, - colors: false, - customInspect: true, - showProxy: false, - maxArrayLength: 100, - maxStringLength: 10000, - breakLength: 80, - compact: 3, - sorted: false, - getters: false, - numericSeparator: false, -}); - -const kObjectType = 0; -const kArrayType = 1; -const kArrayExtrasType = 2; - -/* eslint-disable no-control-regex */ -const strEscapeSequencesRegExp = /[\x00-\x1f\x27\x5c\x7f-\x9f]|[\ud800-\udbff](?![\udc00-\udfff])|(?<~]))'; -const ansi = new RegExp(ansiPattern, 'g'); - -let getStringWidth; - -function getUserOptions(ctx, isCrossContext) { - const ret = { - stylize: ctx.stylize, - showHidden: ctx.showHidden, - depth: ctx.depth, - colors: ctx.colors, - customInspect: ctx.customInspect, - showProxy: ctx.showProxy, - maxArrayLength: ctx.maxArrayLength, - maxStringLength: ctx.maxStringLength, - breakLength: ctx.breakLength, - compact: ctx.compact, - sorted: ctx.sorted, - getters: ctx.getters, - numericSeparator: ctx.numericSeparator, - ...ctx.userOptions - }; - - // Typically, the target value will be an instance of `Object`. If that is - // *not* the case, the object may come from another vm.Context, and we want - // to avoid passing it objects from this Context in that case, so we remove - // the prototype from the returned object itself + the `stylize()` function, - // and remove all other non-primitives, including non-primitive user options. - if (isCrossContext) { - ObjectSetPrototypeOf(ret, null); - for (const key of ObjectKeys(ret)) { - if ((typeof ret[key] === 'object' || typeof ret[key] === 'function') && - ret[key] !== null) { - delete ret[key]; - } - } - ret.stylize = ObjectSetPrototypeOf((value, flavour) => { - let stylized; - try { - stylized = `${ctx.stylize(value, flavour)}`; - } catch { - // Continue regardless of error. - } - - if (typeof stylized !== 'string') return value; - // `stylized` is a string as it should be, which is safe to pass along. - return stylized; - }, null); - } - - return ret; -} - -/** - * Echos the value of any input. Tries to print the value out - * in the best way possible given the different types. - * - * @param {any} value The value to print out. - * @param {object} opts Optional options object that alters the output. - */ -/* Legacy: value, showHidden, depth, colors */ -function inspect(value, opts) { - // Default options - const ctx = { - budget: {}, - indentationLvl: 0, - seen: [], - currentDepth: 0, - stylize: stylizeNoColor, - showHidden: inspectDefaultOptions.showHidden, - depth: inspectDefaultOptions.depth, - colors: inspectDefaultOptions.colors, - customInspect: inspectDefaultOptions.customInspect, - showProxy: inspectDefaultOptions.showProxy, - maxArrayLength: inspectDefaultOptions.maxArrayLength, - maxStringLength: inspectDefaultOptions.maxStringLength, - breakLength: inspectDefaultOptions.breakLength, - compact: inspectDefaultOptions.compact, - sorted: inspectDefaultOptions.sorted, - getters: inspectDefaultOptions.getters, - numericSeparator: inspectDefaultOptions.numericSeparator, - }; - if (arguments.length > 1) { - // Legacy... 
- if (arguments.length > 2) { - if (arguments[2] !== undefined) { - ctx.depth = arguments[2]; - } - if (arguments.length > 3 && arguments[3] !== undefined) { - ctx.colors = arguments[3]; - } - } - // Set user-specified options - if (typeof opts === 'boolean') { - ctx.showHidden = opts; - } else if (opts) { - const optKeys = ObjectKeys(opts); - for (let i = 0; i < optKeys.length; ++i) { - const key = optKeys[i]; - // TODO(BridgeAR): Find a solution what to do about stylize. Either make - // this function public or add a new API with a similar or better - // functionality. - if ( - ObjectPrototypeHasOwnProperty(inspectDefaultOptions, key) || - key === 'stylize') { - ctx[key] = opts[key]; - } else if (ctx.userOptions === undefined) { - // This is required to pass through the actual user input. - ctx.userOptions = opts; - } - } - } - } - if (ctx.colors) ctx.stylize = stylizeWithColor; - if (ctx.maxArrayLength === null) ctx.maxArrayLength = Infinity; - if (ctx.maxStringLength === null) ctx.maxStringLength = Infinity; - return formatValue(ctx, value, 0); -} -inspect.custom = customInspectSymbol; - -ObjectDefineProperty(inspect, 'defaultOptions', { - get() { - return inspectDefaultOptions; - }, - set(options) { - validateObject(options, 'options'); - return ObjectAssign(inspectDefaultOptions, options); - } -}); - -// Set Graphics Rendition https://en.wikipedia.org/wiki/ANSI_escape_code#graphics -// Each color consists of an array with the color code as first entry and the -// reset code as second entry. -const defaultFG = 39; -const defaultBG = 49; -inspect.colors = ObjectAssign(ObjectCreate(null), { - reset: [0, 0], - bold: [1, 22], - dim: [2, 22], // Alias: faint - italic: [3, 23], - underline: [4, 24], - blink: [5, 25], - // Swap foreground and background colors - inverse: [7, 27], // Alias: swapcolors, swapColors - hidden: [8, 28], // Alias: conceal - strikethrough: [9, 29], // Alias: strikeThrough, crossedout, crossedOut - doubleunderline: [21, 24], // Alias: doubleUnderline - black: [30, defaultFG], - red: [31, defaultFG], - green: [32, defaultFG], - yellow: [33, defaultFG], - blue: [34, defaultFG], - magenta: [35, defaultFG], - cyan: [36, defaultFG], - white: [37, defaultFG], - bgBlack: [40, defaultBG], - bgRed: [41, defaultBG], - bgGreen: [42, defaultBG], - bgYellow: [43, defaultBG], - bgBlue: [44, defaultBG], - bgMagenta: [45, defaultBG], - bgCyan: [46, defaultBG], - bgWhite: [47, defaultBG], - framed: [51, 54], - overlined: [53, 55], - gray: [90, defaultFG], // Alias: grey, blackBright - redBright: [91, defaultFG], - greenBright: [92, defaultFG], - yellowBright: [93, defaultFG], - blueBright: [94, defaultFG], - magentaBright: [95, defaultFG], - cyanBright: [96, defaultFG], - whiteBright: [97, defaultFG], - bgGray: [100, defaultBG], // Alias: bgGrey, bgBlackBright - bgRedBright: [101, defaultBG], - bgGreenBright: [102, defaultBG], - bgYellowBright: [103, defaultBG], - bgBlueBright: [104, defaultBG], - bgMagentaBright: [105, defaultBG], - bgCyanBright: [106, defaultBG], - bgWhiteBright: [107, defaultBG], -}); - -function defineColorAlias(target, alias) { - ObjectDefineProperty(inspect.colors, alias, { - get() { - return this[target]; - }, - set(value) { - this[target] = value; - }, - configurable: true, - enumerable: false - }); -} - -defineColorAlias('gray', 'grey'); -defineColorAlias('gray', 'blackBright'); -defineColorAlias('bgGray', 'bgGrey'); -defineColorAlias('bgGray', 'bgBlackBright'); -defineColorAlias('dim', 'faint'); -defineColorAlias('strikethrough', 'crossedout'); 
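// --- Editor's note: illustrative sketch only, not part of the removed
// source. The defineColorAlias() calls around this point forward reads and
// writes to the target colour, and stylizeWithColor() wraps styled text in
// the matching escape codes, e.g. (assuming the inspect exported here):
//
//   inspect.colors.grey === inspect.colors.gray  // -> true (alias getter)
//   inspect(42, { colors: true });               // numbers use styles.number = 'yellow'
//   // -> '\u001b[33m42\u001b[39m'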
-defineColorAlias('strikethrough', 'strikeThrough'); -defineColorAlias('strikethrough', 'crossedOut'); -defineColorAlias('hidden', 'conceal'); -defineColorAlias('inverse', 'swapColors'); -defineColorAlias('inverse', 'swapcolors'); -defineColorAlias('doubleunderline', 'doubleUnderline'); - -// TODO(BridgeAR): Add function style support for more complex styles. -// Don't use 'blue' not visible on cmd.exe -inspect.styles = ObjectAssign(ObjectCreate(null), { - special: 'cyan', - number: 'yellow', - bigint: 'yellow', - boolean: 'yellow', - undefined: 'grey', - null: 'bold', - string: 'green', - symbol: 'green', - date: 'magenta', - // "name": intentionally not styling - // TODO(BridgeAR): Highlight regular expressions properly. - regexp: 'red', - module: 'underline' -}); - -function addQuotes(str, quotes) { - if (quotes === -1) { - return `"${str}"`; - } - if (quotes === -2) { - return `\`${str}\``; - } - return `'${str}'`; -} - -function escapeFn(str) { - const charCode = StringPrototypeCharCodeAt(str); - return meta.length > charCode ? meta[charCode] : `\\u${charCode.toString(16)}`; -} - -// Escape control characters, single quotes and the backslash. -// This is similar to JSON stringify escaping. -function strEscape(str) { - let escapeTest = strEscapeSequencesRegExp; - let escapeReplace = strEscapeSequencesReplacer; - let singleQuote = 39; - - // Check for double quotes. If not present, do not escape single quotes and - // instead wrap the text in double quotes. If double quotes exist, check for - // backticks. If they do not exist, use those as fallback instead of the - // double quotes. - if (StringPrototypeIncludes(str, "'")) { - // This invalidates the charCode and therefore can not be matched for - // anymore. - if (!StringPrototypeIncludes(str, '"')) { - singleQuote = -1; - } else if (!StringPrototypeIncludes(str, '`') && - !StringPrototypeIncludes(str, '${')) { - singleQuote = -2; - } - if (singleQuote !== 39) { - escapeTest = strEscapeSequencesRegExpSingle; - escapeReplace = strEscapeSequencesReplacerSingle; - } - } - - // Some magic numbers that worked out fine while benchmarking with v8 6.0 - if (str.length < 5000 && !RegExpPrototypeTest(escapeTest, str)) - return addQuotes(str, singleQuote); - if (str.length > 100) { - str = StringPrototypeReplace(str, escapeReplace, escapeFn); - return addQuotes(str, singleQuote); - } - - let result = ''; - let last = 0; - for (let i = 0; i < str.length; i++) { - const point = StringPrototypeCharCodeAt(str, i); - if (point === singleQuote || - point === 92 || - point < 32 || - (point > 126 && point < 160)) { - if (last === i) { - result += meta[point]; - } else { - result += `${StringPrototypeSlice(str, last, i)}${meta[point]}`; - } - last = i + 1; - } else if (point >= 0xd800 && point <= 0xdfff) { - if (point <= 0xdbff && i + 1 < str.length) { - const point = StringPrototypeCharCodeAt(str, i + 1); - if (point >= 0xdc00 && point <= 0xdfff) { - i++; - continue; - } - } - result += `${StringPrototypeSlice(str, last, i)}${`\\u${point.toString(16)}`}`; - last = i + 1; - } - } - - if (last !== str.length) { - result += StringPrototypeSlice(str, last); - } - return addQuotes(result, singleQuote); -} - -function stylizeWithColor(str, styleType) { - const style = inspect.styles[styleType]; - if (style !== undefined) { - const color = inspect.colors[style]; - if (color !== undefined) - return `\u001b[${color[0]}m${str}\u001b[${color[1]}m`; - } - return str; -} - -function stylizeNoColor(str) { - return str; -} - -// Return a new empty array to push in the 
results of the default formatter. -function getEmptyFormatArray() { - return []; -} - -function isInstanceof(object, proto) { - try { - return object instanceof proto; - } catch { - return false; - } -} - -function getConstructorName(obj, ctx, recurseTimes, protoProps) { - let firstProto; - const tmp = obj; - while (obj || isUndetectableObject(obj)) { - const descriptor = ObjectGetOwnPropertyDescriptor(obj, 'constructor'); - if (descriptor !== undefined && - typeof descriptor.value === 'function' && - descriptor.value.name !== '' && - isInstanceof(tmp, descriptor.value)) { - if (protoProps !== undefined && - (firstProto !== obj || - !builtInObjects.has(descriptor.value.name))) { - addPrototypeProperties( - ctx, tmp, firstProto || tmp, recurseTimes, protoProps); - } - return descriptor.value.name; - } - - obj = ObjectGetPrototypeOf(obj); - if (firstProto === undefined) { - firstProto = obj; - } - } - - if (firstProto === null) { - return null; - } - - const res = internalGetConstructorName(tmp); - - if (recurseTimes > ctx.depth && ctx.depth !== null) { - return `${res} `; - } - - const protoConstr = getConstructorName( - firstProto, ctx, recurseTimes + 1, protoProps); - - if (protoConstr === null) { - return `${res} <${inspect(firstProto, { - ...ctx, - customInspect: false, - depth: -1 - })}>`; - } - - return `${res} <${protoConstr}>`; -} - -// This function has the side effect of adding prototype properties to the -// `output` argument (which is an array). This is intended to highlight user -// defined prototype properties. -function addPrototypeProperties(ctx, main, obj, recurseTimes, output) { - let depth = 0; - let keys; - let keySet; - do { - if (depth !== 0 || main === obj) { - obj = ObjectGetPrototypeOf(obj); - // Stop as soon as a null prototype is encountered. - if (obj === null) { - return; - } - // Stop as soon as a built-in object type is detected. - const descriptor = ObjectGetOwnPropertyDescriptor(obj, 'constructor'); - if (descriptor !== undefined && - typeof descriptor.value === 'function' && - builtInObjects.has(descriptor.value.name)) { - return; - } - } - - if (depth === 0) { - keySet = new SafeSet(); - } else { - ArrayPrototypeForEach(keys, (key) => keySet.add(key)); - } - // Get all own property names and symbols. - keys = ReflectOwnKeys(obj); - ArrayPrototypePush(ctx.seen, main); - for (const key of keys) { - // Ignore the `constructor` property and keys that exist on layers above. - if (key === 'constructor' || - ObjectPrototypeHasOwnProperty(main, key) || - (depth !== 0 && keySet.has(key))) { - continue; - } - const desc = ObjectGetOwnPropertyDescriptor(obj, key); - if (typeof desc.value === 'function') { - continue; - } - const value = formatProperty( - ctx, obj, recurseTimes, key, kObjectType, desc, main); - if (ctx.colors) { - // Faint! - ArrayPrototypePush(output, `\u001b[2m${value}\u001b[22m`); - } else { - ArrayPrototypePush(output, value); - } - } - ArrayPrototypePop(ctx.seen); - // Limit the inspection to up to three prototype layers. Using `recurseTimes` - // is not a good choice here, because it's as if the properties are declared - // on the current object from the users perspective. 
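// --- Editor's note: illustrative sketch only, not part of the removed
// source. addPrototypeProperties() is why `showHidden` also surfaces
// non-function properties (such as accessors) defined on up to three
// prototype layers, roughly:
//
//   class A { get x() { return 1; } }
//   inspect(new A(), { showHidden: true });
//   // -> 'A { [x]: [Getter] }'  (rendered dim when colors are enabled)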
- } while (++depth !== 3); -} - -function getPrefix(constructor, tag, fallback, size = '') { - if (constructor === null) { - if (tag !== '' && fallback !== tag) { - return `[${fallback}${size}: null prototype] [${tag}] `; - } - return `[${fallback}${size}: null prototype] `; - } - - if (tag !== '' && constructor !== tag) { - return `${constructor}${size} [${tag}] `; - } - return `${constructor}${size} `; -} - -// Look up the keys of the object. -function getKeys(value, showHidden) { - let keys; - const symbols = ObjectGetOwnPropertySymbols(value); - if (showHidden) { - keys = ObjectGetOwnPropertyNames(value); - if (symbols.length !== 0) - ArrayPrototypePushApply(keys, symbols); - } else { - // This might throw if `value` is a Module Namespace Object from an - // unevaluated module, but we don't want to perform the actual type - // check because it's expensive. - // TODO(devsnek): track https://github.com/tc39/ecma262/issues/1209 - // and modify this logic as needed. - try { - keys = ObjectKeys(value); - } catch (err) { - assert(isNativeError(err) && err.name === 'ReferenceError' && - isModuleNamespaceObject(value)); - keys = ObjectGetOwnPropertyNames(value); - } - if (symbols.length !== 0) { - const filter = (key) => ObjectPrototypePropertyIsEnumerable(value, key); - ArrayPrototypePushApply(keys, ArrayPrototypeFilter(symbols, filter)); - } - } - return keys; -} - -function getCtxStyle(value, constructor, tag) { - let fallback = ''; - if (constructor === null) { - fallback = internalGetConstructorName(value); - if (fallback === tag) { - fallback = 'Object'; - } - } - return getPrefix(constructor, tag, fallback); -} - -function formatProxy(ctx, proxy, recurseTimes) { - if (recurseTimes > ctx.depth && ctx.depth !== null) { - return ctx.stylize('Proxy [Array]', 'special'); - } - recurseTimes += 1; - ctx.indentationLvl += 2; - const res = [ - formatValue(ctx, proxy[0], recurseTimes), - formatValue(ctx, proxy[1], recurseTimes), - ]; - ctx.indentationLvl -= 2; - return reduceToSingleString( - ctx, res, '', ['Proxy [', ']'], kArrayExtrasType, recurseTimes); -} - -// Note: using `formatValue` directly requires the indentation level to be -// corrected by setting `ctx.indentationLvL += diff` and then to decrease the -// value afterwards again. -function formatValue(ctx, value, recurseTimes, typedArray) { - // Primitive types cannot have properties. - if (typeof value !== 'object' && - typeof value !== 'function' && - !isUndetectableObject(value)) { - return formatPrimitive(ctx.stylize, value, ctx); - } - if (value === null) { - return ctx.stylize('null', 'null'); - } - - // Memorize the context for custom inspection on proxies. - const context = value; - // Always check for proxies to prevent side effects and to prevent triggering - // any proxy handlers. - const proxy = getProxyDetails(value, !!ctx.showProxy); - if (proxy !== undefined) { - if (ctx.showProxy) { - return formatProxy(ctx, proxy, recurseTimes); - } - value = proxy; - } - - // Provide a hook for user-specified inspect functions. - // Check that value is an object with an inspect function on it. - if (ctx.customInspect) { - const maybeCustom = value[customInspectSymbol]; - if (typeof maybeCustom === 'function' && - // Filter out the util module, its inspect function is special. - maybeCustom !== inspect && - // Also filter out any prototype objects using the circular check. 
- !(value.constructor && value.constructor.prototype === value)) { - // This makes sure the recurseTimes are reported as before while using - // a counter internally. - const depth = ctx.depth === null ? null : ctx.depth - recurseTimes; - const isCrossContext = - proxy !== undefined || !(context instanceof Object); - const ret = FunctionPrototypeCall( - maybeCustom, - context, - depth, - getUserOptions(ctx, isCrossContext), - inspect - ); - // If the custom inspection method returned `this`, don't go into - // infinite recursion. - if (ret !== context) { - if (typeof ret !== 'string') { - return formatValue(ctx, ret, recurseTimes); - } - return ret.replace(/\n/g, `\n${' '.repeat(ctx.indentationLvl)}`); - } - } - } - - // Using an array here is actually better for the average case than using - // a Set. `seen` will only check for the depth and will never grow too large. - if (ctx.seen.includes(value)) { - let index = 1; - if (ctx.circular === undefined) { - ctx.circular = new SafeMap(); - ctx.circular.set(value, index); - } else { - index = ctx.circular.get(value); - if (index === undefined) { - index = ctx.circular.size + 1; - ctx.circular.set(value, index); - } - } - return ctx.stylize(`[Circular *${index}]`, 'special'); - } - - return formatRaw(ctx, value, recurseTimes, typedArray); -} - -function formatRaw(ctx, value, recurseTimes, typedArray) { - let keys; - let protoProps; - if (ctx.showHidden && (recurseTimes <= ctx.depth || ctx.depth === null)) { - protoProps = []; - } - - const constructor = getConstructorName(value, ctx, recurseTimes, protoProps); - // Reset the variable to check for this later on. - if (protoProps !== undefined && protoProps.length === 0) { - protoProps = undefined; - } - - let tag = value[SymbolToStringTag]; - // Only list the tag in case it's non-enumerable / not an own property. - // Otherwise we'd print this twice. - if (typeof tag !== 'string' || - (tag !== '' && - (ctx.showHidden ? - ObjectPrototypeHasOwnProperty : - ObjectPrototypePropertyIsEnumerable)( - value, SymbolToStringTag - ))) { - tag = ''; - } - let base = ''; - let formatter = getEmptyFormatArray; - let braces; - let noIterator = true; - let i = 0; - const filter = ctx.showHidden ? ALL_PROPERTIES : ONLY_ENUMERABLE; - - let extrasType = kObjectType; - - // Iterators and the rest are split to reduce checks. - // We have to check all values in case the constructor is set to null. - // Otherwise it would not possible to identify all types properly. - if (value[SymbolIterator] || constructor === null) { - noIterator = false; - if (ArrayIsArray(value)) { - // Only set the constructor for non ordinary ("Array [...]") arrays. - const prefix = (constructor !== 'Array' || tag !== '') ? - getPrefix(constructor, tag, 'Array', `(${value.length})`) : - ''; - keys = getOwnNonIndexProperties(value, filter); - braces = [`${prefix}[`, ']']; - if (value.length === 0 && keys.length === 0 && protoProps === undefined) - return `${braces[0]}]`; - extrasType = kArrayExtrasType; - formatter = formatArray; - } else if (isSet(value)) { - const size = SetPrototypeGetSize(value); - const prefix = getPrefix(constructor, tag, 'Set', `(${size})`); - keys = getKeys(value, ctx.showHidden); - formatter = constructor !== null ? 
- formatSet.bind(null, value) : - formatSet.bind(null, SetPrototypeValues(value)); - if (size === 0 && keys.length === 0 && protoProps === undefined) - return `${prefix}{}`; - braces = [`${prefix}{`, '}']; - } else if (isMap(value)) { - const size = MapPrototypeGetSize(value); - const prefix = getPrefix(constructor, tag, 'Map', `(${size})`); - keys = getKeys(value, ctx.showHidden); - formatter = constructor !== null ? - formatMap.bind(null, value) : - formatMap.bind(null, MapPrototypeEntries(value)); - if (size === 0 && keys.length === 0 && protoProps === undefined) - return `${prefix}{}`; - braces = [`${prefix}{`, '}']; - } else if (isTypedArray(value)) { - keys = getOwnNonIndexProperties(value, filter); - let bound = value; - let fallback = ''; - if (constructor === null) { - fallback = TypedArrayPrototypeGetSymbolToStringTag(value); - // Reconstruct the array information. - bound = new primordials[fallback](value); - } - const size = TypedArrayPrototypeGetLength(value); - const prefix = getPrefix(constructor, tag, fallback, `(${size})`); - braces = [`${prefix}[`, ']']; - if (value.length === 0 && keys.length === 0 && !ctx.showHidden) - return `${braces[0]}]`; - // Special handle the value. The original value is required below. The - // bound function is required to reconstruct missing information. - formatter = formatTypedArray.bind(null, bound, size); - extrasType = kArrayExtrasType; - } else if (isMapIterator(value)) { - keys = getKeys(value, ctx.showHidden); - braces = getIteratorBraces('Map', tag); - // Add braces to the formatter parameters. - formatter = formatIterator.bind(null, braces); - } else if (isSetIterator(value)) { - keys = getKeys(value, ctx.showHidden); - braces = getIteratorBraces('Set', tag); - // Add braces to the formatter parameters. - formatter = formatIterator.bind(null, braces); - } else { - noIterator = true; - } - } - if (noIterator) { - keys = getKeys(value, ctx.showHidden); - braces = ['{', '}']; - if (constructor === 'Object') { - if (isArgumentsObject(value)) { - braces[0] = '[Arguments] {'; - } else if (tag !== '') { - braces[0] = `${getPrefix(constructor, tag, 'Object')}{`; - } - if (keys.length === 0 && protoProps === undefined) { - return `${braces[0]}}`; - } - } else if (typeof value === 'function') { - base = getFunctionBase(value, constructor, tag); - if (keys.length === 0 && protoProps === undefined) - return ctx.stylize(base, 'special'); - } else if (isRegExp(value)) { - // Make RegExps say that they are RegExps - base = RegExpPrototypeToString( - constructor !== null ? value : new RegExp(value) - ); - const prefix = getPrefix(constructor, tag, 'RegExp'); - if (prefix !== 'RegExp ') - base = `${prefix}${base}`; - if ((keys.length === 0 && protoProps === undefined) || - (recurseTimes > ctx.depth && ctx.depth !== null)) { - return ctx.stylize(base, 'regexp'); - } - } else if (isDate(value)) { - // Make dates with properties first say the date - base = NumberIsNaN(DatePrototypeGetTime(value)) ? - DatePrototypeToString(value) : - DatePrototypeToISOString(value); - const prefix = getPrefix(constructor, tag, 'Date'); - if (prefix !== 'Date ') - base = `${prefix}${base}`; - if (keys.length === 0 && protoProps === undefined) { - return ctx.stylize(base, 'date'); - } - } else if (isError(value)) { - base = formatError(value, constructor, tag, ctx, keys); - if (keys.length === 0 && protoProps === undefined) - return base; - } else if (isAnyArrayBuffer(value)) { - // Fast path for ArrayBuffer and SharedArrayBuffer. 
- // Can't do the same for DataView because it has a non-primitive - // .buffer property that we need to recurse for. - const arrayType = isArrayBuffer(value) ? 'ArrayBuffer' : - 'SharedArrayBuffer'; - const prefix = getPrefix(constructor, tag, arrayType); - if (typedArray === undefined) { - formatter = formatArrayBuffer; - } else if (keys.length === 0 && protoProps === undefined) { - return prefix + - `{ byteLength: ${formatNumber(ctx.stylize, value.byteLength, false)} }`; - } - braces[0] = `${prefix}{`; - ArrayPrototypeUnshift(keys, 'byteLength'); - } else if (isDataView(value)) { - braces[0] = `${getPrefix(constructor, tag, 'DataView')}{`; - // .buffer goes last, it's not a primitive like the others. - ArrayPrototypeUnshift(keys, 'byteLength', 'byteOffset', 'buffer'); - } else if (isPromise(value)) { - braces[0] = `${getPrefix(constructor, tag, 'Promise')}{`; - formatter = formatPromise; - } else if (isWeakSet(value)) { - braces[0] = `${getPrefix(constructor, tag, 'WeakSet')}{`; - formatter = ctx.showHidden ? formatWeakSet : formatWeakCollection; - } else if (isWeakMap(value)) { - braces[0] = `${getPrefix(constructor, tag, 'WeakMap')}{`; - formatter = ctx.showHidden ? formatWeakMap : formatWeakCollection; - } else if (isModuleNamespaceObject(value)) { - braces[0] = `${getPrefix(constructor, tag, 'Module')}{`; - // Special handle keys for namespace objects. - formatter = formatNamespaceObject.bind(null, keys); - } else if (isBoxedPrimitive(value)) { - base = getBoxedBase(value, ctx, keys, constructor, tag); - if (keys.length === 0 && protoProps === undefined) { - return base; - } - } else { - if (keys.length === 0 && protoProps === undefined) { - if (isExternal(value)) { - const address = getExternalValue(value).toString(16); - return ctx.stylize(`[External: ${address}]`, 'special'); - } - return `${getCtxStyle(value, constructor, tag)}{}`; - } - braces[0] = `${getCtxStyle(value, constructor, tag)}{`; - } - } - - if (recurseTimes > ctx.depth && ctx.depth !== null) { - let constructorName = getCtxStyle(value, constructor, tag).slice(0, -1); - if (constructor !== null) - constructorName = `[${constructorName}]`; - return ctx.stylize(constructorName, 'special'); - } - recurseTimes += 1; - - ctx.seen.push(value); - ctx.currentDepth = recurseTimes; - let output; - const indentationLvl = ctx.indentationLvl; - try { - output = formatter(ctx, value, recurseTimes); - for (i = 0; i < keys.length; i++) { - output.push( - formatProperty(ctx, value, recurseTimes, keys[i], extrasType)); - } - if (protoProps !== undefined) { - output.push(...protoProps); - } - } catch (err) { - const constructorName = getCtxStyle(value, constructor, tag).slice(0, -1); - return handleMaxCallStackSize(ctx, err, constructorName, indentationLvl); - } - if (ctx.circular !== undefined) { - const index = ctx.circular.get(value); - if (index !== undefined) { - const reference = ctx.stylize(``, 'special'); - // Add reference always to the very beginning of the output. - if (ctx.compact !== true) { - base = base === '' ? reference : `${reference} ${base}`; - } else { - braces[0] = `${reference} ${braces[0]}`; - } - } - } - ctx.seen.pop(); - - if (ctx.sorted) { - const comparator = ctx.sorted === true ? 
undefined : ctx.sorted; - if (extrasType === kObjectType) { - output = output.sort(comparator); - } else if (keys.length > 1) { - const sorted = output.slice(output.length - keys.length).sort(comparator); - output.splice(output.length - keys.length, keys.length, ...sorted); - } - } - - const res = reduceToSingleString( - ctx, output, base, braces, extrasType, recurseTimes, value); - const budget = ctx.budget[ctx.indentationLvl] || 0; - const newLength = budget + res.length; - ctx.budget[ctx.indentationLvl] = newLength; - // If any indentationLvl exceeds this limit, limit further inspecting to the - // minimum. Otherwise the recursive algorithm might continue inspecting the - // object even though the maximum string size (~2 ** 28 on 32 bit systems and - // ~2 ** 30 on 64 bit systems) exceeded. The actual output is not limited at - // exactly 2 ** 27 but a bit higher. This depends on the object shape. - // This limit also makes sure that huge objects don't block the event loop - // significantly. - if (newLength > 2 ** 27) { - ctx.depth = -1; - } - return res; -} - -function getIteratorBraces(type, tag) { - if (tag !== `${type} Iterator`) { - if (tag !== '') - tag += '] ['; - tag += `${type} Iterator`; - } - return [`[${tag}] {`, '}']; -} - -function getBoxedBase(value, ctx, keys, constructor, tag) { - let fn; - let type; - if (isNumberObject(value)) { - fn = NumberPrototypeValueOf; - type = 'Number'; - } else if (isStringObject(value)) { - fn = StringPrototypeValueOf; - type = 'String'; - // For boxed Strings, we have to remove the 0-n indexed entries, - // since they just noisy up the output and are redundant - // Make boxed primitive Strings look like such - keys.splice(0, value.length); - } else if (isBooleanObject(value)) { - fn = BooleanPrototypeValueOf; - type = 'Boolean'; - } else if (isBigIntObject(value)) { - fn = BigIntPrototypeValueOf; - type = 'BigInt'; - } else { - fn = SymbolPrototypeValueOf; - type = 'Symbol'; - } - let base = `[${type}`; - if (type !== constructor) { - if (constructor === null) { - base += ' (null prototype)'; - } else { - base += ` (${constructor})`; - } - } - base += `: ${formatPrimitive(stylizeNoColor, fn(value), ctx)}]`; - if (tag !== '' && tag !== constructor) { - base += ` [${tag}]`; - } - if (keys.length !== 0 || ctx.stylize === stylizeNoColor) - return base; - return ctx.stylize(base, StringPrototypeToLowerCase(type)); -} - -function getClassBase(value, constructor, tag) { - const hasName = ObjectPrototypeHasOwnProperty(value, 'name'); - const name = (hasName && value.name) || '(anonymous)'; - let base = `class ${name}`; - if (constructor !== 'Function' && constructor !== null) { - base += ` [${constructor}]`; - } - if (tag !== '' && constructor !== tag) { - base += ` [${tag}]`; - } - if (constructor !== null) { - const superName = ObjectGetPrototypeOf(value).name; - if (superName) { - base += ` extends ${superName}`; - } - } else { - base += ' extends [null prototype]'; - } - return `[${base}]`; -} - -function getFunctionBase(value, constructor, tag) { - const stringified = FunctionPrototypeToString(value); - if (stringified.startsWith('class') && stringified.endsWith('}')) { - const slice = stringified.slice(5, -1); - const bracketIndex = slice.indexOf('{'); - if (bracketIndex !== -1 && - (!slice.slice(0, bracketIndex).includes('(') || - // Slow path to guarantee that it's indeed a class. 
- classRegExp.test(slice.replace(stripCommentsRegExp)))) { - return getClassBase(value, constructor, tag); - } - } - let type = 'Function'; - if (isGeneratorFunction(value)) { - type = `Generator${type}`; - } - if (isAsyncFunction(value)) { - type = `Async${type}`; - } - let base = `[${type}`; - if (constructor === null) { - base += ' (null prototype)'; - } - if (value.name === '') { - base += ' (anonymous)'; - } else { - base += `: ${value.name}`; - } - base += ']'; - if (constructor !== type && constructor !== null) { - base += ` ${constructor}`; - } - if (tag !== '' && constructor !== tag) { - base += ` [${tag}]`; - } - return base; -} - -function identicalSequenceRange(a, b) { - for (let i = 0; i < a.length - 3; i++) { - // Find the first entry of b that matches the current entry of a. - const pos = b.indexOf(a[i]); - if (pos !== -1) { - const rest = b.length - pos; - if (rest > 3) { - let len = 1; - const maxLen = MathMin(a.length - i, rest); - // Count the number of consecutive entries. - while (maxLen > len && a[i + len] === b[pos + len]) { - len++; - } - if (len > 3) { - return { len, offset: i }; - } - } - } - } - - return { len: 0, offset: 0 }; -} - -function getStackString(error) { - return error.stack ? String(error.stack) : ErrorPrototypeToString(error); -} - -function getStackFrames(ctx, err, stack) { - const frames = stack.split('\n'); - - // Remove stack frames identical to frames in cause. - if (err.cause && isError(err.cause)) { - const causeStack = getStackString(err.cause); - const causeStackStart = causeStack.indexOf('\n at'); - if (causeStackStart !== -1) { - const causeFrames = causeStack.slice(causeStackStart + 1).split('\n'); - const { len, offset } = identicalSequenceRange(frames, causeFrames); - if (len > 0) { - const skipped = len - 2; - const msg = ` ... ${skipped} lines matching cause stack trace ...`; - frames.splice(offset + 1, skipped, ctx.stylize(msg, 'undefined')); - } - } - } - return frames; -} - -function improveStack(stack, constructor, name, tag) { - // A stack trace may contain arbitrary data. Only manipulate the output - // for "regular errors" (errors that "look normal") for now. - let len = name.length; - - if (constructor === null || - (name.endsWith('Error') && - stack.startsWith(name) && - (stack.length === len || stack[len] === ':' || stack[len] === '\n'))) { - let fallback = 'Error'; - if (constructor === null) { - const start = stack.match(/^([A-Z][a-z_ A-Z0-9[\]()-]+)(?::|\n {4}at)/) || - stack.match(/^([a-z_A-Z0-9-]*Error)$/); - fallback = (start && start[1]) || ''; - len = fallback.length; - fallback = fallback || 'Error'; - } - const prefix = getPrefix(constructor, tag, fallback).slice(0, -1); - if (name !== prefix) { - if (prefix.includes(name)) { - if (len === 0) { - stack = `${prefix}: ${stack}`; - } else { - stack = `${prefix}${stack.slice(len)}`; - } - } else { - stack = `${prefix} [${name}]${stack.slice(len)}`; - } - } - } - return stack; -} - -function removeDuplicateErrorKeys(ctx, keys, err, stack) { - if (!ctx.showHidden && keys.length !== 0) { - for (const name of ['name', 'message', 'stack']) { - const index = keys.indexOf(name); - // Only hide the property in case it's part of the original stack - if (index !== -1 && stack.includes(err[name])) { - keys.splice(index, 1); - } - } - } -} - -function formatError(err, constructor, tag, ctx, keys) { - const name = err.name != null ? 
String(err.name) : 'Error'; - let stack = getStackString(err); - - removeDuplicateErrorKeys(ctx, keys, err, stack); - - if ('cause' in err && - (keys.length === 0 || !keys.includes('cause'))) { - keys.push('cause'); - } - - stack = improveStack(stack, constructor, name, tag); - - // Ignore the error message if it's contained in the stack. - let pos = (err.message && stack.indexOf(err.message)) || -1; - if (pos !== -1) - pos += err.message.length; - // Wrap the error in brackets in case it has no stack trace. - const stackStart = stack.indexOf('\n at', pos); - if (stackStart === -1) { - stack = `[${stack}]`; - } else { - let newStack = stack.slice(0, stackStart); - const lines = getStackFrames(ctx, err, stack.slice(stackStart + 1)); - if (ctx.colors) { - // Highlight userland code and node modules. - for (const line of lines) { - const core = line.match(coreModuleRegExp); - if (core !== null && NativeModule.exists(core[1])) { - newStack += `\n${ctx.stylize(line, 'undefined')}`; - } else { - // This adds underscores to all node_modules to quickly identify them. - let nodeModule; - newStack += '\n'; - let pos = 0; - while ((nodeModule = nodeModulesRegExp.exec(line)) !== null) { - // '/node_modules/'.length === 14 - newStack += line.slice(pos, nodeModule.index + 14); - newStack += ctx.stylize(nodeModule[1], 'module'); - pos = nodeModule.index + nodeModule[0].length; - } - newStack += pos === 0 ? line : line.slice(pos); - } - } - } else { - newStack += `\n${lines.join('\n')}`; - } - stack = newStack; - } - // The message and the stack have to be indented as well! - if (ctx.indentationLvl !== 0) { - const indentation = ' '.repeat(ctx.indentationLvl); - stack = stack.replace(/\n/g, `\n${indentation}`); - } - return stack; -} - -function groupArrayElements(ctx, output, value) { - let totalLength = 0; - let maxLength = 0; - let i = 0; - let outputLength = output.length; - if (ctx.maxArrayLength < output.length) { - // This makes sure the "... n more items" part is not taken into account. - outputLength--; - } - const separatorSpace = 2; // Add 1 for the space and 1 for the separator. - const dataLen = new Array(outputLength); - // Calculate the total length of all output entries and the individual max - // entries length of all output entries. We have to remove colors first, - // otherwise the length would not be calculated properly. - for (; i < outputLength; i++) { - const len = getStringWidth(output[i], ctx.colors); - dataLen[i] = len; - totalLength += len + separatorSpace; - if (maxLength < len) - maxLength = len; - } - // Add two to `maxLength` as we add a single whitespace character plus a comma - // in-between two entries. - const actualMax = maxLength + separatorSpace; - // Check if at least three entries fit next to each other and prevent grouping - // of arrays that contains entries of very different length (i.e., if a single - // entry is longer than 1/5 of all other entries combined). Otherwise the - // space in-between small entries would be enormous. - if (actualMax * 3 + ctx.indentationLvl < ctx.breakLength && - (totalLength / actualMax > 5 || maxLength <= 6)) { - - const approxCharHeights = 2.5; - const averageBias = MathSqrt(actualMax - totalLength / output.length); - const biasedMax = MathMax(actualMax - 3 - averageBias, 1); - // Dynamically check how many columns seem possible. - const columns = MathMin( - // Ideally a square should be drawn. We expect a character to be about 2.5 - // times as high as wide. 
This is the area formula to calculate a square - // which contains n rectangles of size `actualMax * approxCharHeights`. - // Divide that by `actualMax` to receive the correct number of columns. - // The added bias increases the columns for short entries. - MathRound( - MathSqrt( - approxCharHeights * biasedMax * outputLength - ) / biasedMax - ), - // Do not exceed the breakLength. - MathFloor((ctx.breakLength - ctx.indentationLvl) / actualMax), - // Limit array grouping for small `compact` modes as the user requested - // minimal grouping. - ctx.compact * 4, - // Limit the columns to a maximum of fifteen. - 15 - ); - // Return with the original output if no grouping should happen. - if (columns <= 1) { - return output; - } - const tmp = []; - const maxLineLength = []; - for (let i = 0; i < columns; i++) { - let lineMaxLength = 0; - for (let j = i; j < output.length; j += columns) { - if (dataLen[j] > lineMaxLength) - lineMaxLength = dataLen[j]; - } - lineMaxLength += separatorSpace; - maxLineLength[i] = lineMaxLength; - } - let order = StringPrototypePadStart; - if (value !== undefined) { - for (let i = 0; i < output.length; i++) { - if (typeof value[i] !== 'number' && typeof value[i] !== 'bigint') { - order = StringPrototypePadEnd; - break; - } - } - } - // Each iteration creates a single line of grouped entries. - for (let i = 0; i < outputLength; i += columns) { - // The last lines may contain less entries than columns. - const max = MathMin(i + columns, outputLength); - let str = ''; - let j = i; - for (; j < max - 1; j++) { - // Calculate extra color padding in case it's active. This has to be - // done line by line as some lines might contain more colors than - // others. - const padding = maxLineLength[j - i] + output[j].length - dataLen[j]; - str += order(`${output[j]}, `, padding, ' '); - } - if (order === StringPrototypePadStart) { - const padding = maxLineLength[j - i] + - output[j].length - - dataLen[j] - - separatorSpace; - str += StringPrototypePadStart(output[j], padding, ' '); - } else { - str += output[j]; - } - ArrayPrototypePush(tmp, str); - } - if (ctx.maxArrayLength < output.length) { - ArrayPrototypePush(tmp, output[outputLength]); - } - output = tmp; - } - return output; -} - -function handleMaxCallStackSize(ctx, err, constructorName, indentationLvl) { - if (isStackOverflowError(err)) { - ctx.seen.pop(); - ctx.indentationLvl = indentationLvl; - return ctx.stylize( - `[${constructorName}: Inspection interrupted ` + - 'prematurely. Maximum call stack size exceeded.]', - 'special' - ); - } - /* c8 ignore next */ - assert.fail(err.stack); -} - -function addNumericSeparator(integerString) { - let result = ''; - let i = integerString.length; - const start = integerString.startsWith('-') ? 1 : 0; - for (; i >= start + 4; i -= 3) { - result = `_${integerString.slice(i - 3, i)}${result}`; - } - return i === integerString.length ? - integerString : - `${integerString.slice(0, i)}${result}`; -} - -function addNumericSeparatorEnd(integerString) { - let result = ''; - let i = 0; - for (; i < integerString.length - 3; i += 3) { - result += `${integerString.slice(i, i + 3)}_`; - } - return i === 0 ? - integerString : - `${result}${integerString.slice(i)}`; -} - -function formatNumber(fn, number, numericSeparator) { - if (!numericSeparator) { - // Format -0 as '-0'. Checking `number === -0` won't distinguish 0 from -0. 
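// --- Editor's note: illustrative sketch only, not part of the removed
// source. With `numericSeparator: true` the helpers above group digits in
// threes, counting outwards from the decimal point on both sides:
//
//   formatNumber(stylizeNoColor, 1234567.891234, true);  // -> '1_234_567.891_234'
//   formatBigInt(stylizeNoColor, 1234567n, true);        // -> '1_234_567n'
//   formatNumber(stylizeNoColor, -0, false);             // -> '-0' (ObjectIs check below)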
- if (ObjectIs(number, -0)) { - return fn('-0', 'number'); - } - return fn(`${number}`, 'number'); - } - const integer = MathTrunc(number); - const string = String(integer); - if (integer === number) { - if (!NumberIsFinite(number) || string.includes('e')) { - return fn(string, 'number'); - } - return fn(`${addNumericSeparator(string)}`, 'number'); - } - if (NumberIsNaN(number)) { - return fn(string, 'number'); - } - return fn(`${ - addNumericSeparator(string) - }.${ - addNumericSeparatorEnd(String(number).slice(string.length + 1)) - }`, 'number'); -} - -function formatBigInt(fn, bigint, numericSeparator) { - const string = String(bigint); - if (!numericSeparator) { - return fn(`${string}n`, 'bigint'); - } - return fn(`${addNumericSeparator(string)}n`, 'bigint'); -} - -function formatPrimitive(fn, value, ctx) { - if (typeof value === 'string') { - let trailer = ''; - if (value.length > ctx.maxStringLength) { - const remaining = value.length - ctx.maxStringLength; - value = value.slice(0, ctx.maxStringLength); - trailer = `... ${remaining} more character${remaining > 1 ? 's' : ''}`; - } - if (ctx.compact !== true && - // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth - // function. - value.length > kMinLineLength && - value.length > ctx.breakLength - ctx.indentationLvl - 4) { - return value - .split(/(?<=\n)/) - .map((line) => fn(strEscape(line), 'string')) - .join(` +\n${' '.repeat(ctx.indentationLvl + 2)}`) + trailer; - } - return fn(strEscape(value), 'string') + trailer; - } - if (typeof value === 'number') - return formatNumber(fn, value, ctx.numericSeparator); - if (typeof value === 'bigint') - return formatBigInt(fn, value, ctx.numericSeparator); - if (typeof value === 'boolean') - return fn(`${value}`, 'boolean'); - if (typeof value === 'undefined') - return fn('undefined', 'undefined'); - // es6 symbol primitive - return fn(SymbolPrototypeToString(value), 'symbol'); -} - -function formatNamespaceObject(keys, ctx, value, recurseTimes) { - const output = new Array(keys.length); - for (let i = 0; i < keys.length; i++) { - try { - output[i] = formatProperty(ctx, value, recurseTimes, keys[i], - kObjectType); - } catch (err) { - assert(isNativeError(err) && err.name === 'ReferenceError'); - // Use the existing functionality. This makes sure the indentation and - // line breaks are always correct. Otherwise it is very difficult to keep - // this aligned, even though this is a hacky way of dealing with this. - const tmp = { [keys[i]]: '' }; - output[i] = formatProperty(ctx, tmp, recurseTimes, keys[i], kObjectType); - const pos = output[i].lastIndexOf(' '); - // We have to find the last whitespace and have to replace that value as - // it will be visualized as a regular string. - output[i] = output[i].slice(0, pos + 1) + - ctx.stylize('', 'special'); - } - } - // Reset the keys to an empty array. This prevents duplicated inspection. - keys.length = 0; - return output; -} - -// The array is sparse and/or has extra keys -function formatSpecialArray(ctx, value, recurseTimes, maxLength, output, i) { - const keys = ObjectKeys(value); - let index = i; - for (; i < keys.length && output.length < maxLength; i++) { - const key = keys[i]; - const tmp = +key; - // Arrays can only have up to 2^32 - 1 entries - if (tmp > 2 ** 32 - 2) { - break; - } - if (`${index}` !== key) { - if (!numberRegExp.test(key)) { - break; - } - const emptyItems = tmp - index; - const ending = emptyItems > 1 ? 
's' : ''; - const message = `<${emptyItems} empty item${ending}>`; - output.push(ctx.stylize(message, 'undefined')); - index = tmp; - if (output.length === maxLength) { - break; - } - } - output.push(formatProperty(ctx, value, recurseTimes, key, kArrayType)); - index++; - } - const remaining = value.length - index; - if (output.length !== maxLength) { - if (remaining > 0) { - const ending = remaining > 1 ? 's' : ''; - const message = `<${remaining} empty item${ending}>`; - output.push(ctx.stylize(message, 'undefined')); - } - } else if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? 's' : ''}`); - } - return output; -} - -function formatArrayBuffer(ctx, value) { - let buffer; - try { - buffer = new Uint8Array(value); - } catch { - return [ctx.stylize('(detached)', 'special')]; - } - if (hexSlice === undefined) - hexSlice = uncurryThis(require('buffer').Buffer.prototype.hexSlice); - let str = StringPrototypeTrim(StringPrototypeReplace( - hexSlice(buffer, 0, MathMin(ctx.maxArrayLength, buffer.length)), - /(.{2})/g, '$1 ')); - const remaining = buffer.length - ctx.maxArrayLength; - if (remaining > 0) - str += ` ... ${remaining} more byte${remaining > 1 ? 's' : ''}`; - return [`${ctx.stylize('[Uint8Contents]', 'special')}: <${str}>`]; -} - -function formatArray(ctx, value, recurseTimes) { - const valLen = value.length; - const len = MathMin(MathMax(0, ctx.maxArrayLength), valLen); - - const remaining = valLen - len; - const output = []; - for (let i = 0; i < len; i++) { - // Special handle sparse arrays. - if (!ObjectPrototypeHasOwnProperty(value, i)) { - return formatSpecialArray(ctx, value, recurseTimes, len, output, i); - } - output.push(formatProperty(ctx, value, recurseTimes, i, kArrayType)); - } - if (remaining > 0) - output.push(`... ${remaining} more item${remaining > 1 ? 's' : ''}`); - return output; -} - -function formatTypedArray(value, length, ctx, ignored, recurseTimes) { - const maxLength = MathMin(MathMax(0, ctx.maxArrayLength), length); - const remaining = value.length - maxLength; - const output = new Array(maxLength); - const elementFormatter = value.length > 0 && typeof value[0] === 'number' ? - formatNumber : - formatBigInt; - for (let i = 0; i < maxLength; ++i) { - output[i] = elementFormatter(ctx.stylize, value[i], ctx.numericSeparator); - } - if (remaining > 0) { - output[maxLength] = `... ${remaining} more item${remaining > 1 ? 's' : ''}`; - } - if (ctx.showHidden) { - // .buffer goes last, it's not a primitive like the others. - // All besides `BYTES_PER_ELEMENT` are actually getters. 
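// --- Editor's note: illustrative sketch only, not part of the removed
// source. When `showHidden` is set, the loop below appends the typed-array
// getters as extra bracketed entries, roughly:
//
//   inspect(new Uint8Array([1, 2, 3]), { showHidden: true });
//   // lists 1, 2, 3 followed by [BYTES_PER_ELEMENT]: 1, [length]: 3,
//   // [byteLength]: 3, [byteOffset]: 0 and [buffer]: ArrayBuffer { ... }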
- ctx.indentationLvl += 2; - for (const key of [ - 'BYTES_PER_ELEMENT', - 'length', - 'byteLength', - 'byteOffset', - 'buffer', - ]) { - const str = formatValue(ctx, value[key], recurseTimes, true); - ArrayPrototypePush(output, `[${key}]: ${str}`); - } - ctx.indentationLvl -= 2; - } - return output; -} - -function formatSet(value, ctx, ignored, recurseTimes) { - const output = []; - ctx.indentationLvl += 2; - for (const v of value) { - ArrayPrototypePush(output, formatValue(ctx, v, recurseTimes)); - } - ctx.indentationLvl -= 2; - return output; -} - -function formatMap(value, ctx, ignored, recurseTimes) { - const output = []; - ctx.indentationLvl += 2; - for (const { 0: k, 1: v } of value) { - output.push( - `${formatValue(ctx, k, recurseTimes)} => ${formatValue(ctx, v, recurseTimes)}` - ); - } - ctx.indentationLvl -= 2; - return output; -} - -function formatSetIterInner(ctx, recurseTimes, entries, state) { - const maxArrayLength = MathMax(ctx.maxArrayLength, 0); - const maxLength = MathMin(maxArrayLength, entries.length); - const output = new Array(maxLength); - ctx.indentationLvl += 2; - for (let i = 0; i < maxLength; i++) { - output[i] = formatValue(ctx, entries[i], recurseTimes); - } - ctx.indentationLvl -= 2; - if (state === kWeak && !ctx.sorted) { - // Sort all entries to have a halfway reliable output (if more entries than - // retrieved ones exist, we can not reliably return the same output) if the - // output is not sorted anyway. - ArrayPrototypeSort(output); - } - const remaining = entries.length - maxLength; - if (remaining > 0) { - ArrayPrototypePush(output, - `... ${remaining} more item${remaining > 1 ? 's' : ''}`); - } - return output; -} - -function formatMapIterInner(ctx, recurseTimes, entries, state) { - const maxArrayLength = MathMax(ctx.maxArrayLength, 0); - // Entries exist as [key1, val1, key2, val2, ...] - const len = entries.length / 2; - const remaining = len - maxArrayLength; - const maxLength = MathMin(maxArrayLength, len); - let output = new Array(maxLength); - let i = 0; - ctx.indentationLvl += 2; - if (state === kWeak) { - for (; i < maxLength; i++) { - const pos = i * 2; - output[i] = - `${formatValue(ctx, entries[pos], recurseTimes)} => ${formatValue(ctx, entries[pos + 1], recurseTimes)}`; - } - // Sort all entries to have a halfway reliable output (if more entries than - // retrieved ones exist, we can not reliably return the same output) if the - // output is not sorted anyway. - if (!ctx.sorted) - output = output.sort(); - } else { - for (; i < maxLength; i++) { - const pos = i * 2; - const res = [ - formatValue(ctx, entries[pos], recurseTimes), - formatValue(ctx, entries[pos + 1], recurseTimes), - ]; - output[i] = reduceToSingleString( - ctx, res, '', ['[', ']'], kArrayExtrasType, recurseTimes); - } - } - ctx.indentationLvl -= 2; - if (remaining > 0) { - output.push(`... ${remaining} more item${remaining > 1 ? 's' : ''}`); - } - return output; -} - -function formatWeakCollection(ctx) { - return [ctx.stylize('', 'special')]; -} - -function formatWeakSet(ctx, value, recurseTimes) { - const entries = previewEntries(value); - return formatSetIterInner(ctx, recurseTimes, entries, kWeak); -} - -function formatWeakMap(ctx, value, recurseTimes) { - const entries = previewEntries(value); - return formatMapIterInner(ctx, recurseTimes, entries, kWeak); -} - -function formatIterator(braces, ctx, value, recurseTimes) { - const { 0: entries, 1: isKeyValue } = previewEntries(value, true); - if (isKeyValue) { - // Mark entry iterators as such. 
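// --- Editor's note: illustrative sketch only, not part of the removed
// source. The rename below is what distinguishes key/value iterators from
// plain ones, e.g.:
//
//   inspect(new Map([[1, 2]]).entries());  // -> '[Map Entries] { [ 1, 2 ] }'
//   inspect(new Set([1, 2]).values());     // -> '[Set Iterator] { 1, 2 }'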
- braces[0] = braces[0].replace(/ Iterator] {$/, ' Entries] {'); - return formatMapIterInner(ctx, recurseTimes, entries, kMapEntries); - } - - return formatSetIterInner(ctx, recurseTimes, entries, kIterator); -} - -function formatPromise(ctx, value, recurseTimes) { - let output; - const { 0: state, 1: result } = getPromiseDetails(value); - if (state === kPending) { - output = [ctx.stylize('', 'special')]; - } else { - ctx.indentationLvl += 2; - const str = formatValue(ctx, result, recurseTimes); - ctx.indentationLvl -= 2; - output = [ - state === kRejected ? - `${ctx.stylize('', 'special')} ${str}` : - str, - ]; - } - return output; -} - -function formatProperty(ctx, value, recurseTimes, key, type, desc, - original = value) { - let name, str; - let extra = ' '; - desc = desc || ObjectGetOwnPropertyDescriptor(value, key) || - { value: value[key], enumerable: true }; - if (desc.value !== undefined) { - const diff = (ctx.compact !== true || type !== kObjectType) ? 2 : 3; - ctx.indentationLvl += diff; - str = formatValue(ctx, desc.value, recurseTimes); - if (diff === 3 && ctx.breakLength < getStringWidth(str, ctx.colors)) { - extra = `\n${' '.repeat(ctx.indentationLvl)}`; - } - ctx.indentationLvl -= diff; - } else if (desc.get !== undefined) { - const label = desc.set !== undefined ? 'Getter/Setter' : 'Getter'; - const s = ctx.stylize; - const sp = 'special'; - if (ctx.getters && (ctx.getters === true || - (ctx.getters === 'get' && desc.set === undefined) || - (ctx.getters === 'set' && desc.set !== undefined))) { - try { - const tmp = FunctionPrototypeCall(desc.get, original); - ctx.indentationLvl += 2; - if (tmp === null) { - str = `${s(`[${label}:`, sp)} ${s('null', 'null')}${s(']', sp)}`; - } else if (typeof tmp === 'object') { - str = `${s(`[${label}]`, sp)} ${formatValue(ctx, tmp, recurseTimes)}`; - } else { - const primitive = formatPrimitive(s, tmp, ctx); - str = `${s(`[${label}:`, sp)} ${primitive}${s(']', sp)}`; - } - ctx.indentationLvl -= 2; - } catch (err) { - const message = ``; - str = `${s(`[${label}:`, sp)} ${message}${s(']', sp)}`; - } - } else { - str = ctx.stylize(`[${label}]`, sp); - } - } else if (desc.set !== undefined) { - str = ctx.stylize('[Setter]', 'special'); - } else { - str = ctx.stylize('undefined', 'undefined'); - } - if (type === kArrayType) { - return str; - } - if (typeof key === 'symbol') { - const tmp = StringPrototypeReplace( - SymbolPrototypeToString(key), - strEscapeSequencesReplacer, escapeFn - ); - name = `[${ctx.stylize(tmp, 'symbol')}]`; - } else if (key === '__proto__') { - name = "['__proto__']"; - } else if (desc.enumerable === false) { - const tmp = StringPrototypeReplace(key, - strEscapeSequencesReplacer, escapeFn); - name = `[${tmp}]`; - } else if (RegExpPrototypeTest(keyStrRegExp, key)) { - name = ctx.stylize(key, 'name'); - } else { - name = ctx.stylize(strEscape(key), 'string'); - } - return `${name}:${extra}${str}`; -} - -function isBelowBreakLength(ctx, output, start, base) { - // Each entry is separated by at least a comma. Thus, we start with a total - // length of at least `output.length`. In addition, some cases have a - // whitespace in-between each other that is added to the total as well. - // TODO(BridgeAR): Add unicode support. Use the readline getStringWidth - // function. Check the performance overhead and make it an opt-in in case it's - // significant. 
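// --- Editor's note: illustrative sketch only, not part of the removed
// source. The length check in isBelowBreakLength() is what keeps small
// objects on a single line while longer ones wrap; with the default
// breakLength of 80:
//
//   inspect({ a: 1, b: 2 });             // -> '{ a: 1, b: 2 }'
//   inspect({ long: 'x'.repeat(100) });  // entry exceeds 80 columns, so it is
//                                        // placed on its own indented line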
- let totalLength = output.length + start; - if (totalLength + output.length > ctx.breakLength) - return false; - for (let i = 0; i < output.length; i++) { - if (ctx.colors) { - totalLength += removeColors(output[i]).length; - } else { - totalLength += output[i].length; - } - if (totalLength > ctx.breakLength) { - return false; - } - } - // Do not line up properties on the same line if `base` contains line breaks. - return base === '' || !StringPrototypeIncludes(base, '\n'); -} - -function reduceToSingleString( - ctx, output, base, braces, extrasType, recurseTimes, value) { - if (ctx.compact !== true) { - if (typeof ctx.compact === 'number' && ctx.compact >= 1) { - // Memorize the original output length. In case the output is grouped, - // prevent lining up the entries on a single line. - const entries = output.length; - // Group array elements together if the array contains at least six - // separate entries. - if (extrasType === kArrayExtrasType && entries > 6) { - output = groupArrayElements(ctx, output, value); - } - // `ctx.currentDepth` is set to the most inner depth of the currently - // inspected object part while `recurseTimes` is the actual current depth - // that is inspected. - // - // Example: - // - // const a = { first: [ 1, 2, 3 ], second: { inner: [ 1, 2, 3 ] } } - // - // The deepest depth of `a` is 2 (a.second.inner) and `a.first` has a max - // depth of 1. - // - // Consolidate all entries of the local most inner depth up to - // `ctx.compact`, as long as the properties are smaller than - // `ctx.breakLength`. - if (ctx.currentDepth - recurseTimes < ctx.compact && - entries === output.length) { - // Line up all entries on a single line in case the entries do not - // exceed `breakLength`. Add 10 as constant to start next to all other - // factors that may reduce `breakLength`. - const start = output.length + ctx.indentationLvl + - braces[0].length + base.length + 10; - if (isBelowBreakLength(ctx, output, start, base)) { - const joinedOutput = join(output, ', '); - if (!joinedOutput.includes('\n')) { - return `${base ? `${base} ` : ''}${braces[0]} ${joinedOutput}` + - ` ${braces[1]}`; - } - } - } - } - // Line up each entry on an individual line. - const indentation = `\n${StringPrototypeRepeat(' ', ctx.indentationLvl)}`; - return `${base ? `${base} ` : ''}${braces[0]}${indentation} ` + - `${join(output, `,${indentation} `)}${indentation}${braces[1]}`; - } - // Line up all entries on a single line in case the entries do not exceed - // `breakLength`. - if (isBelowBreakLength(ctx, output, 0, base)) { - return `${braces[0]}${base ? ` ${base}` : ''} ${join(output, ', ')} ` + - braces[1]; - } - const indentation = StringPrototypeRepeat(' ', ctx.indentationLvl); - // If the opening "brace" is too large, like in the case of "Set {", - // we need to force the first item to be on the next line or the - // items will not line up correctly. - const ln = base === '' && braces[0].length === 1 ? - ' ' : `${base ? ` ${base}` : ''}\n${indentation} `; - // Line up each entry on an individual line. - return `${braces[0]}${ln}${join(output, `,\n${indentation} `)} ${braces[1]}`; -} - -function hasBuiltInToString(value) { - // Prevent triggering proxy traps. - const getFullProxy = false; - const proxyTarget = getProxyDetails(value, getFullProxy); - if (proxyTarget !== undefined) { - value = proxyTarget; - } - - // Count objects that have no `toString` function as built-in. - if (typeof value.toString !== 'function') { - return true; - } - - // The object has a own `toString` property. 
Thus it's not not a built-in one. - if (ObjectPrototypeHasOwnProperty(value, 'toString')) { - return false; - } - - // Find the object that has the `toString` property as own property in the - // prototype chain. - let pointer = value; - do { - pointer = ObjectGetPrototypeOf(pointer); - } while (!ObjectPrototypeHasOwnProperty(pointer, 'toString')); - - // Check closer if the object is a built-in. - const descriptor = ObjectGetOwnPropertyDescriptor(pointer, 'constructor'); - return descriptor !== undefined && - typeof descriptor.value === 'function' && - builtInObjects.has(descriptor.value.name); -} - -const firstErrorLine = (error) => - StringPrototypeSplit(error.message, '\n', 1)[0]; -let CIRCULAR_ERROR_MESSAGE; -function tryStringify(arg) { - try { - return JSONStringify(arg); - } catch (err) { - // Populate the circular error message lazily - if (!CIRCULAR_ERROR_MESSAGE) { - try { - const a = {}; a.a = a; JSONStringify(a); - } catch (circularError) { - CIRCULAR_ERROR_MESSAGE = firstErrorLine(circularError); - } - } - if (err.name === 'TypeError' && - firstErrorLine(err) === CIRCULAR_ERROR_MESSAGE) { - return '[Circular]'; - } - throw err; - } -} - -function format(...args) { - return formatWithOptionsInternal(undefined, args); -} - -function formatWithOptions(inspectOptions, ...args) { - if (typeof inspectOptions !== 'object' || inspectOptions === null) { - throw new ERR_INVALID_ARG_TYPE( - 'inspectOptions', 'object', inspectOptions); - } - return formatWithOptionsInternal(inspectOptions, args); -} - -function formatNumberNoColor(number, options) { - return formatNumber( - stylizeNoColor, - number, - options?.numericSeparator ?? inspectDefaultOptions.numericSeparator - ); -} - -function formatBigIntNoColor(bigint, options) { - return formatBigInt( - stylizeNoColor, - bigint, - options?.numericSeparator ?? 
inspectDefaultOptions.numericSeparator - ); -} - -function formatWithOptionsInternal(inspectOptions, args) { - const first = args[0]; - let a = 0; - let str = ''; - let join = ''; - - if (typeof first === 'string') { - if (args.length === 1) { - return first; - } - let tempStr; - let lastPos = 0; - - for (let i = 0; i < first.length - 1; i++) { - if (StringPrototypeCharCodeAt(first, i) === 37) { // '%' - const nextChar = StringPrototypeCharCodeAt(first, ++i); - if (a + 1 !== args.length) { - switch (nextChar) { - case 115: { // 's' - const tempArg = args[++a]; - if (typeof tempArg === 'number') { - tempStr = formatNumberNoColor(tempArg, inspectOptions); - } else if (typeof tempArg === 'bigint') { - tempStr = formatBigIntNoColor(tempArg, inspectOptions); - } else if (typeof tempArg !== 'object' || - tempArg === null || - !hasBuiltInToString(tempArg)) { - tempStr = String(tempArg); - } else { - tempStr = inspect(tempArg, { - ...inspectOptions, - compact: 3, - colors: false, - depth: 0 - }); - } - break; - } - case 106: // 'j' - tempStr = tryStringify(args[++a]); - break; - case 100: { // 'd' - const tempNum = args[++a]; - if (typeof tempNum === 'bigint') { - tempStr = formatBigIntNoColor(tempNum, inspectOptions); - } else if (typeof tempNum === 'symbol') { - tempStr = 'NaN'; - } else { - tempStr = formatNumberNoColor(Number(tempNum), inspectOptions); - } - break; - } - case 79: // 'O' - tempStr = inspect(args[++a], inspectOptions); - break; - case 111: // 'o' - tempStr = inspect(args[++a], { - ...inspectOptions, - showHidden: true, - showProxy: true, - depth: 4 - }); - break; - case 105: { // 'i' - const tempInteger = args[++a]; - if (typeof tempInteger === 'bigint') { - tempStr = formatBigIntNoColor(tempInteger, inspectOptions); - } else if (typeof tempInteger === 'symbol') { - tempStr = 'NaN'; - } else { - tempStr = formatNumberNoColor( - NumberParseInt(tempInteger), inspectOptions); - } - break; - } - case 102: { // 'f' - const tempFloat = args[++a]; - if (typeof tempFloat === 'symbol') { - tempStr = 'NaN'; - } else { - tempStr = formatNumberNoColor( - NumberParseFloat(tempFloat), inspectOptions); - } - break; - } - case 99: // 'c' - a += 1; - tempStr = ''; - break; - case 37: // '%' - str += StringPrototypeSlice(first, lastPos, i); - lastPos = i + 1; - continue; - default: // Any other character is not a correct placeholder - continue; - } - if (lastPos !== i - 1) { - str += StringPrototypeSlice(first, lastPos, i - 1); - } - str += tempStr; - lastPos = i + 1; - } else if (nextChar === 37) { - str += StringPrototypeSlice(first, lastPos, i); - lastPos = i + 1; - } - } - } - if (lastPos !== 0) { - a++; - join = ' '; - if (lastPos < first.length) { - str += StringPrototypeSlice(first, lastPos); - } - } - } - - while (a < args.length) { - const value = args[a]; - str += join; - str += typeof value !== 'string' ? inspect(value, inspectOptions) : value; - join = ' '; - a++; - } - return str; -} - -if (false) { - const icu = {}; - // icu.getStringWidth(string, ambiguousAsFullWidth, expandEmojiSequence) - // Defaults: ambiguousAsFullWidth = false; expandEmojiSequence = true; - // TODO(BridgeAR): Expose the options to the user. That is probably the - // best thing possible at the moment, since it's difficult to know what - // the receiving end supports. 
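  // For intuition, both variants below are meant to yield, illustratively:
  //   getStringWidth('abc')                     // -> 3
  //   getStringWidth('你好')                     // -> 4 (full-width code points count as 2)
  //   getStringWidth('\u001b[31mhi\u001b[39m')  // -> 2 (VT control sequences are stripped first)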
- getStringWidth = function getStringWidth(str, removeControlChars = true) { - let width = 0; - - if (removeControlChars) - str = stripVTControlCharacters(str); - for (let i = 0; i < str.length; i++) { - // Try to avoid calling into C++ by first handling the ASCII portion of - // the string. If it is fully ASCII, we skip the C++ part. - const code = str.charCodeAt(i); - if (code >= 127) { - width += icu.getStringWidth(str.slice(i).normalize('NFC')); - break; - } - width += code >= 32 ? 1 : 0; - } - return width; - }; -} else { - /** - * Returns the number of columns required to display the given string. - */ - getStringWidth = function getStringWidth(str, removeControlChars = true) { - let width = 0; - - if (removeControlChars) - str = stripVTControlCharacters(str); - str = StringPrototypeNormalize(str, 'NFC'); - for (const char of new SafeStringIterator(str)) { - const code = StringPrototypeCodePointAt(char, 0); - if (isFullWidthCodePoint(code)) { - width += 2; - } else if (!isZeroWidthCodePoint(code)) { - width++; - } - } - - return width; - }; - - /** - * Returns true if the character represented by a given - * Unicode code point is full-width. Otherwise returns false. - */ - const isFullWidthCodePoint = (code) => { - // Code points are partially derived from: - // https://www.unicode.org/Public/UNIDATA/EastAsianWidth.txt - return code >= 0x1100 && ( - code <= 0x115f || // Hangul Jamo - code === 0x2329 || // LEFT-POINTING ANGLE BRACKET - code === 0x232a || // RIGHT-POINTING ANGLE BRACKET - // CJK Radicals Supplement .. Enclosed CJK Letters and Months - (code >= 0x2e80 && code <= 0x3247 && code !== 0x303f) || - // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A - (code >= 0x3250 && code <= 0x4dbf) || - // CJK Unified Ideographs .. Yi Radicals - (code >= 0x4e00 && code <= 0xa4c6) || - // Hangul Jamo Extended-A - (code >= 0xa960 && code <= 0xa97c) || - // Hangul Syllables - (code >= 0xac00 && code <= 0xd7a3) || - // CJK Compatibility Ideographs - (code >= 0xf900 && code <= 0xfaff) || - // Vertical Forms - (code >= 0xfe10 && code <= 0xfe19) || - // CJK Compatibility Forms .. Small Form Variants - (code >= 0xfe30 && code <= 0xfe6b) || - // Halfwidth and Fullwidth Forms - (code >= 0xff01 && code <= 0xff60) || - (code >= 0xffe0 && code <= 0xffe6) || - // Kana Supplement - (code >= 0x1b000 && code <= 0x1b001) || - // Enclosed Ideographic Supplement - (code >= 0x1f200 && code <= 0x1f251) || - // Miscellaneous Symbols and Pictographs 0x1f300 - 0x1f5ff - // Emoticons 0x1f600 - 0x1f64f - (code >= 0x1f300 && code <= 0x1f64f) || - // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane - (code >= 0x20000 && code <= 0x3fffd) - ); - }; - - const isZeroWidthCodePoint = (code) => { - return code <= 0x1F || // C0 control codes - (code >= 0x7F && code <= 0x9F) || // C1 control codes - (code >= 0x300 && code <= 0x36F) || // Combining Diacritical Marks - (code >= 0x200B && code <= 0x200F) || // Modifying Invisible Characters - // Combining Diacritical Marks for Symbols - (code >= 0x20D0 && code <= 0x20FF) || - (code >= 0xFE00 && code <= 0xFE0F) || // Variation Selectors - (code >= 0xFE20 && code <= 0xFE2F) || // Combining Half Marks - (code >= 0xE0100 && code <= 0xE01EF); // Variation Selectors - }; -} - -/** - * Remove all VT control characters. Use to estimate displayed string width. 
- */ -function stripVTControlCharacters(str) { - validateString(str, 'str'); - - return str.replace(ansi, ''); -} - -module.exports = { - inspect, - format, - formatWithOptions, - getStringWidth, - inspectDefaultOptions, - stripVTControlCharacters -}; diff --git a/lib/internal/js_stream_socket.js b/lib/internal/js_stream_socket.js deleted file mode 100644 index 7f27e6d516..0000000000 --- a/lib/internal/js_stream_socket.js +++ /dev/null @@ -1,242 +0,0 @@ -'use strict'; - -const { - Symbol, -} = require('./primordials'); - -const { setImmediate } = require('timers'); -const assert = require('assert'); -const { Socket } = require('net'); -const { JSStream } = process.binding('js_stream'); -const uv = process.binding('uv'); -let debug = require('../util').debuglog( - 'stream_socket', - (fn) => { - debug = fn; - } -); -const { owner_symbol } = require('internal/async_hooks').symbols; -const { ERR_STREAM_WRAP } = require('./errors').codes; - -const kCurrentWriteRequest = Symbol('kCurrentWriteRequest'); -const kCurrentShutdownRequest = Symbol('kCurrentShutdownRequest'); -const kPendingShutdownRequest = Symbol('kPendingShutdownRequest'); - -function isClosing() { return this[owner_symbol].isClosing(); } - -function onreadstart() { return this[owner_symbol].readStart(); } - -function onreadstop() { return this[owner_symbol].readStop(); } - -function onshutdown(req) { return this[owner_symbol].doShutdown(req); } - -function onwrite(req, bufs) { return this[owner_symbol].doWrite(req, bufs); } - -/* This class serves as a wrapper for when the C++ side of Node wants access - * to a standard JS stream. For example, TLS or HTTP do not operate on network - * resources conceptually, although that is the common case and what we are - * optimizing for; in theory, they are completely composable and can work with - * any stream resource they see. - * - * For the common case, i.e. a TLS socket wrapping around a net.Socket, we - * can skip going through the JS layer and let TLS access the raw C++ handle - * of a net.Socket. The flipside of this is that, to maintain composability, - * we need a way to create "fake" net.Socket instances that call back into a - * "real" JavaScript stream. JSStreamSocket is exactly this. - */ -class JSStreamSocket extends Socket { - constructor(stream) { - const handle = new JSStream(); - handle.close = (cb) => { - debug('close'); - this.doClose(cb); - }; - // Inside of the following functions, `this` refers to the handle - // and `this[owner_symbol]` refers to this JSStreamSocket instance. - handle.isClosing = isClosing; - handle.onreadstart = onreadstart; - handle.onreadstop = onreadstop; - handle.onshutdown = onshutdown; - handle.onwrite = onwrite; - - stream.pause(); - stream.on('error', (err) => this.emit('error', err)); - const ondata = (chunk) => { - if (typeof chunk === 'string' || - stream.readableObjectMode === true) { - // Make sure that no further `data` events will happen. - stream.pause(); - stream.removeListener('data', ondata); - - this.emit('error', new ERR_STREAM_WRAP()); - return; - } - - debug('data', chunk.length); - if (this._handle) - this._handle.readBuffer(chunk); - }; - stream.on('data', ondata); - stream.once('end', () => { - debug('end'); - if (this._handle) - this._handle.emitEOF(); - }); - // Some `Stream` don't pass `hasError` parameters when closed. - stream.once('close', () => { - // Errors emitted from `stream` have also been emitted to this instance - // so that we don't pass errors to `destroy()` again. 
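      // Net effect of the wiring above (illustrative; `someDuplex` is a
      // hypothetical duplex stream): errors and EOF from the wrapped stream
      // already surface on this socket, e.g.
      //   const wrapped = new JSStreamSocket(someDuplex);
      //   someDuplex.emit('error', new Error('boom')); // re-emitted on `wrapped`
      // which is why the bare destroy() below does not need the error again.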
- this.destroy(); - }); - - super({ handle, manualStart: true }); - this.stream = stream; - this[kCurrentWriteRequest] = null; - this[kCurrentShutdownRequest] = null; - this[kPendingShutdownRequest] = null; - this.readable = stream.readable; - this.writable = stream.writable; - - // Start reading. - this.read(0); - } - - // Allow legacy requires in the test suite to keep working: - // const { StreamWrap } = require('./js_stream_socket') - static get StreamWrap() { - return JSStreamSocket; - } - - isClosing() { - return !this.readable || !this.writable; - } - - readStart() { - this.stream.resume(); - return 0; - } - - readStop() { - this.stream.pause(); - return 0; - } - - doShutdown(req) { - // TODO(addaleax): It might be nice if we could get into a state where - // DoShutdown() is not called on streams while a write is still pending. - // - // Currently, the only part of the code base where that happens is the - // TLS implementation, which calls both DoWrite() and DoShutdown() on the - // underlying network stream inside of its own DoShutdown() method. - // Working around that on the native side is not quite trivial (yet?), - // so for now that is supported here. - - if (this[kCurrentWriteRequest] !== null) { - this[kPendingShutdownRequest] = req; - return 0; - } - assert(this[kCurrentWriteRequest] === null); - assert(this[kCurrentShutdownRequest] === null); - this[kCurrentShutdownRequest] = req; - - const handle = this._handle; - - setImmediate(() => { - // Ensure that write is dispatched asynchronously. - this.stream.end(() => { - this.finishShutdown(handle, 0); - }); - }); - return 0; - } - - // handle === this._handle except when called from doClose(). - finishShutdown(handle, errCode) { - // The shutdown request might already have been cancelled. - if (this[kCurrentShutdownRequest] === null) - return; - const req = this[kCurrentShutdownRequest]; - this[kCurrentShutdownRequest] = null; - handle.finishShutdown(req, errCode); - } - - doWrite(req, bufs) { - assert(this[kCurrentWriteRequest] === null); - assert(this[kCurrentShutdownRequest] === null); - - const handle = this._handle; - const self = this; - - let pending = bufs.length; - - this.stream.cork(); - // Use `var` over `let` for performance optimization. - // eslint-disable-next-line no-var - for (var i = 0; i < bufs.length; ++i) - this.stream.write(bufs[i], done); - this.stream.uncork(); - - // Only set the request here, because the `write()` calls could throw. - this[kCurrentWriteRequest] = req; - - function done(err) { - if (!err && --pending !== 0) - return; - - // Ensure that this is called once in case of error - pending = 0; - - let errCode = 0; - if (err) { - errCode = uv[`UV_${err.code}`] || uv.UV_EPIPE; - } - - // Ensure that write was dispatched - setImmediate(() => { - self.finishWrite(handle, errCode); - }); - } - - return 0; - } - - // handle === this._handle except when called from doClose(). - finishWrite(handle, errCode) { - // The write request might already have been cancelled. - if (this[kCurrentWriteRequest] === null) - return; - const req = this[kCurrentWriteRequest]; - this[kCurrentWriteRequest] = null; - - handle.finishWrite(req, errCode); - if (this[kPendingShutdownRequest]) { - const req = this[kPendingShutdownRequest]; - this[kPendingShutdownRequest] = null; - this.doShutdown(req); - } - } - - doClose(cb) { - const handle = this._handle; - - // When sockets of the "net" module destroyed, they will call - // `this._handle.close()` which will also emit EOF if not emitted before. 
- // This feature makes sockets on the other side emit "end" and "close" - // even though we haven't called `end()`. As `stream` are likely to be - // instances of `net.Socket`, calling `stream.destroy()` manually will - // avoid issues that don't properly close wrapped connections. - this.stream.destroy(); - - setImmediate(() => { - // Should be already set by net.js - assert(this._handle === null); - - this.finishWrite(handle, uv.UV_ECANCELED); - this.finishShutdown(handle, uv.UV_ECANCELED); - - cb(); - }); - } -} - -module.exports = JSStreamSocket; diff --git a/lib/internal/primordials.js b/lib/internal/primordials.js deleted file mode 100644 index eded9f2f64..0000000000 --- a/lib/internal/primordials.js +++ /dev/null @@ -1,446 +0,0 @@ - - 'use strict'; - - const primordials = module.exports = {} - - -/* eslint-disable node-core/prefer-primordials */ - -// This file subclasses and stores the JS builtins that come from the VM -// so that Node.js's builtin modules do not need to later look these up from -// the global proxy, which can be mutated by users. - -// Use of primordials have sometimes a dramatic impact on performance, please -// benchmark all changes made in performance-sensitive areas of the codebase. -// See: https://github.com/nodejs/node/pull/38248 - -const { - defineProperty: ReflectDefineProperty, - getOwnPropertyDescriptor: ReflectGetOwnPropertyDescriptor, - ownKeys: ReflectOwnKeys, -} - = Reflect; - - if (typeof AggregateError === 'undefined') { - globalThis.AggregateError = require('aggregate-error'); - } - - -// `uncurryThis` is equivalent to `func => Function.prototype.call.bind(func)`. -// It is using `bind.bind(call)` to avoid using `Function.prototype.bind` -// and `Function.prototype.call` after it may have been mutated by users. -const { apply, bind, call } = Function.prototype; -const uncurryThis = bind.bind(call); -primordials.uncurryThis = uncurryThis; - -// `applyBind` is equivalent to `func => Function.prototype.apply.bind(func)`. -// It is using `bind.bind(apply)` to avoid using `Function.prototype.bind` -// and `Function.prototype.apply` after it may have been mutated by users. -const applyBind = bind.bind(apply); -primordials.applyBind = applyBind; - -// Methods that accept a variable number of arguments, and thus it's useful to -// also create `${prefix}${key}Apply`, which uses `Function.prototype.apply`, -// instead of `Function.prototype.call`, and thus doesn't require iterator -// destructuring. -const varargsMethods = [ - // 'ArrayPrototypeConcat' is omitted, because it performs the spread - // on its own for arrays and array-likes with a truthy - // @@isConcatSpreadable symbol property. - 'ArrayOf', - 'ArrayPrototypePush', - 'ArrayPrototypeUnshift', - // 'FunctionPrototypeCall' is omitted, since there's 'ReflectApply' - // and 'FunctionPrototypeApply'. - 'MathHypot', - 'MathMax', - 'MathMin', - 'StringPrototypeConcat', - 'TypedArrayOf', -]; - -function getNewKey(key) { - return typeof key === 'symbol' ? 
- `Symbol${key.description[7].toUpperCase()}${key.description.slice(8)}` : - `${key[0].toUpperCase()}${key.slice(1)}`; -} - -function copyAccessor(dest, prefix, key, { enumerable, get, set }) { - ReflectDefineProperty(dest, `${prefix}Get${key}`, { - value: uncurryThis(get), - enumerable - }); - if (set !== undefined) { - ReflectDefineProperty(dest, `${prefix}Set${key}`, { - value: uncurryThis(set), - enumerable - }); - } -} - -function copyPropsRenamed(src, dest, prefix) { - for (const key of ReflectOwnKeys(src)) { - const newKey = getNewKey(key); - const desc = ReflectGetOwnPropertyDescriptor(src, key); - if ('get' in desc) { - copyAccessor(dest, prefix, newKey, desc); - } else { - const name = `${prefix}${newKey}`; - ReflectDefineProperty(dest, name, desc); - if (varargsMethods.includes(name)) { - ReflectDefineProperty(dest, `${name}Apply`, { - // `src` is bound as the `this` so that the static `this` points - // to the object it was defined on, - // e.g.: `ArrayOfApply` gets a `this` of `Array`: - value: applyBind(desc.value, src), - }); - } - } - } -} - -function copyPropsRenamedBound(src, dest, prefix) { - for (const key of ReflectOwnKeys(src)) { - const newKey = getNewKey(key); - const desc = ReflectGetOwnPropertyDescriptor(src, key); - if ('get' in desc) { - copyAccessor(dest, prefix, newKey, desc); - } else { - const { value } = desc; - if (typeof value === 'function') { - desc.value = value.bind(src); - } - - const name = `${prefix}${newKey}`; - ReflectDefineProperty(dest, name, desc); - if (varargsMethods.includes(name)) { - ReflectDefineProperty(dest, `${name}Apply`, { - value: applyBind(value, src), - }); - } - } - } -} - -function copyPrototype(src, dest, prefix) { - for (const key of ReflectOwnKeys(src)) { - const newKey = getNewKey(key); - const desc = ReflectGetOwnPropertyDescriptor(src, key); - if ('get' in desc) { - copyAccessor(dest, prefix, newKey, desc); - } else { - const { value } = desc; - if (typeof value === 'function') { - desc.value = uncurryThis(value); - } - - const name = `${prefix}${newKey}`; - ReflectDefineProperty(dest, name, desc); - if (varargsMethods.includes(name)) { - ReflectDefineProperty(dest, `${name}Apply`, { - value: applyBind(value), - }); - } - } - } -} - -// Create copies of configurable value properties of the global object -[ - 'Proxy', - 'globalThis', -].forEach((name) => { - // eslint-disable-next-line no-restricted-globals - primordials[name] = globalThis[name]; -}); - -// Create copies of URI handling functions -[ - decodeURI, - decodeURIComponent, - encodeURI, - encodeURIComponent, -].forEach((fn) => { - primordials[fn.name] = fn; -}); - -// Create copies of legacy functions -[ - escape, - eval, - unescape, -].forEach((fn) => { - primordials[fn.name] = fn; -}); - -// Create copies of the namespace objects -[ - 'JSON', - 'Math', - 'Proxy', - 'Reflect', -].forEach((name) => { - // eslint-disable-next-line no-restricted-globals - copyPropsRenamed(globalThis[name], primordials, name); -}); - -// Create copies of intrinsic objects -[ - 'AggregateError', - 'Array', - 'ArrayBuffer', - 'BigInt', - 'BigInt64Array', - 'BigUint64Array', - 'Boolean', - 'DataView', - 'Date', - 'Error', - 'EvalError', - 'FinalizationRegistry', - 'Float32Array', - 'Float64Array', - 'Function', - 'Int16Array', - 'Int32Array', - 'Int8Array', - 'Map', - 'Number', - 'Object', - 'RangeError', - 'ReferenceError', - 'RegExp', - 'Set', - 'String', - 'Symbol', - 'SyntaxError', - 'TypeError', - 'URIError', - 'Uint16Array', - 'Uint32Array', - 'Uint8Array', - 'Uint8ClampedArray', 
- 'WeakMap', - 'WeakRef', - 'WeakSet', -].forEach((name) => { - // eslint-disable-next-line no-restricted-globals - const original = globalThis[name]; - primordials[name] = original; - copyPropsRenamed(original, primordials, name); - copyPrototype(original.prototype, primordials, `${name}Prototype`); -}); - -// Create copies of intrinsic objects that require a valid `this` to call -// static methods. -// Refs: https://www.ecma-international.org/ecma-262/#sec-promise.all -[ - 'Promise', -].forEach((name) => { - // eslint-disable-next-line no-restricted-globals - const original = globalThis[name]; - primordials[name] = original; - copyPropsRenamedBound(original, primordials, name); - copyPrototype(original.prototype, primordials, `${name}Prototype`); -}); - -// Create copies of abstract intrinsic objects that are not directly exposed -// on the global object. -// Refs: https://tc39.es/ecma262/#sec-%typedarray%-intrinsic-object -[ - { name: 'TypedArray', original: Reflect.getPrototypeOf(Uint8Array) }, - { name: 'ArrayIterator', original: { - prototype: Reflect.getPrototypeOf(Array.prototype[Symbol.iterator]()), - } }, - { name: 'StringIterator', original: { - prototype: Reflect.getPrototypeOf(String.prototype[Symbol.iterator]()), - } }, -].forEach(({ name, original }) => { - primordials[name] = original; - // The static %TypedArray% methods require a valid `this`, but can't be bound, - // as they need a subclass constructor as the receiver: - copyPrototype(original, primordials, name); - copyPrototype(original.prototype, primordials, `${name}Prototype`); -}); - -/* eslint-enable node-core/prefer-primordials */ - -const { - ArrayPrototypeForEach, - FinalizationRegistry, - FunctionPrototypeCall, - Map, - ObjectFreeze, - ObjectSetPrototypeOf, - Promise, - PromisePrototypeThen, - Set, - SymbolIterator, - WeakMap, - WeakRef, - WeakSet, -} = require('./primordials'); - -// Because these functions are used by `makeSafe`, which is exposed -// on the `primordials` object, it's important to use const references -// to the primordials that they use: -const createSafeIterator = (factory, next) => { - class SafeIterator { - constructor(iterable) { - this._iterator = factory(iterable); - } - next() { - return next(this._iterator); - } - [SymbolIterator]() { - return this; - } - } - ObjectSetPrototypeOf(SafeIterator.prototype, null); - ObjectFreeze(SafeIterator.prototype); - ObjectFreeze(SafeIterator); - return SafeIterator; -}; - -primordials.SafeArrayIterator = createSafeIterator( - primordials.ArrayPrototypeSymbolIterator, - primordials.ArrayIteratorPrototypeNext -); -primordials.SafeStringIterator = createSafeIterator( - primordials.StringPrototypeSymbolIterator, - primordials.StringIteratorPrototypeNext -); - -const copyProps = (src, dest) => { - ArrayPrototypeForEach(ReflectOwnKeys(src), (key) => { - if (!ReflectGetOwnPropertyDescriptor(dest, key)) { - ReflectDefineProperty( - dest, - key, - ReflectGetOwnPropertyDescriptor(src, key)); - } - }); -}; - -/** - * @type {typeof primordials.makeSafe} - */ -const makeSafe = (unsafe, safe) => { - if (SymbolIterator in unsafe.prototype) { - const dummy = new unsafe(); - let next; // We can reuse the same `next` method. - - ArrayPrototypeForEach(ReflectOwnKeys(unsafe.prototype), (key) => { - if (!ReflectGetOwnPropertyDescriptor(safe.prototype, key)) { - const desc = ReflectGetOwnPropertyDescriptor(unsafe.prototype, key); - if ( - typeof desc.value === 'function' && - desc.value.length === 0 && - SymbolIterator in (FunctionPrototypeCall(desc.value, dummy) ?? 
{}) - ) { - const createIterator = uncurryThis(desc.value); - if (typeof next === 'undefined') { - next = uncurryThis(createIterator(dummy).next); - } - - const SafeIterator = createSafeIterator(createIterator, next); - desc.value = function() { - return new SafeIterator(this); - }; - } - ReflectDefineProperty(safe.prototype, key, desc); - } - }); - } else { - copyProps(unsafe.prototype, safe.prototype); - } - copyProps(unsafe, safe); - - ObjectSetPrototypeOf(safe.prototype, null); - ObjectFreeze(safe.prototype); - ObjectFreeze(safe); - return safe; -}; -primordials.makeSafe = makeSafe; - -// Subclass the constructors because we need to use their prototype -// methods later. -// Defining the `constructor` is necessary here to avoid the default -// constructor which uses the user-mutable `%ArrayIteratorPrototype%.next`. -primordials.SafeMap = makeSafe( - Map, - class SafeMap extends Map { - constructor(i) { super(i); } // eslint-disable-line no-useless-constructor - } -); -primordials.SafeWeakMap = makeSafe( - WeakMap, - class SafeWeakMap extends WeakMap { - constructor(i) { super(i); } // eslint-disable-line no-useless-constructor - } -); - -primordials.SafeSet = makeSafe( - Set, - class SafeSet extends Set { - constructor(i) { super(i); } // eslint-disable-line no-useless-constructor - } -); -primordials.SafeWeakSet = makeSafe( - WeakSet, - class SafeWeakSet extends WeakSet { - constructor(i) { super(i); } // eslint-disable-line no-useless-constructor - } -); - -primordials.SafeFinalizationRegistry = makeSafe( - FinalizationRegistry, - class SafeFinalizationRegistry extends FinalizationRegistry { - // eslint-disable-next-line no-useless-constructor - constructor(cleanupCallback) { super(cleanupCallback); } - } -); -primordials.SafeWeakRef = makeSafe( - WeakRef, - class SafeWeakRef extends WeakRef { - // eslint-disable-next-line no-useless-constructor - constructor(target) { super(target); } - } -); - -const SafePromise = makeSafe( - Promise, - class SafePromise extends Promise { - // eslint-disable-next-line no-useless-constructor - constructor(executor) { super(executor); } - } -); - -primordials.PromisePrototypeCatch = (thisPromise, onRejected) => - PromisePrototypeThen(thisPromise, undefined, onRejected); - -/** - * Attaches a callback that is invoked when the Promise is settled (fulfilled or - * rejected). The resolved value cannot be modified from the callback. - * Prefer using async functions when possible. - * @param {Promise} thisPromise - * @param {() => void) | undefined | null} onFinally The callback to execute - * when the Promise is settled (fulfilled or rejected). - * @returns {Promise} A Promise for the completion of the callback. - */ -primordials.SafePromisePrototypeFinally = (thisPromise, onFinally) => - // Wrapping on a new Promise is necessary to not expose the SafePromise - // prototype to user-land. 
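  // Roughly speaking: for any promise `p`,
  //   SafePromisePrototypeFinally(p, onFinally)
  // is intended to behave like `p.finally(onFinally)`, except the chain runs
  // through the frozen SafePromise copy (so user mutations of
  // Promise.prototype cannot interfere) and callers get back a plain Promise.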
- new Promise((a, b) => - new SafePromise((a, b) => PromisePrototypeThen(thisPromise, a, b)) - .finally(onFinally) - .then(a, b) - ); - -primordials.AsyncIteratorPrototype = - primordials.ReflectGetPrototypeOf( - primordials.ReflectGetPrototypeOf( - async function* () {}).prototype); - -ObjectSetPrototypeOf(primordials, null); -ObjectFreeze(primordials); diff --git a/lib/internal/streams/add-abort-signal.js b/lib/internal/streams/add-abort-signal.js index 12124490b7..0cfda5d75a 100644 --- a/lib/internal/streams/add-abort-signal.js +++ b/lib/internal/streams/add-abort-signal.js @@ -3,7 +3,7 @@ const { AbortError, codes, -} = require('../errors'); +} = require('../../ours/errors'); const eos = require('./end-of-stream'); const { ERR_INVALID_ARG_TYPE } = codes; diff --git a/lib/internal/streams/buffer_list.js b/lib/internal/streams/buffer_list.js index e3e6bd923b..bef1a9cb3b 100644 --- a/lib/internal/streams/buffer_list.js +++ b/lib/internal/streams/buffer_list.js @@ -5,10 +5,10 @@ const { SymbolIterator, TypedArrayPrototypeSet, Uint8Array, -} = require('../primordials'); +} = require('../../ours/primordials'); const { Buffer } = require('buffer'); -const inspect = { custom: Symbol('nodejs.util.inspect.custom') }; +const { inspect } = require('util'); module.exports = class BufferList { constructor() { @@ -169,7 +169,7 @@ module.exports = class BufferList { } // Make sure the linked list only shows the minimal necessary information. - [inspect.custom](_, options) { + [Symbol.for('nodejs.util.inspect.custom')](_, options) { return inspect(this, { ...options, // Only inspect one level. diff --git a/lib/internal/streams/compose.js b/lib/internal/streams/compose.js index 065325a2eb..47f1c547c3 100644 --- a/lib/internal/streams/compose.js +++ b/lib/internal/streams/compose.js @@ -14,7 +14,7 @@ const { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS, }, -} = require('../errors'); +} = require('../../ours/errors'); module.exports = function compose(...streams) { if (streams.length === 0) { diff --git a/lib/internal/streams/destroy.js b/lib/internal/streams/destroy.js index f719ea88cc..6f1787953a 100644 --- a/lib/internal/streams/destroy.js +++ b/lib/internal/streams/destroy.js @@ -6,10 +6,10 @@ const { ERR_MULTIPLE_CALLBACK, }, AbortError, -} = require('../errors'); +} = require('../../ours/errors'); const { Symbol, -} = require('../primordials'); +} = require('../../ours/primordials'); const { kDestroyed, isDestroyed, @@ -106,20 +106,7 @@ function _destroy(self, err, cb) { } } try { - const result = self._destroy(err || null, onDestroy); - if (result != null) { - const then = result.then; - if (typeof then === 'function') { - then.call( - result, - function() { - process.nextTick(onDestroy, null); - }, - function(err) { - process.nextTick(onDestroy, err); - }); - } - } + self._destroy(err || null, onDestroy); } catch (err) { onDestroy(err); } @@ -285,24 +272,7 @@ function constructNT(stream) { } try { - const result = stream._construct(onConstruct); - if (result != null) { - const then = result.then; - if (typeof then === 'function') { - then.call( - result, - function() { - if (!called) { - process.nextTick(onConstruct, null); - } - }, - function(err) { - if (!called) { - process.nextTick(onConstruct, err); - } - }); - } - } + stream._construct(onConstruct); } catch (err) { onConstruct(err); } diff --git a/lib/internal/streams/duplex.js b/lib/internal/streams/duplex.js index 66668e20db..9588ddc58e 100644 --- a/lib/internal/streams/duplex.js +++ b/lib/internal/streams/duplex.js @@ -31,7 +31,7 @@ const { 
ObjectGetOwnPropertyDescriptor, ObjectKeys, ObjectSetPrototypeOf, -} = require('../primordials'); +} = require('../../ours/primordials'); module.exports = Duplex; diff --git a/lib/internal/streams/duplexify.js b/lib/internal/streams/duplexify.js index 4bfaed7a1a..0d90dc9405 100644 --- a/lib/internal/streams/duplexify.js +++ b/lib/internal/streams/duplexify.js @@ -16,11 +16,11 @@ const { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE, }, -} = require('../errors'); +} = require('../../ours/errors'); const { destroyer } = require('./destroy'); const Duplex = require('./duplex'); const Readable = require('./readable'); -const { createDeferredPromise } = require('../../util'); +const { createDeferredPromise } = require('../../ours/util'); const from = require('./from'); const { @@ -42,7 +42,7 @@ const { const { FunctionPrototypeCall -} = require('../primordials'); +} = require('../../ours/primordials'); // This is needed for pre node 17. class Duplexify extends Duplex { diff --git a/lib/internal/streams/end-of-stream.js b/lib/internal/streams/end-of-stream.js index d4e8aa6ad1..488f4fbe97 100644 --- a/lib/internal/streams/end-of-stream.js +++ b/lib/internal/streams/end-of-stream.js @@ -6,27 +6,30 @@ const { AbortError, codes, -} = require('../errors'); +} = require('../../ours/errors'); const { + ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes; -const { once } = require('../../util'); +const { once } = require('../../ours/util'); const { validateAbortSignal, validateFunction, validateObject, } = require('../validators'); -const { Promise } = require('../primordials'); +const { Promise } = require('../../ours/primordials'); const { isClosed, isReadable, isReadableNodeStream, isReadableFinished, + isReadableErrored, isWritable, isWritableNodeStream, isWritableFinished, + isWritableErrored, isNodeStream, willEmitClose: _willEmitClose, } = require('./utils'); @@ -56,7 +59,7 @@ function eos(stream, options, callback) { if (!isNodeStream(stream)) { // TODO: Webstreams. - // TODO: Throw INVALID_ARG_TYPE. 
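  // Illustrative effect of the change below: a non-stream argument now fails
  // fast, e.g. (sketch) stream.finished({}, () => {}) throws
  // ERR_INVALID_ARG_TYPE ('The "stream" argument must be an instance of
  // Stream. Received an instance of Object').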
+ throw new ERR_INVALID_ARG_TYPE('stream', 'Stream', stream); } const wState = stream._writableState; @@ -124,7 +127,7 @@ function eos(stream, options, callback) { const onclose = () => { closed = true; - const errored = wState?.errored || rState?.errored; + const errored = isWritableErrored(stream) || isReadableErrored(stream); if (errored && typeof errored !== 'boolean') { return callback.call(stream, errored); diff --git a/lib/internal/streams/from.js b/lib/internal/streams/from.js index d00a9e8a88..051d563b69 100644 --- a/lib/internal/streams/from.js +++ b/lib/internal/streams/from.js @@ -4,13 +4,13 @@ const { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator, -} = require('../primordials'); +} = require('../../ours/primordials'); const { Buffer } = require('buffer'); const { ERR_INVALID_ARG_TYPE, ERR_STREAM_NULL_VALUES -} = require('../errors').codes; +} = require('../../ours/errors').codes; function from(Readable, iterable, opts) { let iterator; diff --git a/lib/internal/streams/lazy_transform.js b/lib/internal/streams/lazy_transform.js index 45949f0706..d6a7632b82 100644 --- a/lib/internal/streams/lazy_transform.js +++ b/lib/internal/streams/lazy_transform.js @@ -7,7 +7,7 @@ const { ObjectDefineProperties, ObjectDefineProperty, ObjectSetPrototypeOf, -} = require('../primordials'); +} = require('../../ours/primordials'); const stream = require('stream'); diff --git a/lib/internal/streams/legacy.js b/lib/internal/streams/legacy.js index 0015a134e9..a618a86991 100644 --- a/lib/internal/streams/legacy.js +++ b/lib/internal/streams/legacy.js @@ -3,7 +3,7 @@ const { ArrayIsArray, ObjectSetPrototypeOf, -} = require('../primordials'); +} = require('../../ours/primordials'); const EE = require('events'); diff --git a/lib/internal/streams/operators.js b/lib/internal/streams/operators.js index 5d895bff95..3fb669f73a 100644 --- a/lib/internal/streams/operators.js +++ b/lib/internal/streams/operators.js @@ -13,13 +13,13 @@ const { ERR_OUT_OF_RANGE, }, AbortError, -} = require('../errors'); +} = require('../../ours/errors'); const { validateAbortSignal, validateInteger, validateObject, } = require('../validators'); -const kWeakHandler = require('../primordials').Symbol('kWeak'); +const kWeakHandler = require('../../ours/primordials').Symbol('kWeak'); const { finished } = require('./end-of-stream'); const { @@ -31,7 +31,7 @@ const { PromiseReject, PromisePrototypeCatch, Symbol, -} = require('../primordials'); +} = require('../../ours/primordials'); const kEmpty = Symbol('kEmpty'); const kEof = Symbol('kEof'); diff --git a/lib/internal/streams/passthrough.js b/lib/internal/streams/passthrough.js index 4320521185..8ec6a791ee 100644 --- a/lib/internal/streams/passthrough.js +++ b/lib/internal/streams/passthrough.js @@ -27,7 +27,7 @@ const { ObjectSetPrototypeOf, -} = require('../primordials'); +} = require('../../ours/primordials'); module.exports = PassThrough; diff --git a/lib/internal/streams/pipeline.js b/lib/internal/streams/pipeline.js index be1a7bdc92..815044191f 100644 --- a/lib/internal/streams/pipeline.js +++ b/lib/internal/streams/pipeline.js @@ -7,10 +7,10 @@ const { ArrayIsArray, Promise, SymbolAsyncIterator, -} = require('../primordials'); +} = require('../../ours/primordials'); const eos = require('./end-of-stream'); -const { once } = require('../../util'); +const { once } = require('../../ours/util'); const destroyImpl = require('./destroy'); const Duplex = require('./duplex'); const { @@ -22,15 +22,16 @@ const { ERR_STREAM_DESTROYED, }, AbortError, -} = require('../errors'); 
+} = require('../../ours/errors'); const { - validateCallback, + validateFunction, validateAbortSignal } = require('../validators'); const { isIterable, + isReadable, isReadableNodeStream, isNodeStream, } = require('./utils'); @@ -49,14 +50,17 @@ function destroyer(stream, reading, writing) { finished = true; }); - eos(stream, { readable: reading, writable: writing }, (err) => { + const cleanup = eos(stream, { readable: reading, writable: writing }, (err) => { finished = !err; }); - return (err) => { - if (finished) return; - finished = true; - destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe')); + return { + destroy: (err) => { + if (finished) return; + finished = true; + destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe')); + }, + cleanup }; } @@ -64,7 +68,7 @@ function popCallback(streams) { // Streams should never be an empty array. It should always contain at least // a single stream. Therefore optimize for the average case instead of // checking for length === 0 as well. - validateCallback(streams[streams.length - 1]); + validateFunction(streams[streams.length - 1], 'streams[stream.length - 1]'); return streams.pop(); } @@ -163,6 +167,10 @@ function pipelineImpl(streams, callback, opts) { const signal = ac.signal; const outerSignal = opts?.signal; + // Need to cleanup event listeners if last stream is readable + // https://github.com/nodejs/node/issues/35452 + const lastStreamCleanup = []; + validateAbortSignal(outerSignal, 'options.signal'); function abort() { @@ -198,6 +206,9 @@ function pipelineImpl(streams, callback, opts) { ac.abort(); if (final) { + if (!error) { + lastStreamCleanup.forEach((fn) => fn()); + } process.nextTick(callback, error, value); } } @@ -208,14 +219,20 @@ function pipelineImpl(streams, callback, opts) { const reading = i < streams.length - 1; const writing = i > 0; const end = reading || opts?.end !== false; + const isLastStream = i === streams.length - 1; if (isNodeStream(stream)) { if (end) { - destroys.push(destroyer(stream, reading, writing)); + const { destroy, cleanup } = destroyer(stream, reading, writing); + destroys.push(destroy); + + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(cleanup); + } } // Catch stream errors that occur after pipe/pump has completed. 
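    // Context sketch (see nodejs/node#35452 referenced above): when the tail
    // of the pipeline is readable and is handed back to the caller, e.g.
    //   const tail = pipeline(source, transform, onDone);  // hypothetical usage
    // the 'error' listener added below must be removable once the pipeline
    // settles, hence the named handler plus the lastStreamCleanup bookkeeping.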
- stream.on('error', (err) => { + function onError(err) { if ( err && err.name !== 'AbortError' && @@ -223,7 +240,13 @@ function pipelineImpl(streams, callback, opts) { ) { finish(err); } - }); + } + stream.on('error', onError); + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(() => { + stream.removeListener('error', onError); + }); + } } if (i === 0) { @@ -291,12 +314,19 @@ function pipelineImpl(streams, callback, opts) { ret = pt; - destroys.push(destroyer(ret, false, true)); + const { destroy, cleanup } = destroyer(ret, false, true); + destroys.push(destroy); + if (isLastStream) { + lastStreamCleanup.push(cleanup); + } } } else if (isNodeStream(stream)) { if (isReadableNodeStream(ret)) { finishCount += 2; - pipe(ret, stream, finish, { end }); + const cleanup = pipe(ret, stream, finish, { end }); + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(cleanup); + } } else if (isIterable(ret)) { finishCount++; pump(ret, stream, finish, { end }); @@ -351,7 +381,7 @@ function pipe(src, dst, finish, { end }) { finish(err); } }); - eos(dst, { readable: false, writable: true }, finish); + return eos(dst, { readable: false, writable: true }, finish); } module.exports = { pipelineImpl, pipeline }; diff --git a/lib/internal/streams/readable.js b/lib/internal/streams/readable.js index b01d40b564..10d7ace043 100644 --- a/lib/internal/streams/readable.js +++ b/lib/internal/streams/readable.js @@ -33,7 +33,7 @@ const { SafeSet, SymbolAsyncIterator, Symbol -} = require('../primordials'); +} = require('../../ours/primordials'); module.exports = Readable; Readable.ReadableState = ReadableState; @@ -47,7 +47,7 @@ const { } = require('./add-abort-signal'); const eos = require('./end-of-stream'); -let debug = require('../../util').debuglog('stream', (fn) => { +let debug = require('util').debuglog('stream', (fn) => { debug = fn; }); const BufferList = require('./buffer_list'); @@ -66,7 +66,7 @@ const { ERR_STREAM_PUSH_AFTER_EOF, ERR_STREAM_UNSHIFT_AFTER_END_EVENT, } -} = require('../errors'); +} = require('../../ours/errors'); const { validateObject } = require('../validators'); const kPaused = Symbol('kPaused'); @@ -493,18 +493,7 @@ Readable.prototype.read = function(n) { // Call internal read method try { - const result = this._read(state.highWaterMark); - if (result != null) { - const then = result.then; - if (typeof then === 'function') { - then.call( - result, - nop, - function(err) { - errorOrDestroy(this, err); - }); - } - } + this._read(state.highWaterMark); } catch (err) { errorOrDestroy(this, err); } @@ -1244,13 +1233,23 @@ ObjectDefineProperties(Readable.prototype, { } }, + errored: { + enumerable: false, + get() { + return this._readableState ? this._readableState.errored : null; + } + }, + + closed: { + get() { + return this._readableState ? this._readableState.closed : false; + } + }, + destroyed: { enumerable: false, get() { - if (this._readableState === undefined) { - return false; - } - return this._readableState.destroyed; + return this._readableState ? 
this._readableState.destroyed : false; }, set(value) { // We ignore the value if the stream diff --git a/lib/internal/streams/state.js b/lib/internal/streams/state.js index 9261fec3b5..60af7be88b 100644 --- a/lib/internal/streams/state.js +++ b/lib/internal/streams/state.js @@ -3,9 +3,9 @@ const { MathFloor, NumberIsInteger, -} = require('../primordials'); +} = require('../../ours/primordials'); -const { ERR_INVALID_ARG_VALUE } = require('../errors').codes; +const { ERR_INVALID_ARG_VALUE } = require('../../ours/errors').codes; function highWaterMarkFrom(options, isDuplex, duplexKey) { return options.highWaterMark != null ? options.highWaterMark : diff --git a/lib/internal/streams/transform.js b/lib/internal/streams/transform.js index f48ae3782f..fe6f5cffa4 100644 --- a/lib/internal/streams/transform.js +++ b/lib/internal/streams/transform.js @@ -66,12 +66,12 @@ const { ObjectSetPrototypeOf, Symbol -} = require('../primordials'); +} = require('../../ours/primordials'); module.exports = Transform; const { ERR_METHOD_NOT_IMPLEMENTED -} = require('../errors').codes; +} = require('../../ours/errors').codes; const Duplex = require('./duplex'); ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype); ObjectSetPrototypeOf(Transform, Duplex); @@ -107,10 +107,8 @@ function Transform(options) { } function final(cb) { - let called = false; if (typeof this._flush === 'function' && !this.destroyed) { - const result = this._flush((er, data) => { - called = true; + this._flush((er, data) => { if (er) { if (cb) { cb(er); @@ -128,33 +126,6 @@ function final(cb) { cb(); } }); - if (result !== undefined && result !== null) { - try { - const then = result.then; - if (typeof then === 'function') { - then.call( - result, - (data) => { - if (called) - return; - if (data != null) - this.push(data); - this.push(null); - if (cb) - process.nextTick(cb); - }, - (err) => { - if (cb) { - process.nextTick(cb, err); - } else { - process.nextTick(() => this.destroy(err)); - } - }); - } - } catch (err) { - process.nextTick(() => this.destroy(err)); - } - } } else { this.push(null); if (cb) { @@ -180,9 +151,7 @@ Transform.prototype._write = function(chunk, encoding, callback) { const wState = this._writableState; const length = rState.length; - let called = false; - const result = this._transform(chunk, encoding, (err, val) => { - called = true; + this._transform(chunk, encoding, (err, val) => { if (err) { callback(err); return; @@ -204,38 +173,6 @@ Transform.prototype._write = function(chunk, encoding, callback) { this[kCallback] = callback; } }); - if (result !== undefined && result != null) { - try { - const then = result.then; - if (typeof then === 'function') { - then.call( - result, - (val) => { - if (called) - return; - - if (val != null) { - this.push(val); - } - - if ( - wState.ended || - length === rState.length || - rState.length < rState.highWaterMark || - rState.length === 0) { - process.nextTick(callback); - } else { - this[kCallback] = callback; - } - }, - (err) => { - process.nextTick(callback, err); - }); - } - } catch (err) { - process.nextTick(callback, err); - } - } }; Transform.prototype._read = function() { diff --git a/lib/internal/streams/utils.js b/lib/internal/streams/utils.js index bd653c4fc0..0ac9821710 100644 --- a/lib/internal/streams/utils.js +++ b/lib/internal/streams/utils.js @@ -4,7 +4,7 @@ const { Symbol, SymbolAsyncIterator, SymbolIterator, -} = require('../primordials'); +} = require('../../ours/primordials'); const kDestroyed = Symbol('kDestroyed'); const kIsErrored = 
Symbol('kIsErrored'); @@ -153,11 +153,39 @@ function isFinished(stream, opts) { return true; } +function isWritableErrored(stream) { + if (!isNodeStream(stream)) { + return null; + } + + if (stream.writableErrored) { + return stream.writableErrored; + } + + return stream._writableState?.errored ?? null; +} + +function isReadableErrored(stream) { + if (!isNodeStream(stream)) { + return null; + } + + if (stream.readableErrored) { + return stream.readableErrored; + } + + return stream._readableState?.errored ?? null; +} + function isClosed(stream) { if (!isNodeStream(stream)) { return null; } + if (typeof stream.closed === 'boolean') { + return stream.closed; + } + const wState = stream._writableState; const rState = stream._readableState; @@ -249,11 +277,13 @@ module.exports = { isReadableNodeStream, isReadableEnded, isReadableFinished, + isReadableErrored, isNodeStream, isWritable, isWritableNodeStream, isWritableEnded, isWritableFinished, + isWritableErrored, isServerRequest, isServerResponse, willEmitClose, diff --git a/lib/internal/streams/writable.js b/lib/internal/streams/writable.js index 1b156f3cba..a0516e8876 100644 --- a/lib/internal/streams/writable.js +++ b/lib/internal/streams/writable.js @@ -35,7 +35,7 @@ const { StringPrototypeToLowerCase, Symbol, SymbolHasInstance, -} = require('../primordials'); +} = require('../../ours/primordials'); module.exports = Writable; Writable.WritableState = WritableState; @@ -63,7 +63,7 @@ const { ERR_STREAM_NULL_VALUES, ERR_STREAM_WRITE_AFTER_END, ERR_UNKNOWN_ENCODING -} = require('../errors').codes; +} = require('../../ours/errors').codes; const { errorOrDestroy } = destroyImpl; @@ -650,6 +650,7 @@ Writable.prototype.end = function(chunk, encoding, cb) { function needFinish(state) { return (state.ending && + !state.destroyed && state.constructed && state.length === 0 && !state.errored && @@ -692,24 +693,7 @@ function callFinal(stream, state) { state.pendingcb++; try { - const result = stream._final(onFinish); - if (result != null) { - const then = result.then; - if (typeof then === 'function') { - then.call( - result, - function() { - if (!called) { - process.nextTick(onFinish, null); - } - }, - function(err) { - if (!called) { - process.nextTick(onFinish, err); - } - }); - } - } + stream._final(onFinish); } catch (err) { onFinish(err); } @@ -732,11 +716,18 @@ function prefinish(stream, state) { function finishMaybe(stream, state, sync) { if (needFinish(state)) { prefinish(stream, state); - if (state.pendingcb === 0 && needFinish(state)) { - state.pendingcb++; + if (state.pendingcb === 0) { if (sync) { - process.nextTick(finish, stream, state); - } else { + state.pendingcb++; + process.nextTick((stream, state) => { + if (needFinish(state)) { + finish(stream, state); + } else { + state.pendingcb--; + } + }, stream, state); + } else if (needFinish(state)) { + state.pendingcb++; finish(stream, state); } } @@ -772,6 +763,12 @@ function finish(stream, state) { ObjectDefineProperties(Writable.prototype, { + closed: { + get() { + return this._writableState ? this._writableState.closed : false; + } + }, + destroyed: { get() { return this._writableState ? this._writableState.destroyed : false; @@ -850,7 +847,25 @@ ObjectDefineProperties(Writable.prototype, { get() { return this._writableState && this._writableState.length; } - } + }, + + errored: { + enumerable: false, + get() { + return this._writableState ? 
this._writableState.errored : null; + } + }, + + writableAborted: { + enumerable: false, + get: function() { + return !!( + this._writableState.writable !== false && + (this._writableState.destroyed || this._writableState.errored) && + !this._writableState.finished + ); + } + }, }); const destroy = destroyImpl.destroy; diff --git a/lib/internal/validators.js b/lib/internal/validators.js index 4b672ea3a8..89337dd068 100644 --- a/lib/internal/validators.js +++ b/lib/internal/validators.js @@ -13,7 +13,7 @@ const { String, StringPrototypeToUpperCase, StringPrototypeTrim, -} = require('./primordials'); +} = require('../ours/primordials'); const { hideStackFrames, @@ -23,14 +23,13 @@ const { ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE, ERR_UNKNOWN_SIGNAL, - ERR_INVALID_CALLBACK, } -} = require('./errors'); -const { normalizeEncoding } = require('../util'); +} = require('../ours/errors'); +const { normalizeEncoding } = require('../ours/util'); const { isAsyncFunction, isArrayBufferView -} = require('../util'); +} = require('util').types; const signals = {}; function isInt32(value) { @@ -222,11 +221,6 @@ function validatePort(port, name = 'Port', allowZero = true) { return port | 0; } -const validateCallback = hideStackFrames((callback) => { - if (typeof callback !== 'function') - throw new ERR_INVALID_CALLBACK(callback); -}); - const validateAbortSignal = hideStackFrames((signal, name) => { if (signal !== undefined && (signal === null || @@ -271,6 +265,5 @@ module.exports = { validateString, validateUint32, validateUndefined, - validateCallback, validateAbortSignal, }; diff --git a/lib/browser.js b/lib/ours/browser.js similarity index 93% rename from lib/browser.js rename to lib/ours/browser.js index c38850733a..ac901c17e7 100644 --- a/lib/browser.js +++ b/lib/ours/browser.js @@ -1,7 +1,7 @@ 'use strict' -const CustomStream = require('./stream') -const promises = require('./stream/promises') +const CustomStream = require('../stream') +const promises = require('../stream/promises') const originalDestroy = CustomStream.Readable.destroy module.exports = CustomStream.Readable diff --git a/lib/ours/errors.js b/lib/ours/errors.js new file mode 100644 index 0000000000..07948aa1d0 --- /dev/null +++ b/lib/ours/errors.js @@ -0,0 +1,353 @@ +'use strict' + +/* + This file is a reduced and adapted version of the main lib/internal/errors.js file defined at + + https://github.com/nodejs/node/blob/master/lib/internal/errors.js + + Don't try to replace with the original file and keep it up to date (starting from E(...) definitions) + with the upstream file. +*/ + +if (typeof AggregateError === 'undefined') { + globalThis.AggregateError = require('aggregate-error') +} + +const assert = require('assert') +const { inspect, format } = require('util') + +const kIsNodeError = Symbol('kIsNodeError') +const kTypes = [ + 'string', + 'function', + 'number', + 'object', + // Accept 'Function' and 'Object' as alternative to the lower cased version. + 'Function', + 'Object', + 'boolean', + 'bigint', + 'symbol' +] +const classRegExp = /^([A-Z][a-z0-9]*)+$/ +const nodeInternalPrefix = '__node_internal_' +const codes = {} + +// Only use this for integers! Decimal numbers do not work with this function. +function addNumericalSeparator(val) { + let res = '' + let i = val.length + const start = val[0] === '-' ? 
1 : 0 + for (; i >= start + 4; i -= 3) { + res = `_${val.slice(i - 3, i)}${res}` + } + return `${val.slice(0, i)}${res}` +} + +function getMessage(key, msg, args) { + if (typeof msg === 'function') { + assert( + msg.length <= args.length, // Default options do not count. + `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).` + ) + + return msg(...args) + } + + const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length + + assert( + expectedLength === args.length, + `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).` + ) + + if (args.length === 0) { + return msg + } + + return format(msg, ...args) +} + +function E(code, message, Base) { + if (!Base) { + Base = Error + } + + class NodeError extends Base { + constructor(...args) { + super(getMessage(code, message, args)) + } + + toString() { + return `${this.name} [${code}]: ${this.message}` + } + } + + NodeError.prototype.name = Base.name + NodeError.prototype.code = code + NodeError.prototype[kIsNodeError] = true + NodeError.prototype.toString = function () { + return `${this.name} [${code}]: ${this.message}` + } + + codes[code] = NodeError +} + +function hideStackFrames(fn) { + // We rename the functions that will be hidden to cut off the stacktrace + // at the outermost one + const hidden = nodeInternalPrefix + fn.name + Object.defineProperty(fn, 'name', { value: hidden }) + return fn +} + +function aggregateTwoErrors(innerError, outerError) { + if (innerError && outerError && innerError !== outerError) { + if (Array.isArray(outerError.errors)) { + // If `outerError` is already an `AggregateError`. + outerError.errors.push(innerError) + return outerError + } + + const err = new AggregateError([outerError, innerError], outerError.message) + err.code = outerError.code + return err + } + + return innerError || outerError +} + +class AbortError extends Error { + constructor(message = 'The operation was aborted', options = undefined) { + if (options !== undefined && typeof options !== 'object') { + throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options) + } + + super(message, options) + this.code = 'ABORT_ERR' + this.name = 'AbortError' + } +} + +E( + 'ERR_INVALID_ARG_TYPE', + (name, expected, actual) => { + assert(typeof name === 'string', "'name' must be a string") + + if (!Array.isArray(expected)) { + expected = [expected] + } + + let msg = 'The ' + if (name.endsWith(' argument')) { + // For cases like 'first argument' + msg += `${name} ` + } else { + msg += `"${name}" ${name.includes('.') ? 'property' : 'argument'} ` + } + + msg += 'must be ' + + const types = [] + const instances = [] + const other = [] + + for (const value of expected) { + assert(typeof value === 'string', 'All expected entries have to be of type string') + + if (kTypes.includes(value)) { + types.push(value.toLowerCase()) + } else if (classRegExp.test(value)) { + instances.push(value) + } else { + assert(value !== 'object', 'The value "object" should be written as "Object"') + other.push(value) + } + } + + // Special handle `object` in case other instances are allowed to outline + // the differences between each other. 
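    // Illustrative output of this message builder (values assumed):
    //   new codes.ERR_INVALID_ARG_TYPE('first', ['string', 'Buffer'], 42).message
    //   // -> 'The "first" argument must be of type string or an instance of
    //   //     Buffer. Received type number (42)'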
+ if (instances.length > 0) { + const pos = types.indexOf('object') + + if (pos !== -1) { + types.splice(types, pos, 1) + instances.push('Object') + } + } + + if (types.length > 0) { + switch (types.length) { + case 1: + msg += `of type ${types[0]}` + break + case 2: + msg += `one of type ${types[0]} or ${types[1]}` + break + default: { + const last = types.pop() + msg += `one of type ${types.join(', ')}, or ${last}` + } + } + + if (instances.length > 0 || other.length > 0) { + msg += ' or ' + } + } + + if (instances.length > 0) { + switch (instances.length) { + case 1: + msg += `an instance of ${instances[0]}` + break + case 2: + msg += `an instance of ${instances[0]} or ${instances[1]}` + break + default: { + const last = instances.pop() + msg += `an instance of ${instances.join(', ')}, or ${last}` + } + } + + if (other.length > 0) { + msg += ' or ' + } + } + + switch (other.length) { + case 0: + break + case 1: + if (other[0].toLowerCase() !== other[0]) { + msg += 'an ' + } + + msg += `${other[0]}` + break + case 2: + msg += `one of ${other[0]} or ${other[1]}` + break + default: { + const last = other.pop() + msg += `one of ${other.join(', ')}, or ${last}` + } + } + + if (actual == null) { + msg += `. Received ${actual}` + } else if (typeof actual === 'function' && actual.name) { + msg += `. Received function ${actual.name}` + } else if (typeof actual === 'object') { + if (actual.constructor?.name) { + msg += `. Received an instance of ${actual.constructor.name}` + } else { + const inspected = inspect(actual, { depth: -1 }) + msg += `. Received ${inspected}` + } + } else { + let inspected = inspect(actual, { colors: false }) + if (inspected.length > 25) { + inspected = `${inspected.slice(0, 25)}...` + } + msg += `. Received type ${typeof actual} (${inspected})` + } + return msg + }, + TypeError +) + +E( + 'ERR_INVALID_ARG_VALUE', + (name, value, reason = 'is invalid') => { + let inspected = inspect(value) + if (inspected.length > 128) { + inspected = inspected.slice(0, 128) + '...' + } + const type = name.includes('.') ? 'property' : 'argument' + return `The ${type} '${name}' ${reason}. Received ${inspected}` + }, + TypeError +) + +E( + 'ERR_INVALID_RETURN_VALUE', + (input, name, value) => { + const type = value?.constructor?.name ? `instance of ${value.constructor.name}` : `type ${typeof value}` + return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.` + }, + TypeError +) + +E( + 'ERR_MISSING_ARGS', + (...args) => { + assert(args.length > 0, 'At least one arg needs to be specified') + + let msg + const len = args.length + args = (Array.isArray(args) ? args : [args]).map((a) => `"${a}"`).join(' or ') + + switch (len) { + case 1: + msg += `The ${args[0]} argument` + break + case 2: + msg += `The ${args[0]} and ${args[1]} arguments` + break + default: + { + const last = args.pop() + msg += `The ${args.join(', ')}, and ${last} arguments` + } + break + } + + return `${msg} must be specified` + }, + TypeError +) + +E( + 'ERR_OUT_OF_RANGE', + (str, range, input) => { + assert(range, 'Missing "range" argument') + + let received + + if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) { + received = addNumericalSeparator(String(input)) + } else if (typeof input === 'bigint') { + received = String(input) + + if (input > 2n ** 32n || input < -(2n ** 32n)) { + received = addNumericalSeparator(received) + } + + received += 'n' + } else { + received = inspect(input) + } + + return `The value of "${str}" is out of range. It must be ${range}. 
Received ${received}` + }, + RangeError +) + +E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error) +E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error) +E('ERR_STREAM_ALREADY_FINISHED', 'Cannot call %s after a stream was finished', Error) +E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error) +E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error) +E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError) +E('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error) +E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error) +E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event', Error) +E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error) +E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError) + +module.exports = { + AbortError, + aggregateTwoErrors: hideStackFrames(aggregateTwoErrors), + hideStackFrames, + codes +} diff --git a/lib/index.js b/lib/ours/index.js similarity index 96% rename from lib/index.js rename to lib/ours/index.js index e9b59ded0e..6cd12dcfb3 100644 --- a/lib/index.js +++ b/lib/ours/index.js @@ -32,8 +32,8 @@ if (Stream && process.env.READABLE_STREAM === 'disable') { module.exports.Stream = Stream.Stream } else { - const CustomStream = require('./stream') - const promises = require('./stream/promises') + const CustomStream = require('../stream') + const promises = require('../stream/promises') const originalDestroy = CustomStream.Readable.destroy module.exports = CustomStream.Readable diff --git a/lib/ours/primordials.js b/lib/ours/primordials.js new file mode 100644 index 0000000000..14e2680bbc --- /dev/null +++ b/lib/ours/primordials.js @@ -0,0 +1,101 @@ +'use strict' + +/* + This file is a reduced and adapted version of the main lib/internal/per_context/primordials.js file defined at + + https://github.com/nodejs/node/blob/master/lib/internal/per_context/primordials.js + + Don't try to replace with the original file and keep it up to date with the upstream file. 
+*/ + +module.exports = { + ArrayIsArray(self) { + return Array.isArray(self) + }, + ArrayPrototypeIncludes(self, el) { + return self.includes(el) + }, + ArrayPrototypeIndexOf(self, el) { + return self.indexOf(el) + }, + ArrayPrototypeJoin(self, sep) { + return self.join(sep) + }, + ArrayPrototypeMap(self, fn) { + return self.map(fn) + }, + ArrayPrototypePop(self, el) { + return self.pop(el) + }, + ArrayPrototypePush(self, el) { + return self.push(el) + }, + ArrayPrototypeSlice(self, start, end) { + return self.slice(start, end) + }, + Error, + FunctionPrototypeCall(fn, thisArgs, ...args) { + return fn.call(thisArgs, ...args) + }, + FunctionPrototypeSymbolHasInstance(self, instance) { + return Function.prototype[Symbol.hasInstance].call(self, instance) + }, + MathFloor: Math.floor, + Number, + NumberIsInteger: Number.isInteger, + NumberIsNaN: Number.isNaN, + NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER, + NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER, + NumberParseInt: Number.parseInt, + ObjectDefineProperties(self, props) { + return Object.defineProperties(self, props) + }, + ObjectDefineProperty(self, name, prop) { + return Object.defineProperty(self, name, prop) + }, + ObjectGetOwnPropertyDescriptor(self, name) { + return Object.getOwnPropertyDescriptor(self, name) + }, + ObjectKeys(obj) { + return Object.keys(obj) + }, + ObjectSetPrototypeOf(target, proto) { + return Object.setPrototypeOf(target, proto) + }, + Promise, + PromisePrototypeCatch(self, fn) { + return self.catch(fn) + }, + PromisePrototypeThen(self, thenFn, catchFn) { + return self.then(thenFn, catchFn) + }, + PromiseReject(err) { + return Promise.reject(err) + }, + ReflectApply: Reflect.apply, + RegExpPrototypeTest(self, value) { + return self.test(value) + }, + SafeSet: Set, + String, + StringPrototypeSlice(self, start, end) { + return self.slice(start, end) + }, + StringPrototypeToLowerCase(self) { + return self.toLowerCase() + }, + StringPrototypeToUpperCase(self) { + return self.toUpperCase() + }, + StringPrototypeTrim(self) { + return self.trim() + }, + Symbol, + SymbolAsyncIterator: Symbol.asyncIterator, + SymbolHasInstance: Symbol.hasInstance, + SymbolIterator: Symbol.iterator, + TypedArrayPrototypeSet(self, buf, len) { + return self.set(buf, len) + }, + Uint8Array +} diff --git a/lib/ours/util.js b/lib/ours/util.js new file mode 100644 index 0000000000..210fa18fb5 --- /dev/null +++ b/lib/ours/util.js @@ -0,0 +1,65 @@ +'use strict' + +const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor + +module.exports = { + once(callback) { + let called = false + return function (...args) { + if (called) { + return + } + called = true + callback.apply(this, args) + } + }, + createDeferredPromise: function () { + let resolve + let reject + // eslint-disable-next-line promise/param-names + const promise = new Promise((res, rej) => { + resolve = res + reject = rej + }) + return { promise, resolve, reject } + }, + // All following functions are just used in browser + debuglog() { + return function () {} + }, + format(format, ...args) { + // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args + return format.replace(/%([sdifj])/g, function (...[_unused, type]) { + const replacement = args.shift() + + if (type === 'f') { + return replacement.toFixed(6) + } else if (type === 'j') { + return JSON.stringify(replacement) + } else { + return replacement.toString() + } + }) + }, + promisify(fn) { + return new Promise((resolve, reject) => { + fn((err, ...args) => { + if (err) { + return 
reject(err) + } + return resolve(...args) + }) + }) + }, + inspect: require('object-inspect'), + types: { + isAsyncFunction(fn) { + return fn instanceof AsyncFunction + }, + isArrayBufferView(arr) { + return ArrayBuffer.isView(arr) + } + } +} + +module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom') diff --git a/lib/stream.js b/lib/stream.js index 469205bd7a..299713c3d4 100644 --- a/lib/stream.js +++ b/lib/stream.js @@ -25,11 +25,11 @@ const { ObjectDefineProperty, ObjectKeys, ReflectApply, -} = require('./internal/primordials'); +} = require('./ours/primordials'); const { promisify: { custom: customPromisify }, -} = require('./util'); +} = require('util'); const { streamReturningOperators, @@ -40,7 +40,7 @@ const { codes: { ERR_ILLEGAL_CONSTRUCTOR, }, -} = require('./internal/errors'); +} = require('./ours/errors'); const compose = require('./internal/streams/compose'); const { pipeline } = require('./internal/streams/pipeline'); const { destroyer } = require('./internal/streams/destroy'); diff --git a/lib/stream/promises.js b/lib/stream/promises.js index 7d5e7f680c..a9b726e9ee 100644 --- a/lib/stream/promises.js +++ b/lib/stream/promises.js @@ -3,7 +3,7 @@ const { ArrayPrototypePop, Promise, -} = require('../internal/primordials'); +} = require('../ours/primordials'); const { isIterable, diff --git a/lib/util.js b/lib/util.js deleted file mode 100644 index 393a56e534..0000000000 --- a/lib/util.js +++ /dev/null @@ -1,78 +0,0 @@ -'use strict' - -module.exports = { - debuglog() { - return function () {} - }, - once(callback) { - let called = false - return function (...args) { - if (called) { - return - } - called = true - callback.apply(this, args) - } - }, - promisify: function (fn) { - return new Promise((resolve, reject) => { - fn((err, ...args) => { - if (err) { - return reject(err) - } - return resolve(...args) - }) - }) - }, - createDeferredPromise: function () { - let resolve - let reject - - // eslint-disable-next-line promise/param-names - const promise = new Promise((res, rej) => { - resolve = res - reject = rej - }) - - return { promise, resolve, reject } - }, - isError(err) { - return err instanceof Error - }, - ...require('util').types, - // isAsyncFunction, - // isArrayBufferView, - // isRegExp, - // isDate, - - // isAnyArrayBuffer, - // isDataView, - // isPromise, - // isWeakSet, - // isWeakMap, - // isModuleNamespaceObject, - // isBoxedPrimitive, - // isExternal, - // isArgumentsObject, - // isGeneratorFunction, - // Keep in sync with https://github.com/nodejs/node/blob/master/typings/internalBinding/util.d.ts - propertyFilter: { - ALL_PROPERTIES: 0, - ONLY_ENUMERABLE: 2 - }, - // The following methods are not 100% accurate, but there are no equivalent on user-land JS outside of V8 - getProxyDetails(proxy) { - return undefined - }, - getConstructorName(obj) { - return obj !== 'undefined' ? 'undefined' : obj.constructor?.name ?? 
'Object' - }, - getOwnNonIndexProperties(obj) { - return Object.getOwnPropertyNames(obj) - }, - join(arr, separator) { - return arr.join(separator) - } -} - -module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom') diff --git a/package.json b/package.json index b2709ad3fb..9c1d4c69e7 100644 --- a/package.json +++ b/package.json @@ -22,30 +22,30 @@ "bugs": { "url": "https://github.com/nodejs/readable-stream/issues" }, - "main": "lib/index.js", + "main": "lib/ours/index.js", "files": [ "lib", "LICENSE", "README.md" ], "browser": { - "util": false, - "worker_threads": false, - "./lib/index.js": "./lib/browser.js", - "./lib/internal/inspect.js": "./lib/internal/inspect-browser.js" + "util": "./lib/ours/util.js", + "./lib/index.js": "./lib/ours/browser.js" }, "scripts": { "build": "node build/build.mjs", + "postbuild": "prettier -w lib test", "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", - "test:browsers": "airtap test/browser/test-*.js", + "test:browsers": "airtap -p sauce test/browser/test-*.js", "test:browsers:local": "airtap -p local test/browser/test-*.js", "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", "format": "prettier -w src", "lint": "eslint src" }, "dependencies": { + "abort-controller": "^3.0.0", "aggregate-error": "^3.1.0", - "abort-controller": "^3.0.0" + "object-inspect": "^1.12.0" }, "devDependencies": { "@sinonjs/fake-timers": "^9.1.1", diff --git a/src/browser.js b/src/browser.js index c38850733a..ac901c17e7 100644 --- a/src/browser.js +++ b/src/browser.js @@ -1,7 +1,7 @@ 'use strict' -const CustomStream = require('./stream') -const promises = require('./stream/promises') +const CustomStream = require('../stream') +const promises = require('../stream/promises') const originalDestroy = CustomStream.Readable.destroy module.exports = CustomStream.Readable diff --git a/src/errors.js b/src/errors.js new file mode 100644 index 0000000000..07948aa1d0 --- /dev/null +++ b/src/errors.js @@ -0,0 +1,353 @@ +'use strict' + +/* + This file is a reduced and adapted version of the main lib/internal/errors.js file defined at + + https://github.com/nodejs/node/blob/master/lib/internal/errors.js + + Don't try to replace with the original file and keep it up to date (starting from E(...) definitions) + with the upstream file. +*/ + +if (typeof AggregateError === 'undefined') { + globalThis.AggregateError = require('aggregate-error') +} + +const assert = require('assert') +const { inspect, format } = require('util') + +const kIsNodeError = Symbol('kIsNodeError') +const kTypes = [ + 'string', + 'function', + 'number', + 'object', + // Accept 'Function' and 'Object' as alternative to the lower cased version. + 'Function', + 'Object', + 'boolean', + 'bigint', + 'symbol' +] +const classRegExp = /^([A-Z][a-z0-9]*)+$/ +const nodeInternalPrefix = '__node_internal_' +const codes = {} + +// Only use this for integers! Decimal numbers do not work with this function. +function addNumericalSeparator(val) { + let res = '' + let i = val.length + const start = val[0] === '-' ? 1 : 0 + for (; i >= start + 4; i -= 3) { + res = `_${val.slice(i - 3, i)}${res}` + } + return `${val.slice(0, i)}${res}` +} + +function getMessage(key, msg, args) { + if (typeof msg === 'function') { + assert( + msg.length <= args.length, // Default options do not count. 
+ `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).` + ) + + return msg(...args) + } + + const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length + + assert( + expectedLength === args.length, + `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).` + ) + + if (args.length === 0) { + return msg + } + + return format(msg, ...args) +} + +function E(code, message, Base) { + if (!Base) { + Base = Error + } + + class NodeError extends Base { + constructor(...args) { + super(getMessage(code, message, args)) + } + + toString() { + return `${this.name} [${code}]: ${this.message}` + } + } + + NodeError.prototype.name = Base.name + NodeError.prototype.code = code + NodeError.prototype[kIsNodeError] = true + NodeError.prototype.toString = function () { + return `${this.name} [${code}]: ${this.message}` + } + + codes[code] = NodeError +} + +function hideStackFrames(fn) { + // We rename the functions that will be hidden to cut off the stacktrace + // at the outermost one + const hidden = nodeInternalPrefix + fn.name + Object.defineProperty(fn, 'name', { value: hidden }) + return fn +} + +function aggregateTwoErrors(innerError, outerError) { + if (innerError && outerError && innerError !== outerError) { + if (Array.isArray(outerError.errors)) { + // If `outerError` is already an `AggregateError`. + outerError.errors.push(innerError) + return outerError + } + + const err = new AggregateError([outerError, innerError], outerError.message) + err.code = outerError.code + return err + } + + return innerError || outerError +} + +class AbortError extends Error { + constructor(message = 'The operation was aborted', options = undefined) { + if (options !== undefined && typeof options !== 'object') { + throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options) + } + + super(message, options) + this.code = 'ABORT_ERR' + this.name = 'AbortError' + } +} + +E( + 'ERR_INVALID_ARG_TYPE', + (name, expected, actual) => { + assert(typeof name === 'string', "'name' must be a string") + + if (!Array.isArray(expected)) { + expected = [expected] + } + + let msg = 'The ' + if (name.endsWith(' argument')) { + // For cases like 'first argument' + msg += `${name} ` + } else { + msg += `"${name}" ${name.includes('.') ? 'property' : 'argument'} ` + } + + msg += 'must be ' + + const types = [] + const instances = [] + const other = [] + + for (const value of expected) { + assert(typeof value === 'string', 'All expected entries have to be of type string') + + if (kTypes.includes(value)) { + types.push(value.toLowerCase()) + } else if (classRegExp.test(value)) { + instances.push(value) + } else { + assert(value !== 'object', 'The value "object" should be written as "Object"') + other.push(value) + } + } + + // Special handle `object` in case other instances are allowed to outline + // the differences between each other. 
+ if (instances.length > 0) { + const pos = types.indexOf('object') + + if (pos !== -1) { + types.splice(types, pos, 1) + instances.push('Object') + } + } + + if (types.length > 0) { + switch (types.length) { + case 1: + msg += `of type ${types[0]}` + break + case 2: + msg += `one of type ${types[0]} or ${types[1]}` + break + default: { + const last = types.pop() + msg += `one of type ${types.join(', ')}, or ${last}` + } + } + + if (instances.length > 0 || other.length > 0) { + msg += ' or ' + } + } + + if (instances.length > 0) { + switch (instances.length) { + case 1: + msg += `an instance of ${instances[0]}` + break + case 2: + msg += `an instance of ${instances[0]} or ${instances[1]}` + break + default: { + const last = instances.pop() + msg += `an instance of ${instances.join(', ')}, or ${last}` + } + } + + if (other.length > 0) { + msg += ' or ' + } + } + + switch (other.length) { + case 0: + break + case 1: + if (other[0].toLowerCase() !== other[0]) { + msg += 'an ' + } + + msg += `${other[0]}` + break + case 2: + msg += `one of ${other[0]} or ${other[1]}` + break + default: { + const last = other.pop() + msg += `one of ${other.join(', ')}, or ${last}` + } + } + + if (actual == null) { + msg += `. Received ${actual}` + } else if (typeof actual === 'function' && actual.name) { + msg += `. Received function ${actual.name}` + } else if (typeof actual === 'object') { + if (actual.constructor?.name) { + msg += `. Received an instance of ${actual.constructor.name}` + } else { + const inspected = inspect(actual, { depth: -1 }) + msg += `. Received ${inspected}` + } + } else { + let inspected = inspect(actual, { colors: false }) + if (inspected.length > 25) { + inspected = `${inspected.slice(0, 25)}...` + } + msg += `. Received type ${typeof actual} (${inspected})` + } + return msg + }, + TypeError +) + +E( + 'ERR_INVALID_ARG_VALUE', + (name, value, reason = 'is invalid') => { + let inspected = inspect(value) + if (inspected.length > 128) { + inspected = inspected.slice(0, 128) + '...' + } + const type = name.includes('.') ? 'property' : 'argument' + return `The ${type} '${name}' ${reason}. Received ${inspected}` + }, + TypeError +) + +E( + 'ERR_INVALID_RETURN_VALUE', + (input, name, value) => { + const type = value?.constructor?.name ? `instance of ${value.constructor.name}` : `type ${typeof value}` + return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.` + }, + TypeError +) + +E( + 'ERR_MISSING_ARGS', + (...args) => { + assert(args.length > 0, 'At least one arg needs to be specified') + + let msg + const len = args.length + args = (Array.isArray(args) ? args : [args]).map((a) => `"${a}"`).join(' or ') + + switch (len) { + case 1: + msg += `The ${args[0]} argument` + break + case 2: + msg += `The ${args[0]} and ${args[1]} arguments` + break + default: + { + const last = args.pop() + msg += `The ${args.join(', ')}, and ${last} arguments` + } + break + } + + return `${msg} must be specified` + }, + TypeError +) + +E( + 'ERR_OUT_OF_RANGE', + (str, range, input) => { + assert(range, 'Missing "range" argument') + + let received + + if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) { + received = addNumericalSeparator(String(input)) + } else if (typeof input === 'bigint') { + received = String(input) + + if (input > 2n ** 32n || input < -(2n ** 32n)) { + received = addNumericalSeparator(received) + } + + received += 'n' + } else { + received = inspect(input) + } + + return `The value of "${str}" is out of range. It must be ${range}. 
Received ${received}` + }, + RangeError +) + +E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error) +E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error) +E('ERR_STREAM_ALREADY_FINISHED', 'Cannot call %s after a stream was finished', Error) +E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error) +E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error) +E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError) +E('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error) +E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error) +E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event', Error) +E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error) +E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError) + +module.exports = { + AbortError, + aggregateTwoErrors: hideStackFrames(aggregateTwoErrors), + hideStackFrames, + codes +} diff --git a/src/index.js b/src/index.js index e9b59ded0e..6cd12dcfb3 100644 --- a/src/index.js +++ b/src/index.js @@ -32,8 +32,8 @@ if (Stream && process.env.READABLE_STREAM === 'disable') { module.exports.Stream = Stream.Stream } else { - const CustomStream = require('./stream') - const promises = require('./stream/promises') + const CustomStream = require('../stream') + const promises = require('../stream/promises') const originalDestroy = CustomStream.Readable.destroy module.exports = CustomStream.Readable diff --git a/src/primordials.js b/src/primordials.js new file mode 100644 index 0000000000..14e2680bbc --- /dev/null +++ b/src/primordials.js @@ -0,0 +1,101 @@ +'use strict' + +/* + This file is a reduced and adapted version of the main lib/internal/per_context/primordials.js file defined at + + https://github.com/nodejs/node/blob/master/lib/internal/per_context/primordials.js + + Don't try to replace with the original file and keep it up to date with the upstream file. 
+*/ + +module.exports = { + ArrayIsArray(self) { + return Array.isArray(self) + }, + ArrayPrototypeIncludes(self, el) { + return self.includes(el) + }, + ArrayPrototypeIndexOf(self, el) { + return self.indexOf(el) + }, + ArrayPrototypeJoin(self, sep) { + return self.join(sep) + }, + ArrayPrototypeMap(self, fn) { + return self.map(fn) + }, + ArrayPrototypePop(self, el) { + return self.pop(el) + }, + ArrayPrototypePush(self, el) { + return self.push(el) + }, + ArrayPrototypeSlice(self, start, end) { + return self.slice(start, end) + }, + Error, + FunctionPrototypeCall(fn, thisArgs, ...args) { + return fn.call(thisArgs, ...args) + }, + FunctionPrototypeSymbolHasInstance(self, instance) { + return Function.prototype[Symbol.hasInstance].call(self, instance) + }, + MathFloor: Math.floor, + Number, + NumberIsInteger: Number.isInteger, + NumberIsNaN: Number.isNaN, + NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER, + NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER, + NumberParseInt: Number.parseInt, + ObjectDefineProperties(self, props) { + return Object.defineProperties(self, props) + }, + ObjectDefineProperty(self, name, prop) { + return Object.defineProperty(self, name, prop) + }, + ObjectGetOwnPropertyDescriptor(self, name) { + return Object.getOwnPropertyDescriptor(self, name) + }, + ObjectKeys(obj) { + return Object.keys(obj) + }, + ObjectSetPrototypeOf(target, proto) { + return Object.setPrototypeOf(target, proto) + }, + Promise, + PromisePrototypeCatch(self, fn) { + return self.catch(fn) + }, + PromisePrototypeThen(self, thenFn, catchFn) { + return self.then(thenFn, catchFn) + }, + PromiseReject(err) { + return Promise.reject(err) + }, + ReflectApply: Reflect.apply, + RegExpPrototypeTest(self, value) { + return self.test(value) + }, + SafeSet: Set, + String, + StringPrototypeSlice(self, start, end) { + return self.slice(start, end) + }, + StringPrototypeToLowerCase(self) { + return self.toLowerCase() + }, + StringPrototypeToUpperCase(self) { + return self.toUpperCase() + }, + StringPrototypeTrim(self) { + return self.trim() + }, + Symbol, + SymbolAsyncIterator: Symbol.asyncIterator, + SymbolHasInstance: Symbol.hasInstance, + SymbolIterator: Symbol.iterator, + TypedArrayPrototypeSet(self, buf, len) { + return self.set(buf, len) + }, + Uint8Array +} diff --git a/src/test/ours/test-errors.js b/src/test/ours/test-errors.js index 079e1017a3..a300f17075 100644 --- a/src/test/ours/test-errors.js +++ b/src/test/ours/test-errors.js @@ -1,7 +1,7 @@ 'use strict' const t = require('tap') -const { codes: errors } = require('../../lib/internal/errors') +const { codes: errors } = require('../../lib/ours/errors') function checkError(err, Base, name, code, message) { t.ok(err instanceof Base) diff --git a/src/test/ours/test-fake-timers.js b/src/test/ours/test-fake-timers.js index ec16c10f4d..be95e071a7 100644 --- a/src/test/ours/test-fake-timers.js +++ b/src/test/ours/test-fake-timers.js @@ -4,7 +4,7 @@ require('../common') const t = require('tap') const util = require('util') const fakeTimers = require('@sinonjs/fake-timers') -const Transform = require('../../lib').Transform +const Transform = require('../../lib/ours/index').Transform t.plan(1) diff --git a/src/test/ours/test-stream-sync-write.js b/src/test/ours/test-stream-sync-write.js index 7a467ec2dd..a12085a238 100644 --- a/src/test/ours/test-stream-sync-write.js +++ b/src/test/ours/test-stream-sync-write.js @@ -3,7 +3,7 @@ require('../common') const t = require('tap') const util = require('util') -const stream = require('../../lib') +const stream = 
require('../../lib/ours/index') const WritableStream = stream.Writable t.plan(1) diff --git a/src/test/test-browser.js b/src/test/test-browser.js index e7ad511125..b555996722 100644 --- a/src/test/test-browser.js +++ b/src/test/test-browser.js @@ -2,33 +2,6 @@ const test = require('tape') -// if (!global.console) { -// global.console = {} -// } -// if (!global.console.log) { -// global.console.log = function () {} -// } -// if (!global.console.error) { -// global.console.error = global.console.log -// } -// if (!global.console.info) { -// global.console.info = global.console.log -// } - -// // TODO: add replacements instead -// global.process = { -// env: {}, -// on: function () {}, -// cwd: function () { -// return '/' -// }, -// binding: function () { -// return { -// hasTracing: false -// } -// } -// } - test('streams', function (t) { require('./browser/test-stream-big-packet')(t) require('./browser/test-stream-big-push')(t) @@ -68,8 +41,8 @@ test('streams 2', function (t) { require('./browser/test-stream2-pipe-error-once-listener')(t) require('./browser/test-stream2-push')(t) require('./browser/test-stream2-readable-empty-buffer-no-eof')(t) - // require('./browser/test-stream2-readable-from-list')(t); - // require('./browser/test-stream2-transform')(t); + require('./browser/test-stream2-readable-from-list')(t) + require('./browser/test-stream2-transform')(t) require('./browser/test-stream2-set-encoding')(t) require('./browser/test-stream2-readable-legacy-drain')(t) require('./browser/test-stream2-readable-wrap-empty')(t) diff --git a/src/util.js b/src/util.js index 393a56e534..210fa18fb5 100644 --- a/src/util.js +++ b/src/util.js @@ -1,9 +1,8 @@ 'use strict' +const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor + module.exports = { - debuglog() { - return function () {} - }, once(callback) { let called = false return function (...args) { @@ -14,64 +13,52 @@ module.exports = { callback.apply(this, args) } }, - promisify: function (fn) { - return new Promise((resolve, reject) => { - fn((err, ...args) => { - if (err) { - return reject(err) - } - return resolve(...args) - }) - }) - }, createDeferredPromise: function () { let resolve let reject - // eslint-disable-next-line promise/param-names const promise = new Promise((res, rej) => { resolve = res reject = rej }) - return { promise, resolve, reject } }, - isError(err) { - return err instanceof Error + // All following functions are just used in browser + debuglog() { + return function () {} }, - ...require('util').types, - // isAsyncFunction, - // isArrayBufferView, - // isRegExp, - // isDate, + format(format, ...args) { + // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args + return format.replace(/%([sdifj])/g, function (...[_unused, type]) { + const replacement = args.shift() - // isAnyArrayBuffer, - // isDataView, - // isPromise, - // isWeakSet, - // isWeakMap, - // isModuleNamespaceObject, - // isBoxedPrimitive, - // isExternal, - // isArgumentsObject, - // isGeneratorFunction, - // Keep in sync with https://github.com/nodejs/node/blob/master/typings/internalBinding/util.d.ts - propertyFilter: { - ALL_PROPERTIES: 0, - ONLY_ENUMERABLE: 2 - }, - // The following methods are not 100% accurate, but there are no equivalent on user-land JS outside of V8 - getProxyDetails(proxy) { - return undefined - }, - getConstructorName(obj) { - return obj !== 'undefined' ? 'undefined' : obj.constructor?.name ?? 
'Object' + if (type === 'f') { + return replacement.toFixed(6) + } else if (type === 'j') { + return JSON.stringify(replacement) + } else { + return replacement.toString() + } + }) }, - getOwnNonIndexProperties(obj) { - return Object.getOwnPropertyNames(obj) + promisify(fn) { + return new Promise((resolve, reject) => { + fn((err, ...args) => { + if (err) { + return reject(err) + } + return resolve(...args) + }) + }) }, - join(arr, separator) { - return arr.join(separator) + inspect: require('object-inspect'), + types: { + isAsyncFunction(fn) { + return fn instanceof AsyncFunction + }, + isArrayBufferView(arr) { + return ArrayBuffer.isView(arr) + } } } diff --git a/test/common/index.js b/test/common/index.js index 3646f7864f..147676c34a 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -120,6 +120,17 @@ const isFreeBSD = process.platform === 'freebsd'; const isOpenBSD = process.platform === 'openbsd'; const isLinux = process.platform === 'linux'; const isOSX = process.platform === 'darwin'; +const isPi = (() => { + try { + // Normal Raspberry Pi detection is to find the `Raspberry Pi` string in + // the contents of `/sys/firmware/devicetree/base/model` but that doesn't + // work inside a container. Match the chipset model number instead. + const cpuinfo = fs.readFileSync('/proc/cpuinfo', { encoding: 'utf8' }); + return /^Hardware\s*:\s*(.*)$/im.exec(cpuinfo)?.[1] === 'BCM2835'; + } catch { + return false; + } +})(); const isDumbTerminal = process.env.TERM === 'dumb'; @@ -246,15 +257,10 @@ function platformTimeout(ms) { if (isAIX) return multipliers.two * ms; // Default localhost speed is slower on AIX - if (process.arch !== 'arm') - return ms; - - const armv = process.config.variables.arm_version; + if (isPi) + return multipliers.two * ms; // Raspberry Pi devices - if (armv === '7') - return multipliers.two * ms; // ARMv7 - - return ms; // ARMv8+ + return ms; } @@ -307,13 +313,7 @@ if (global.structuredClone) { } if (global.fetch) { - knownGlobals.push( - global.fetch, - global.FormData, - global.Request, - global.Response, - global.Headers, - ); + knownGlobals.push(fetch); } if (hasCrypto && global.crypto) { knownGlobals.push(global.crypto); @@ -321,6 +321,27 @@ if (hasCrypto && global.crypto) { knownGlobals.push(global.CryptoKey); knownGlobals.push(global.SubtleCrypto); } +if (global.ReadableStream) { + knownGlobals.push( + global.ReadableStream, + global.ReadableStreamDefaultReader, + global.ReadableStreamBYOBReader, + global.ReadableStreamBYOBRequest, + global.ReadableByteStreamController, + global.ReadableStreamDefaultController, + global.TransformStream, + global.TransformStreamDefaultController, + global.WritableStream, + global.WritableStreamDefaultWriter, + global.WritableStreamDefaultController, + global.ByteLengthQueuingStrategy, + global.CountQueuingStrategy, + global.TextEncoderStream, + global.TextDecoderStream, + global.CompressionStream, + global.DecompressionStream, + ); +} function allowGlobals(...allowlist) { knownGlobals = knownGlobals.concat(allowlist); @@ -802,6 +823,7 @@ const common = { isMainThread, isOpenBSD, isOSX, + isPi, isSunOS, isWindows, localIPv6Hosts, @@ -836,7 +858,7 @@ const common = { const re = isWindows ? 
/Loopback Pseudo-Interface/ : /lo/; return Object.keys(iFaces).some((name) => { return re.test(name) && - iFaces[name].some(({ family }) => family === 'IPv6'); + iFaces[name].some(({ family }) => family === 6); }); }, diff --git a/test/ours/test-errors.js b/test/ours/test-errors.js index 079e1017a3..a300f17075 100644 --- a/test/ours/test-errors.js +++ b/test/ours/test-errors.js @@ -1,7 +1,7 @@ 'use strict' const t = require('tap') -const { codes: errors } = require('../../lib/internal/errors') +const { codes: errors } = require('../../lib/ours/errors') function checkError(err, Base, name, code, message) { t.ok(err instanceof Base) diff --git a/test/ours/test-fake-timers.js b/test/ours/test-fake-timers.js index ec16c10f4d..be95e071a7 100644 --- a/test/ours/test-fake-timers.js +++ b/test/ours/test-fake-timers.js @@ -4,7 +4,7 @@ require('../common') const t = require('tap') const util = require('util') const fakeTimers = require('@sinonjs/fake-timers') -const Transform = require('../../lib').Transform +const Transform = require('../../lib/ours/index').Transform t.plan(1) diff --git a/test/ours/test-stream-sync-write.js b/test/ours/test-stream-sync-write.js index 7a467ec2dd..a12085a238 100644 --- a/test/ours/test-stream-sync-write.js +++ b/test/ours/test-stream-sync-write.js @@ -3,7 +3,7 @@ require('../common') const t = require('tap') const util = require('util') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') const WritableStream = stream.Writable t.plan(1) diff --git a/test/parallel/test-readable-from-iterator-closing.js b/test/parallel/test-readable-from-iterator-closing.js index c46e3fb958..0bcb4e66be 100644 --- a/test/parallel/test-readable-from-iterator-closing.js +++ b/test/parallel/test-readable-from-iterator-closing.js @@ -6,7 +6,7 @@ ; const { mustCall, mustNotCall } = require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const { strictEqual } = require('assert'); async function asyncSupport() { diff --git a/test/parallel/test-readable-from.js b/test/parallel/test-readable-from.js index b433ea52f4..d1bdd26cda 100644 --- a/test/parallel/test-readable-from.js +++ b/test/parallel/test-readable-from.js @@ -7,7 +7,7 @@ const { mustCall } = require('../common'); const { once } = require('events'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const { strictEqual, throws } = require('assert'); const common = require('../common'); diff --git a/test/parallel/test-readable-large-hwm.js b/test/parallel/test-readable-large-hwm.js index cda0d95e89..fbfee6acbe 100644 --- a/test/parallel/test-readable-large-hwm.js +++ b/test/parallel/test-readable-large-hwm.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); // Make sure that readable completes // even when reading larger buffer. 
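
Reviewer note on the shims above: src/util.js (copied to lib/ours/util.js by the build step) re-implements only the handful of util APIs the stream code actually touches, so two behaviours are worth spelling out. format() substitutes only the %s, %d, %i, %f and %j placeholders, and promisify() is not Node's util.promisify: it invokes the callback-style function immediately and resolves with its result rather than returning a wrapped function. The sketch below is illustrative only; the require path and sample values are assumptions, while the behaviour follows the shim as added in this patch.

'use strict'

// Illustrative usage of the reduced browser util shim (lib/ours/util.js); path is an assumption.
const { format, promisify, types } = require('./lib/ours/util')

// format() only understands %s, %d, %i, %f and %j.
console.log(format('pushed %d chunks of %s', 3, 'data')) // -> 'pushed 3 chunks of data'

// promisify() runs the callback-style thunk right away and returns a promise,
// unlike Node's util.promisify which returns a new function.
promisify((cb) => cb(null, 'done')).then((value) => console.log(value)) // -> 'done'

// types.* helpers are re-implemented without V8 internals:
// isAsyncFunction() checks against the AsyncFunction constructor,
// isArrayBufferView() delegates to ArrayBuffer.isView().
console.log(types.isAsyncFunction(async function () {})) // -> true
console.log(types.isArrayBufferView(new Uint8Array(4))) // -> true
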
diff --git a/test/parallel/test-readable-single-end.js b/test/parallel/test-readable-single-end.js index 1d2c231784..4c274eb0b6 100644 --- a/test/parallel/test-readable-single-end.js +++ b/test/parallel/test-readable-single-end.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); // This test ensures that there will not be an additional empty 'readable' // event when stream has ended (only 1 event signalling about end) diff --git a/test/parallel/test-stream-add-abort-signal.js b/test/parallel/test-stream-add-abort-signal.js index 71af04d618..ba582aeb1d 100644 --- a/test/parallel/test-stream-add-abort-signal.js +++ b/test/parallel/test-stream-add-abort-signal.js @@ -8,7 +8,7 @@ require('../common'); const assert = require('assert'); -const { addAbortSignal, Readable } = require('../../lib'); +const { addAbortSignal, Readable } = require('../../lib/ours/index'); const { addAbortSignalNoValidate, } = require('../../lib/internal/streams/add-abort-signal'); diff --git a/test/parallel/test-stream-aliases-legacy.js b/test/parallel/test-stream-aliases-legacy.js index 629b6a3986..f30c305198 100644 --- a/test/parallel/test-stream-aliases-legacy.js +++ b/test/parallel/test-stream-aliases-legacy.js @@ -8,7 +8,7 @@ require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); // Verify that all individual aliases are left in place. diff --git a/test/parallel/test-stream-asIndexedPairs.mjs b/test/parallel/test-stream-asIndexedPairs.mjs index 147eabef09..a103920eef 100644 --- a/test/parallel/test-stream-asIndexedPairs.mjs +++ b/test/parallel/test-stream-asIndexedPairs.mjs @@ -1,5 +1,5 @@ import '../common/index.mjs'; -import { Readable }from '../../lib/index.js'; +import { Readable }from '../../lib/ours/index.js'; import { deepStrictEqual, rejects, throws } from 'assert'; import tap from 'tap'; diff --git a/test/parallel/test-stream-auto-destroy.js b/test/parallel/test-stream-auto-destroy.js index eef741b30d..f1f4ade5e7 100644 --- a/test/parallel/test-stream-auto-destroy.js +++ b/test/parallel/test-stream-auto-destroy.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const assert = require('assert'); { diff --git a/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js b/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js index 961aa1eafd..5cd9c7e613 100644 --- a/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js +++ b/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const { PassThrough } = require('../../lib'); +const { PassThrough } = require('../../lib/ours/index'); const encode = new PassThrough({ highWaterMark: 1 diff --git a/test/parallel/test-stream-backpressure.js b/test/parallel/test-stream-backpressure.js index b9051924e1..0265970ae2 100644 --- a/test/parallel/test-stream-backpressure.js +++ b/test/parallel/test-stream-backpressure.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); let pushes = 0; const total = 65500 + 40 * 
1024; diff --git a/test/parallel/test-stream-big-packet.js b/test/parallel/test-stream-big-packet.js index 5b5b0a4bff..671a6965e3 100644 --- a/test/parallel/test-stream-big-packet.js +++ b/test/parallel/test-stream-big-packet.js @@ -27,7 +27,7 @@ ; require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); let passed = false; diff --git a/test/parallel/test-stream-big-push.js b/test/parallel/test-stream-big-push.js index b929e2abf1..62190aa834 100644 --- a/test/parallel/test-stream-big-push.js +++ b/test/parallel/test-stream-big-push.js @@ -27,7 +27,7 @@ ; const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const str = 'asdfasdfasdfasdfasdf'; const r = new stream.Readable({ diff --git a/test/parallel/test-stream-catch-rejections.js b/test/parallel/test-stream-catch-rejections.js index 82c76dd827..cf65a8c24f 100644 --- a/test/parallel/test-stream-catch-rejections.js +++ b/test/parallel/test-stream-catch-rejections.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const assert = require('assert'); { diff --git a/test/parallel/test-stream-compose.js b/test/parallel/test-stream-compose.js index ae4db7ff34..1cafda10cd 100644 --- a/test/parallel/test-stream-compose.js +++ b/test/parallel/test-stream-compose.js @@ -14,7 +14,7 @@ const { Writable, finished, PassThrough -} = require('../../lib'); +} = require('../../lib/ours/index'); const compose = require('../../lib/internal/streams/compose'); const assert = require('assert'); diff --git a/test/parallel/test-stream-construct-async-error.js b/test/parallel/test-stream-construct-async-error.js deleted file mode 100644 index c62b033dec..0000000000 --- a/test/parallel/test-stream-construct-async-error.js +++ /dev/null @@ -1,255 +0,0 @@ - - 'use strict' - - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; - -const common = require('../common'); -const { - Duplex, - Writable, - Transform, -} = require('../../lib'); - - const st = require('timers').setTimeout; - - function setTimeout(ms) { - return new Promise(resolve => { - st(resolve, ms); - }); - } - -const assert = require('assert'); - -{ - class Foo extends Duplex { - async _destroy(err, cb) { - await setTimeout(common.platformTimeout(1)); - throw new Error('boom'); - } - } - - const foo = new Foo(); - foo.destroy(); - foo.on('error', common.expectsError({ - message: 'boom' - })); - foo.on('close', common.mustCall(() => { - assert(foo.destroyed); - })); -} - -{ - class Foo extends Duplex { - async _destroy(err, cb) { - await setTimeout(common.platformTimeout(1)); - } - } - - const foo = new Foo(); - foo.destroy(); - foo.on('close', common.mustCall(() => { - assert(foo.destroyed); - })); -} - -{ - class Foo extends Duplex { - async _construct() { - await setTimeout(common.platformTimeout(1)); - } - - _write = common.mustCall((chunk, encoding, cb) => { - cb(); - }); - - _read() {} - } - - const foo = new Foo(); - foo.write('test', common.mustCall()); -} - -{ - class Foo extends Duplex { - async _construct(callback) { - await setTimeout(common.platformTimeout(1)); - callback(); - } - - _write = common.mustCall((chunk, encoding, cb) => { - cb(); - }); - - _read() {} - } - - const foo = new Foo(); - foo.write('test', common.mustCall()); - foo.on('error', common.mustNotCall()); -} - -{ - 
class Foo extends Writable { - _write = common.mustCall((chunk, encoding, cb) => { - cb(); - }); - - async _final() { - await setTimeout(common.platformTimeout(1)); - } - } - - const foo = new Foo(); - foo.end('hello'); - foo.on('finish', common.mustCall()); -} - -{ - class Foo extends Writable { - _write = common.mustCall((chunk, encoding, cb) => { - cb(); - }); - - async _final(callback) { - await setTimeout(common.platformTimeout(1)); - callback(); - } - } - - const foo = new Foo(); - foo.end('hello'); - foo.on('finish', common.mustCall()); -} - -{ - class Foo extends Writable { - _write = common.mustCall((chunk, encoding, cb) => { - cb(); - }); - - async _final() { - await setTimeout(common.platformTimeout(1)); - throw new Error('boom'); - } - } - - const foo = new Foo(); - foo.end('hello'); - foo.on('error', common.expectsError({ - message: 'boom' - })); - foo.on('close', common.mustCall()); -} - -{ - const expected = ['hello', 'world']; - class Foo extends Transform { - async _flush() { - return 'world'; - } - - _transform(chunk, encoding, callback) { - callback(null, chunk); - } - } - - const foo = new Foo(); - foo.end('hello'); - foo.on('data', common.mustCall((chunk) => { - assert.strictEqual(chunk.toString(), expected.shift()); - }, 2)); -} - -{ - const expected = ['hello', 'world']; - class Foo extends Transform { - async _flush(callback) { - callback(null, 'world'); - } - - _transform(chunk, encoding, callback) { - callback(null, chunk); - } - } - - const foo = new Foo(); - foo.end('hello'); - foo.on('data', common.mustCall((chunk) => { - assert.strictEqual(chunk.toString(), expected.shift()); - }, 2)); -} - -{ - class Foo extends Transform { - async _flush(callback) { - throw new Error('boom'); - } - - _transform(chunk, encoding, callback) { - callback(null, chunk); - } - } - - const foo = new Foo(); - foo.end('hello'); - foo.on('data', common.mustCall()); - foo.on('error', common.expectsError({ - message: 'boom' - })); - foo.on('close', common.mustCall()); -} - -{ - class Foo extends Transform { - async _transform(chunk) { - return chunk.toString().toUpperCase(); - } - } - - const foo = new Foo(); - foo.end('hello'); - foo.on('data', common.mustCall((chunk) => { - assert.strictEqual(chunk.toString(), 'HELLO'); - })); -} - -{ - class Foo extends Transform { - async _transform(chunk, _, callback) { - callback(null, chunk.toString().toUpperCase()); - } - } - - const foo = new Foo(); - foo.end('hello'); - foo.on('data', common.mustCall((chunk) => { - assert.strictEqual(chunk.toString(), 'HELLO'); - })); -} - -{ - class Foo extends Transform { - async _transform() { - throw new Error('boom'); - } - } - - const foo = new Foo(); - foo.end('hello'); - foo.on('error', common.expectsError({ - message: 'boom' - })); - foo.on('close', common.mustCall()); -} - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ diff --git a/test/parallel/test-stream-construct.js b/test/parallel/test-stream-construct.js index 2f25ce30dc..86165797bf 100644 --- a/test/parallel/test-stream-construct.js +++ b/test/parallel/test-stream-construct.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Writable, Readable, Duplex } = require('../../lib'); +const { Writable, Readable, Duplex } = require('../../lib/ours/index'); const assert = require('assert'); { diff --git a/test/parallel/test-stream-decoder-objectmode.js 
b/test/parallel/test-stream-decoder-objectmode.js index 8a7a09356c..96ae916cfb 100644 --- a/test/parallel/test-stream-decoder-objectmode.js +++ b/test/parallel/test-stream-decoder-objectmode.js @@ -6,7 +6,7 @@ ; require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const assert = require('assert'); const readable = new stream.Readable({ diff --git a/test/parallel/test-stream-destroy-event-order.js b/test/parallel/test-stream-destroy-event-order.js index 6c7d3023ed..4f5775449c 100644 --- a/test/parallel/test-stream-destroy-event-order.js +++ b/test/parallel/test-stream-destroy-event-order.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const rs = new Readable({ read() {} diff --git a/test/parallel/test-stream-drop-take.js b/test/parallel/test-stream-drop-take.js index aaaa3bde31..d99c38f541 100644 --- a/test/parallel/test-stream-drop-take.js +++ b/test/parallel/test-stream-drop-take.js @@ -8,7 +8,7 @@ const common = require('../common'); const { Readable, -} = require('../../lib'); +} = require('../../lib/ours/index'); const { deepStrictEqual, rejects, throws } = require('assert'); const { from } = Readable; diff --git a/test/parallel/test-stream-duplex-destroy.js b/test/parallel/test-stream-duplex-destroy.js index f7b6a3d633..6b6b5e9a42 100644 --- a/test/parallel/test-stream-duplex-destroy.js +++ b/test/parallel/test-stream-duplex-destroy.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Duplex } = require('../../lib'); +const { Duplex } = require('../../lib/ours/index'); const assert = require('assert'); { @@ -130,7 +130,7 @@ const assert = require('assert'); duplex.removeListener('end', fail); duplex.removeListener('finish', fail); duplex.on('end', common.mustNotCall()); - duplex.on('finish', common.mustCall()); + duplex.on('finish', common.mustNotCall()); assert.strictEqual(duplex.destroyed, true); } diff --git a/test/parallel/test-stream-duplex-end.js b/test/parallel/test-stream-duplex-end.js index ee442fcc2c..5c10daaa59 100644 --- a/test/parallel/test-stream-duplex-end.js +++ b/test/parallel/test-stream-duplex-end.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const Duplex = require('../../lib').Duplex; +const Duplex = require('../../lib/ours/index').Duplex; { const stream = new Duplex({ diff --git a/test/parallel/test-stream-duplex-from.js b/test/parallel/test-stream-duplex-from.js index de972fcc6a..b63965f43d 100644 --- a/test/parallel/test-stream-duplex-from.js +++ b/test/parallel/test-stream-duplex-from.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Duplex, Readable, Writable, pipeline } = require('../../lib'); +const { Duplex, Readable, Writable, pipeline } = require('../../lib/ours/index'); const { Blob } = require('buffer'); { diff --git a/test/parallel/test-stream-duplex-props.js b/test/parallel/test-stream-duplex-props.js index 470bce950a..4680d355fd 100644 --- a/test/parallel/test-stream-duplex-props.js +++ b/test/parallel/test-stream-duplex-props.js @@ -7,7 +7,7 @@ require('../common'); const assert = require('assert'); -const { Duplex } = require('../../lib'); +const { Duplex } = require('../../lib/ours/index'); { const d = new Duplex({ diff --git a/test/parallel/test-stream-duplex-readable-end.js b/test/parallel/test-stream-duplex-readable-end.js index 
f71506c903..60012c4ba7 100644 --- a/test/parallel/test-stream-duplex-readable-end.js +++ b/test/parallel/test-stream-duplex-readable-end.js @@ -7,7 +7,7 @@ // https://github.com/nodejs/node/issues/35926 const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); let loops = 5; diff --git a/test/parallel/test-stream-duplex-readable-writable.js b/test/parallel/test-stream-duplex-readable-writable.js index 3599e006c2..78655e043a 100644 --- a/test/parallel/test-stream-duplex-readable-writable.js +++ b/test/parallel/test-stream-duplex-readable-writable.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Duplex } = require('../../lib'); +const { Duplex } = require('../../lib/ours/index'); const assert = require('assert'); { diff --git a/test/parallel/test-stream-duplex-writable-finished.js b/test/parallel/test-stream-duplex-writable-finished.js index e1fe855181..715af20acd 100644 --- a/test/parallel/test-stream-duplex-writable-finished.js +++ b/test/parallel/test-stream-duplex-writable-finished.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Duplex } = require('../../lib'); +const { Duplex } = require('../../lib/ours/index'); const assert = require('assert'); // basic diff --git a/test/parallel/test-stream-duplex.js b/test/parallel/test-stream-duplex.js index 90eb91da76..fc5b5eee31 100644 --- a/test/parallel/test-stream-duplex.js +++ b/test/parallel/test-stream-duplex.js @@ -27,7 +27,7 @@ require('../common'); const assert = require('assert'); -const Duplex = require('../../lib').Duplex; +const Duplex = require('../../lib/ours/index').Duplex; const stream = new Duplex({ objectMode: true }); diff --git a/test/parallel/test-stream-end-of-streams.js b/test/parallel/test-stream-end-of-streams.js new file mode 100644 index 0000000000..b4207da37a --- /dev/null +++ b/test/parallel/test-stream-end-of-streams.js @@ -0,0 +1,35 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; +require('../common'); +const assert = require('assert'); + +const { Duplex, finished } = require('../../lib/ours/index'); + +assert.throws( + () => { + // Passing empty object to mock invalid stream + // should throw error + finished({}, () => {}); + }, + { code: 'ERR_INVALID_ARG_TYPE' } +); + +const streamObj = new Duplex(); +streamObj.end(); +// Below code should not throw any errors as the +// streamObj is `Stream` +finished(streamObj, () => {}); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-end-paused.js b/test/parallel/test-stream-end-paused.js index 5d02fff6e7..b9ec00a39b 100644 --- a/test/parallel/test-stream-end-paused.js +++ b/test/parallel/test-stream-end-paused.js @@ -30,7 +30,7 @@ const assert = require('assert'); // Make sure we don't miss the end event for paused 0-length streams -const Readable = require('../../lib').Readable; +const Readable = require('../../lib/ours/index').Readable; const stream = new Readable(); let calledRead = false; stream._read = function() { diff --git a/test/parallel/test-stream-error-once.js b/test/parallel/test-stream-error-once.js index b9a9796e47..f69bd88388 100644 --- a/test/parallel/test-stream-error-once.js +++ b/test/parallel/test-stream-error-once.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, 
error() {} }; ; const common = require('../common'); -const { Writable, Readable } = require('../../lib'); +const { Writable, Readable } = require('../../lib/ours/index'); { const writable = new Writable(); diff --git a/test/parallel/test-stream-events-prepend.js b/test/parallel/test-stream-events-prepend.js index 9564b2f828..4996de08a9 100644 --- a/test/parallel/test-stream-events-prepend.js +++ b/test/parallel/test-stream-events-prepend.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); class Writable extends stream.Writable { constructor() { diff --git a/test/parallel/test-stream-filter.js b/test/parallel/test-stream-filter.js index e5dfddc0ce..aad1bcd051 100644 --- a/test/parallel/test-stream-filter.js +++ b/test/parallel/test-stream-filter.js @@ -8,7 +8,7 @@ const common = require('../common'); const { Readable, -} = require('../../lib'); +} = require('../../lib/ours/index'); const assert = require('assert'); const { once } = require('events'); diff --git a/test/parallel/test-stream-finished.js b/test/parallel/test-stream-finished.js index 95524c9fe7..77b55d24c6 100644 --- a/test/parallel/test-stream-finished.js +++ b/test/parallel/test-stream-finished.js @@ -14,7 +14,7 @@ const { Duplex, PassThrough, Stream, -} = require('../../lib'); +} = require('../../lib/ours/index'); const assert = require('assert'); const EE = require('events'); const fs = require('fs'); @@ -265,7 +265,12 @@ const http = require('http'); const streamLike = new EE(); streamLike.readableEnded = true; streamLike.readable = true; - finished(streamLike, common.mustCall()); + assert.throws( + () => { + finished(streamLike, () => {}); + }, + { code: 'ERR_INVALID_ARG_TYPE' } + ); streamLike.emit('close'); } @@ -618,8 +623,10 @@ testClosed((opts) => new Writable({ write() {}, ...opts })); const w = new Writable(); const _err = new Error(); w.destroy(_err); + assert.strictEqual(w.errored, _err); finished(w, common.mustCall((err) => { assert.strictEqual(_err, err); + assert.strictEqual(w.closed, true); finished(w, common.mustCall((err) => { assert.strictEqual(_err, err); })); @@ -629,7 +636,9 @@ testClosed((opts) => new Writable({ write() {}, ...opts })); { const w = new Writable(); w.destroy(); + assert.strictEqual(w.errored, null); finished(w, common.mustCall((err) => { + assert.strictEqual(w.closed, true); assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); finished(w, common.mustCall((err) => { assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); diff --git a/test/parallel/test-stream-flatMap.js b/test/parallel/test-stream-flatMap.js index db00e554c1..07d569d5fa 100644 --- a/test/parallel/test-stream-flatMap.js +++ b/test/parallel/test-stream-flatMap.js @@ -9,7 +9,7 @@ const common = require('../common'); const fixtures = require('../common/fixtures'); const { Readable, -} = require('../../lib'); +} = require('../../lib/ours/index'); const assert = require('assert'); const st = require('timers').setTimeout; diff --git a/test/parallel/test-stream-forEach.js b/test/parallel/test-stream-forEach.js index a88c323619..39e5e34efc 100644 --- a/test/parallel/test-stream-forEach.js +++ b/test/parallel/test-stream-forEach.js @@ -8,7 +8,7 @@ const common = require('../common'); const { Readable, -} = require('../../lib'); +} = require('../../lib/ours/index'); const assert = require('assert'); const { once } = require('events'); diff --git 
a/test/parallel/test-stream-inheritance.js b/test/parallel/test-stream-inheritance.js index 1c0fbfb0fe..3507a337c4 100644 --- a/test/parallel/test-stream-inheritance.js +++ b/test/parallel/test-stream-inheritance.js @@ -6,7 +6,7 @@ ; require('../common'); const assert = require('assert'); -const { Readable, Writable, Duplex, Transform } = require('../../lib'); +const { Readable, Writable, Duplex, Transform } = require('../../lib/ours/index'); const readable = new Readable({ read() {} }); const writable = new Writable({ write() {} }); diff --git a/test/parallel/test-stream-ispaused.js b/test/parallel/test-stream-ispaused.js index e2efd498c3..582f52b447 100644 --- a/test/parallel/test-stream-ispaused.js +++ b/test/parallel/test-stream-ispaused.js @@ -27,7 +27,7 @@ ; require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const readable = new stream.Readable(); diff --git a/test/parallel/test-stream-iterator-helpers-test262-tests.mjs b/test/parallel/test-stream-iterator-helpers-test262-tests.mjs index 44507cd244..9f09abeab6 100644 --- a/test/parallel/test-stream-iterator-helpers-test262-tests.mjs +++ b/test/parallel/test-stream-iterator-helpers-test262-tests.mjs @@ -1,5 +1,5 @@ import { mustCall } from '../common/index.mjs'; -import { Readable }from '../../lib/index.js'; +import { Readable }from '../../lib/ours/index.js'; import assert from 'assert'; import tap from 'tap'; @@ -69,7 +69,7 @@ import tap from 'tap'; ); assert.strictEqual(descriptor.enumerable, false); assert.strictEqual(descriptor.configurable, true); - // assert.strictEqual(descriptor.writable, true); + assert.strictEqual(descriptor.writable, true); } { // drop/length @@ -80,7 +80,7 @@ import tap from 'tap'; ); assert.strictEqual(descriptor.enumerable, false); assert.strictEqual(descriptor.configurable, true); - // assert.strictEqual(descriptor.writable, true); + assert.strictEqual(descriptor.writable, true); // drop/limit-equals-total const iterator = Readable.from([1, 2]).drop(2); const result = await iterator[Symbol.asyncIterator]().next(); diff --git a/test/parallel/test-stream-objectmode-undefined.js b/test/parallel/test-stream-objectmode-undefined.js index 84e5038162..2b4ac5e1c4 100644 --- a/test/parallel/test-stream-objectmode-undefined.js +++ b/test/parallel/test-stream-objectmode-undefined.js @@ -6,7 +6,7 @@ ; const common = require('../common'); const assert = require('assert'); -const { Readable, Writable, Transform } = require('../../lib'); +const { Readable, Writable, Transform } = require('../../lib/ours/index'); { const stream = new Readable({ diff --git a/test/parallel/test-stream-once-readable-pipe.js b/test/parallel/test-stream-once-readable-pipe.js index 19408686a0..10a7824c75 100644 --- a/test/parallel/test-stream-once-readable-pipe.js +++ b/test/parallel/test-stream-once-readable-pipe.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Readable, Writable } = require('../../lib'); +const { Readable, Writable } = require('../../lib/ours/index'); // This test ensures that if have 'readable' listener // on Readable instance it will not disrupt the pipe. 
diff --git a/test/parallel/test-stream-passthrough-drain.js b/test/parallel/test-stream-passthrough-drain.js index 3506bc901e..e288bccc73 100644 --- a/test/parallel/test-stream-passthrough-drain.js +++ b/test/parallel/test-stream-passthrough-drain.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const { PassThrough } = require('../../lib'); +const { PassThrough } = require('../../lib/ours/index'); const pt = new PassThrough({ highWaterMark: 0 }); pt.on('drain', common.mustCall()); diff --git a/test/parallel/test-stream-pipe-after-end.js b/test/parallel/test-stream-pipe-after-end.js index 4a5b5a3237..1370dcdc93 100644 --- a/test/parallel/test-stream-pipe-after-end.js +++ b/test/parallel/test-stream-pipe-after-end.js @@ -27,7 +27,7 @@ ; const common = require('../common'); const assert = require('assert'); -const { Readable, Writable } = require('../../lib'); +const { Readable, Writable } = require('../../lib/ours/index'); class TestReadable extends Readable { constructor(opt) { diff --git a/test/parallel/test-stream-pipe-await-drain-manual-resume.js b/test/parallel/test-stream-pipe-await-drain-manual-resume.js index 8fbb9ba2b9..4f0f2820a1 100644 --- a/test/parallel/test-stream-pipe-await-drain-manual-resume.js +++ b/test/parallel/test-stream-pipe-await-drain-manual-resume.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const assert = require('assert'); // A consumer stream with a very low highWaterMark, which starts in a state diff --git a/test/parallel/test-stream-pipe-await-drain-push-while-write.js b/test/parallel/test-stream-pipe-await-drain-push-while-write.js index ac548dbb9a..b98d427f3f 100644 --- a/test/parallel/test-stream-pipe-await-drain-push-while-write.js +++ b/test/parallel/test-stream-pipe-await-drain-push-while-write.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const assert = require('assert'); const writable = new stream.Writable({ diff --git a/test/parallel/test-stream-pipe-await-drain.js b/test/parallel/test-stream-pipe-await-drain.js index 96360af761..c6dfa9e0ae 100644 --- a/test/parallel/test-stream-pipe-await-drain.js +++ b/test/parallel/test-stream-pipe-await-drain.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const assert = require('assert'); // This is very similar to test-stream-pipe-cleanup-pause.js. 
diff --git a/test/parallel/test-stream-pipe-cleanup-pause.js b/test/parallel/test-stream-pipe-cleanup-pause.js index 9490d8944a..fc411bc399 100644 --- a/test/parallel/test-stream-pipe-cleanup-pause.js +++ b/test/parallel/test-stream-pipe-cleanup-pause.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const reader = new stream.Readable(); const writer1 = new stream.Writable(); diff --git a/test/parallel/test-stream-pipe-cleanup.js b/test/parallel/test-stream-pipe-cleanup.js index 7e3b13689f..3fb54ef8c3 100644 --- a/test/parallel/test-stream-pipe-cleanup.js +++ b/test/parallel/test-stream-pipe-cleanup.js @@ -28,7 +28,7 @@ // This test asserts that Stream.prototype.pipe does not leave listeners // hanging on the source or dest. require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const assert = require('assert'); function Writable() { diff --git a/test/parallel/test-stream-pipe-error-handling.js b/test/parallel/test-stream-pipe-error-handling.js index 531e979fd5..200d561524 100644 --- a/test/parallel/test-stream-pipe-error-handling.js +++ b/test/parallel/test-stream-pipe-error-handling.js @@ -27,7 +27,7 @@ ; const common = require('../common'); const assert = require('assert'); -const { Stream, PassThrough } = require('../../lib'); +const { Stream, PassThrough } = require('../../lib/ours/index'); { const source = new Stream(); diff --git a/test/parallel/test-stream-pipe-error-unhandled.js b/test/parallel/test-stream-pipe-error-unhandled.js index 43b0daeac6..1fc1381c9a 100644 --- a/test/parallel/test-stream-pipe-error-unhandled.js +++ b/test/parallel/test-stream-pipe-error-unhandled.js @@ -6,7 +6,7 @@ ; const common = require('../common'); const assert = require('assert'); -const { Readable, Writable } = require('../../lib'); +const { Readable, Writable } = require('../../lib/ours/index'); process.on('uncaughtException', common.mustCall((err) => { assert.strictEqual(err.message, 'asd'); diff --git a/test/parallel/test-stream-pipe-event.js b/test/parallel/test-stream-pipe-event.js index d7f991a298..f78cb1dfca 100644 --- a/test/parallel/test-stream-pipe-event.js +++ b/test/parallel/test-stream-pipe-event.js @@ -26,7 +26,7 @@ const silentConsole = { log() {}, error() {} }; ; require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const assert = require('assert'); function Writable() { diff --git a/test/parallel/test-stream-pipe-flow-after-unpipe.js b/test/parallel/test-stream-pipe-flow-after-unpipe.js index 0cf392be68..d72de00eb7 100644 --- a/test/parallel/test-stream-pipe-flow-after-unpipe.js +++ b/test/parallel/test-stream-pipe-flow-after-unpipe.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const { Readable, Writable } = require('../../lib'); +const { Readable, Writable } = require('../../lib/ours/index'); // Tests that calling .unpipe() un-blocks a stream that is paused because // it is waiting on the writable side to finish a write(). 
diff --git a/test/parallel/test-stream-pipe-flow.js b/test/parallel/test-stream-pipe-flow.js index f3f0908c67..72932b8546 100644 --- a/test/parallel/test-stream-pipe-flow.js +++ b/test/parallel/test-stream-pipe-flow.js @@ -6,7 +6,7 @@ ; const common = require('../common'); const assert = require('assert'); -const { Readable, Writable, PassThrough } = require('../../lib'); +const { Readable, Writable, PassThrough } = require('../../lib/ours/index'); { let ticks = 17; diff --git a/test/parallel/test-stream-pipe-manual-resume.js b/test/parallel/test-stream-pipe-manual-resume.js index cbf024f00f..8abbb6ddd0 100644 --- a/test/parallel/test-stream-pipe-manual-resume.js +++ b/test/parallel/test-stream-pipe-manual-resume.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); function test(throwCodeInbetween) { // Check that a pipe does not stall if .read() is called unexpectedly diff --git a/test/parallel/test-stream-pipe-multiple-pipes.js b/test/parallel/test-stream-pipe-multiple-pipes.js index 7e6d83d43d..55f7b7203d 100644 --- a/test/parallel/test-stream-pipe-multiple-pipes.js +++ b/test/parallel/test-stream-pipe-multiple-pipes.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const assert = require('assert'); const readable = new stream.Readable({ diff --git a/test/parallel/test-stream-pipe-needDrain.js b/test/parallel/test-stream-pipe-needDrain.js index e4abd8a7fa..1da99efba5 100644 --- a/test/parallel/test-stream-pipe-needDrain.js +++ b/test/parallel/test-stream-pipe-needDrain.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Readable, Writable } = require('../../lib'); +const { Readable, Writable } = require('../../lib/ours/index'); // Pipe should pause temporarily if writable needs drain. { diff --git a/test/parallel/test-stream-pipe-same-destination-twice.js b/test/parallel/test-stream-pipe-same-destination-twice.js index 5df30fc899..4283c69a77 100644 --- a/test/parallel/test-stream-pipe-same-destination-twice.js +++ b/test/parallel/test-stream-pipe-same-destination-twice.js @@ -10,7 +10,7 @@ const common = require('../common'); // Tests that piping a source stream twice to the same destination stream // works, and that a subsequent unpipe() call only removes the pipe *once*. 
const assert = require('assert'); -const { PassThrough, Writable } = require('../../lib'); +const { PassThrough, Writable } = require('../../lib/ours/index'); { const passThrough = new PassThrough(); diff --git a/test/parallel/test-stream-pipe-unpipe-streams.js b/test/parallel/test-stream-pipe-unpipe-streams.js index c31f4fab10..bf011ff81b 100644 --- a/test/parallel/test-stream-pipe-unpipe-streams.js +++ b/test/parallel/test-stream-pipe-unpipe-streams.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Readable, Writable } = require('../../lib'); +const { Readable, Writable } = require('../../lib/ours/index'); const source = Readable({ read: () => {} }); const dest1 = Writable({ write: () => {} }); diff --git a/test/parallel/test-stream-pipe-without-listenerCount.js b/test/parallel/test-stream-pipe-without-listenerCount.js index 8576d18eb0..91c8beed6e 100644 --- a/test/parallel/test-stream-pipe-without-listenerCount.js +++ b/test/parallel/test-stream-pipe-without-listenerCount.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const r = new stream.Stream(); r.listenerCount = undefined; diff --git a/test/parallel/test-stream-pipeline-async-iterator.js b/test/parallel/test-stream-pipeline-async-iterator.js index 241ab9b1c1..49bda68161 100644 --- a/test/parallel/test-stream-pipeline-async-iterator.js +++ b/test/parallel/test-stream-pipeline-async-iterator.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Readable, PassThrough, pipeline } = require('../../lib'); +const { Readable, PassThrough, pipeline } = require('../../lib/ours/index'); const assert = require('assert'); const _err = new Error('kaboom'); diff --git a/test/parallel/test-stream-pipeline-http2.js b/test/parallel/test-stream-pipeline-http2.js index c0b02fc98c..97cb73b26c 100644 --- a/test/parallel/test-stream-pipeline-http2.js +++ b/test/parallel/test-stream-pipeline-http2.js @@ -8,7 +8,7 @@ const common = require('../common'); if (!common.hasCrypto) common.skip('missing crypto'); -const { Readable, pipeline } = require('../../lib'); +const { Readable, pipeline } = require('../../lib/ours/index'); const http2 = require('http2'); { diff --git a/test/parallel/test-stream-pipeline-listeners.js b/test/parallel/test-stream-pipeline-listeners.js new file mode 100644 index 0000000000..0c0de2b633 --- /dev/null +++ b/test/parallel/test-stream-pipeline-listeners.js @@ -0,0 +1,91 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +const common = require('../common'); +const { pipeline, Duplex, PassThrough, Writable } = require('../../lib/ours/index'); +const assert = require('assert'); + +process.on('uncaughtException', common.mustCall((err) => { + assert.strictEqual(err.message, 'no way'); +}, 2)); + +// Ensure that listeners is removed if last stream is readable +// And other stream's listeners unchanged +const a = new PassThrough(); +a.end('foobar'); +const b = new Duplex({ + write(chunk, encoding, callback) { + callback(); + } +}); +pipeline(a, b, common.mustCall((error) => { + if (error) { + assert.ifError(error); + } + + assert(a.listenerCount('error') > 0); + assert.strictEqual(b.listenerCount('error'), 0); + setTimeout(() => { + assert.strictEqual(b.listenerCount('error'), 0); + b.destroy(new Error('no way')); + }, 100); +})); + +// Async generators +const c = new 
PassThrough(); +c.end('foobar'); +const d = pipeline( + c, + async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase(); + } + }, + common.mustCall((error) => { + if (error) { + assert.ifError(error); + } + + assert(c.listenerCount('error') > 0); + assert.strictEqual(d.listenerCount('error'), 0); + setTimeout(() => { + assert.strictEqual(b.listenerCount('error'), 0); + d.destroy(new Error('no way')); + }, 100); + }) +); + +// If last stream is not readable, will not throw and remove listeners +const e = new PassThrough(); +e.end('foobar'); +const f = new Writable({ + write(chunk, encoding, callback) { + callback(); + } +}); +pipeline(e, f, common.mustCall((error) => { + if (error) { + assert.ifError(error); + } + + assert(e.listenerCount('error') > 0); + assert(f.listenerCount('error') > 0); + setTimeout(() => { + assert(f.listenerCount('error') > 0); + f.destroy(new Error('no way')); + }, 100); +})); + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-pipeline-process.js b/test/parallel/test-stream-pipeline-process.js index da75c59153..1b72352e20 100644 --- a/test/parallel/test-stream-pipeline-process.js +++ b/test/parallel/test-stream-pipeline-process.js @@ -10,7 +10,7 @@ const assert = require('assert'); const os = require('os'); if (process.argv[2] === 'child') { - const { pipeline } = require('../../lib'); + const { pipeline } = require('../../lib/ours/index'); pipeline( process.stdin, process.stdout, diff --git a/test/parallel/test-stream-pipeline-queued-end-in-destroy.js b/test/parallel/test-stream-pipeline-queued-end-in-destroy.js index 91089ef329..1dab183bc0 100644 --- a/test/parallel/test-stream-pipeline-queued-end-in-destroy.js +++ b/test/parallel/test-stream-pipeline-queued-end-in-destroy.js @@ -6,7 +6,7 @@ ; const common = require('../common'); const assert = require('assert'); -const { Readable, Duplex, pipeline } = require('../../lib'); +const { Readable, Duplex, pipeline } = require('../../lib/ours/index'); // Test that the callback for pipeline() is called even when the ._destroy() // method of the stream places an .end() request to itself that does not diff --git a/test/parallel/test-stream-pipeline-uncaught.js b/test/parallel/test-stream-pipeline-uncaught.js index b83ef016d4..2399c1ce98 100644 --- a/test/parallel/test-stream-pipeline-uncaught.js +++ b/test/parallel/test-stream-pipeline-uncaught.js @@ -9,7 +9,7 @@ const common = require('../common'); const { pipeline, PassThrough -} = require('../../lib'); +} = require('../../lib/ours/index'); const assert = require('assert'); process.on('uncaughtException', common.mustCall((err) => { diff --git a/test/parallel/test-stream-pipeline-with-empty-string.js b/test/parallel/test-stream-pipeline-with-empty-string.js index 15f70919ad..0bb2a6c79e 100644 --- a/test/parallel/test-stream-pipeline-with-empty-string.js +++ b/test/parallel/test-stream-pipeline-with-empty-string.js @@ -9,7 +9,7 @@ const common = require('../common'); const { pipeline, PassThrough -} = require('../../lib'); +} = require('../../lib/ours/index'); async function runTest() { diff --git a/test/parallel/test-stream-promises.js b/test/parallel/test-stream-promises.js index 51e86db5ad..d5af56f6f6 100644 --- a/test/parallel/test-stream-promises.js +++ b/test/parallel/test-stream-promises.js @@ -6,7 +6,7 @@ ; const common = 
require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const { Readable, Writable, diff --git a/test/parallel/test-stream-push-order.js b/test/parallel/test-stream-push-order.js index 96eec48dbb..144e64310c 100644 --- a/test/parallel/test-stream-push-order.js +++ b/test/parallel/test-stream-push-order.js @@ -26,7 +26,7 @@ const silentConsole = { log() {}, error() {} }; ; require('../common'); -const Readable = require('../../lib').Readable; +const Readable = require('../../lib/ours/index').Readable; const assert = require('assert'); const s = new Readable({ diff --git a/test/parallel/test-stream-push-strings.js b/test/parallel/test-stream-push-strings.js index e7269e6379..3d56745e2b 100644 --- a/test/parallel/test-stream-push-strings.js +++ b/test/parallel/test-stream-push-strings.js @@ -28,7 +28,7 @@ require('../common'); const assert = require('assert'); -const Readable = require('../../lib').Readable; +const Readable = require('../../lib/ours/index').Readable; class MyStream extends Readable { constructor(options) { diff --git a/test/parallel/test-stream-readable-aborted.js b/test/parallel/test-stream-readable-aborted.js index 2d0569129f..56d07c9195 100644 --- a/test/parallel/test-stream-readable-aborted.js +++ b/test/parallel/test-stream-readable-aborted.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Readable, Duplex } = require('../../lib'); +const { Readable, Duplex } = require('../../lib/ours/index'); { const readable = new Readable({ diff --git a/test/parallel/test-stream-readable-add-chunk-during-data.js b/test/parallel/test-stream-readable-add-chunk-during-data.js index d0d7d2c8c4..135e09031e 100644 --- a/test/parallel/test-stream-readable-add-chunk-during-data.js +++ b/test/parallel/test-stream-readable-add-chunk-during-data.js @@ -6,7 +6,7 @@ ; const common = require('../common'); const assert = require('assert'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); // Verify that .push() and .unshift() can be called from 'data' listeners. 
diff --git a/test/parallel/test-stream-readable-constructor-set-methods.js b/test/parallel/test-stream-readable-constructor-set-methods.js index fb4a314b34..9c15ab26ab 100644 --- a/test/parallel/test-stream-readable-constructor-set-methods.js +++ b/test/parallel/test-stream-readable-constructor-set-methods.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const Readable = require('../../lib').Readable; +const Readable = require('../../lib/ours/index').Readable; const _read = common.mustCall(function _read(n) { this.push(null); diff --git a/test/parallel/test-stream-readable-data.js b/test/parallel/test-stream-readable-data.js index 2589f96326..eef9a7df7e 100644 --- a/test/parallel/test-stream-readable-data.js +++ b/test/parallel/test-stream-readable-data.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const readable = new Readable({ read() {} diff --git a/test/parallel/test-stream-readable-destroy.js b/test/parallel/test-stream-readable-destroy.js index 56e7fe5944..6bb37e0ab6 100644 --- a/test/parallel/test-stream-readable-destroy.js +++ b/test/parallel/test-stream-readable-destroy.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Readable, addAbortSignal } = require('../../lib'); +const { Readable, addAbortSignal } = require('../../lib/ours/index'); const assert = require('assert'); { @@ -18,6 +18,7 @@ const assert = require('assert'); read.on('close', common.mustCall()); read.destroy(); + assert.strictEqual(read.errored, null); assert.strictEqual(read.destroyed, true); } @@ -36,6 +37,7 @@ const assert = require('assert'); })); read.destroy(expected); + assert.strictEqual(read.errored, expected); assert.strictEqual(read.destroyed, true); } diff --git a/test/parallel/test-stream-readable-didRead.js b/test/parallel/test-stream-readable-didRead.js index d6a093415a..730da10106 100644 --- a/test/parallel/test-stream-readable-didRead.js +++ b/test/parallel/test-stream-readable-didRead.js @@ -6,7 +6,7 @@ ; const common = require('../common'); const assert = require('assert'); -const { isDisturbed, isErrored, Readable } = require('../../lib'); +const { isDisturbed, isErrored, Readable } = require('../../lib/ours/index'); function noop() {} diff --git a/test/parallel/test-stream-readable-emit-readable-short-stream.js b/test/parallel/test-stream-readable-emit-readable-short-stream.js index 4caadc96f0..c0ccb3b161 100644 --- a/test/parallel/test-stream-readable-emit-readable-short-stream.js +++ b/test/parallel/test-stream-readable-emit-readable-short-stream.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const assert = require('assert'); { diff --git a/test/parallel/test-stream-readable-emittedReadable.js b/test/parallel/test-stream-readable-emittedReadable.js index ccb71ab1c5..57f0b82593 100644 --- a/test/parallel/test-stream-readable-emittedReadable.js +++ b/test/parallel/test-stream-readable-emittedReadable.js @@ -6,7 +6,7 @@ ; const common = require('../common'); const assert = require('assert'); -const Readable = require('../../lib').Readable; +const Readable = require('../../lib/ours/index').Readable; const readable = new Readable({ read: () => {} diff --git a/test/parallel/test-stream-readable-end-destroyed.js b/test/parallel/test-stream-readable-end-destroyed.js index 4bf5929e0b..39d888e820 100644 --- a/test/parallel/test-stream-readable-end-destroyed.js +++ 
b/test/parallel/test-stream-readable-end-destroyed.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); { // Don't emit 'end' after 'close'. diff --git a/test/parallel/test-stream-readable-ended.js b/test/parallel/test-stream-readable-ended.js index 7ebc6878b7..a551cfb1f6 100644 --- a/test/parallel/test-stream-readable-ended.js +++ b/test/parallel/test-stream-readable-ended.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const assert = require('assert'); // basic diff --git a/test/parallel/test-stream-readable-error-end.js b/test/parallel/test-stream-readable-error-end.js index 15dafa970d..a9604ece50 100644 --- a/test/parallel/test-stream-readable-error-end.js +++ b/test/parallel/test-stream-readable-error-end.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); { const r = new Readable({ read() {} }); diff --git a/test/parallel/test-stream-readable-event.js b/test/parallel/test-stream-readable-event.js index 7823c68082..743cf74ad0 100644 --- a/test/parallel/test-stream-readable-event.js +++ b/test/parallel/test-stream-readable-event.js @@ -28,7 +28,7 @@ const common = require('../common'); const assert = require('assert'); -const Readable = require('../../lib').Readable; +const Readable = require('../../lib/ours/index').Readable; { // First test, not reading when the readable is added. diff --git a/test/parallel/test-stream-readable-flow-recursion.js b/test/parallel/test-stream-readable-flow-recursion.js index 9db6e29da1..4d18449ee8 100644 --- a/test/parallel/test-stream-readable-flow-recursion.js +++ b/test/parallel/test-stream-readable-flow-recursion.js @@ -33,7 +33,7 @@ const assert = require('assert'); // more data continuously, but without triggering a nextTick // warning or RangeError. -const Readable = require('../../lib').Readable; +const Readable = require('../../lib/ours/index').Readable; // Throw an error if we trigger a nextTick warning. process.throwDeprecation = true; diff --git a/test/parallel/test-stream-readable-hwm-0-async.js b/test/parallel/test-stream-readable-hwm-0-async.js index f09d93c6ab..f3b9c70f5c 100644 --- a/test/parallel/test-stream-readable-hwm-0-async.js +++ b/test/parallel/test-stream-readable-hwm-0-async.js @@ -11,7 +11,7 @@ const common = require('../common'); // for streams with highWaterMark === 0 once the stream returns data // by calling push() asynchronously. -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); let count = 5; diff --git a/test/parallel/test-stream-readable-hwm-0-no-flow-data.js b/test/parallel/test-stream-readable-hwm-0-no-flow-data.js index 15bf64ec56..8925218549 100644 --- a/test/parallel/test-stream-readable-hwm-0-no-flow-data.js +++ b/test/parallel/test-stream-readable-hwm-0-no-flow-data.js @@ -14,7 +14,7 @@ const common = require('../common'); // specifically catch any regressions that might occur with these streams. 
const assert = require('assert'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const streamData = [ 'a', null ]; diff --git a/test/parallel/test-stream-readable-hwm-0.js b/test/parallel/test-stream-readable-hwm-0.js index 352fa1eb58..4dfd1a7ad4 100644 --- a/test/parallel/test-stream-readable-hwm-0.js +++ b/test/parallel/test-stream-readable-hwm-0.js @@ -12,7 +12,7 @@ const common = require('../common'); // emit 'readable' event. const assert = require('assert'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const r = new Readable({ // Must be called only once upon setting 'readable' listener diff --git a/test/parallel/test-stream-readable-infinite-read.js b/test/parallel/test-stream-readable-infinite-read.js index a4cf0a625e..84ac8c662c 100644 --- a/test/parallel/test-stream-readable-infinite-read.js +++ b/test/parallel/test-stream-readable-infinite-read.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const buf = Buffer.alloc(8192); diff --git a/test/parallel/test-stream-readable-invalid-chunk.js b/test/parallel/test-stream-readable-invalid-chunk.js index 0b6069d9e8..4ed7d33a9e 100644 --- a/test/parallel/test-stream-readable-invalid-chunk.js +++ b/test/parallel/test-stream-readable-invalid-chunk.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); function testPushArg(val) { const readable = new stream.Readable({ diff --git a/test/parallel/test-stream-readable-needReadable.js b/test/parallel/test-stream-readable-needReadable.js index 45028dc027..1f0ecaba40 100644 --- a/test/parallel/test-stream-readable-needReadable.js +++ b/test/parallel/test-stream-readable-needReadable.js @@ -6,7 +6,7 @@ ; const common = require('../common'); const assert = require('assert'); -const Readable = require('../../lib').Readable; +const Readable = require('../../lib/ours/index').Readable; const readable = new Readable({ read: () => {} diff --git a/test/parallel/test-stream-readable-next-no-null.js b/test/parallel/test-stream-readable-next-no-null.js index 7fa8c8776d..0d72fdd8d0 100644 --- a/test/parallel/test-stream-readable-next-no-null.js +++ b/test/parallel/test-stream-readable-next-no-null.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const { mustNotCall, expectsError } = require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); async function* generate() { yield null; diff --git a/test/parallel/test-stream-readable-no-unneeded-readable.js b/test/parallel/test-stream-readable-no-unneeded-readable.js index 36dc08f064..c2620e4cea 100644 --- a/test/parallel/test-stream-readable-no-unneeded-readable.js +++ b/test/parallel/test-stream-readable-no-unneeded-readable.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const { Readable, PassThrough } = require('../../lib'); +const { Readable, PassThrough } = require('../../lib/ours/index'); function test(r) { const wrapper = new Readable({ diff --git a/test/parallel/test-stream-readable-object-multi-push-async.js b/test/parallel/test-stream-readable-object-multi-push-async.js index 66fc2b2354..3820cddf16 100644 --- a/test/parallel/test-stream-readable-object-multi-push-async.js +++ 
b/test/parallel/test-stream-readable-object-multi-push-async.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const MAX = 42; const BATCH = 10; diff --git a/test/parallel/test-stream-readable-pause-and-resume.js b/test/parallel/test-stream-readable-pause-and-resume.js index 005b799528..fbe720ca66 100644 --- a/test/parallel/test-stream-readable-pause-and-resume.js +++ b/test/parallel/test-stream-readable-pause-and-resume.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); let ticks = 18; let expectedData = 19; @@ -63,7 +63,7 @@ function readAndPause() { } { - const { PassThrough } = require('../../lib'); + const { PassThrough } = require('../../lib/ours/index'); const source3 = new PassThrough(); const target3 = new PassThrough(); diff --git a/test/parallel/test-stream-readable-readable-then-resume.js b/test/parallel/test-stream-readable-readable-then-resume.js index c4f8206d63..050795e7ea 100644 --- a/test/parallel/test-stream-readable-readable-then-resume.js +++ b/test/parallel/test-stream-readable-readable-then-resume.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const assert = require('assert'); // This test verifies that a stream could be resumed after diff --git a/test/parallel/test-stream-readable-readable.js b/test/parallel/test-stream-readable-readable.js index 42b7ab5c7f..a7c25ecacd 100644 --- a/test/parallel/test-stream-readable-readable.js +++ b/test/parallel/test-stream-readable-readable.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); { const r = new Readable({ diff --git a/test/parallel/test-stream-readable-reading-readingMore.js b/test/parallel/test-stream-readable-reading-readingMore.js index 447bbbf9b5..9a6d521c1c 100644 --- a/test/parallel/test-stream-readable-reading-readingMore.js +++ b/test/parallel/test-stream-readable-reading-readingMore.js @@ -6,7 +6,7 @@ ; const common = require('../common'); const assert = require('assert'); -const Readable = require('../../lib').Readable; +const Readable = require('../../lib/ours/index').Readable; { const readable = new Readable({ diff --git a/test/parallel/test-stream-readable-resume-hwm.js b/test/parallel/test-stream-readable-resume-hwm.js index c32f6487c4..00c7ebec3d 100644 --- a/test/parallel/test-stream-readable-resume-hwm.js +++ b/test/parallel/test-stream-readable-resume-hwm.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); // readable.resume() should not lead to a ._read() call being scheduled // when we exceed the high water mark already. 
diff --git a/test/parallel/test-stream-readable-resumeScheduled.js b/test/parallel/test-stream-readable-resumeScheduled.js index 9c0da6384f..23ae61a66a 100644 --- a/test/parallel/test-stream-readable-resumeScheduled.js +++ b/test/parallel/test-stream-readable-resumeScheduled.js @@ -9,7 +9,7 @@ const common = require('../common'); // Testing Readable Stream resumeScheduled state const assert = require('assert'); -const { Readable, Writable } = require('../../lib'); +const { Readable, Writable } = require('../../lib/ours/index'); { // pipe() test case diff --git a/test/parallel/test-stream-readable-setEncoding-existing-buffers.js b/test/parallel/test-stream-readable-setEncoding-existing-buffers.js index 2bc8536716..960aa22203 100644 --- a/test/parallel/test-stream-readable-setEncoding-existing-buffers.js +++ b/test/parallel/test-stream-readable-setEncoding-existing-buffers.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const assert = require('assert'); { diff --git a/test/parallel/test-stream-readable-setEncoding-null.js b/test/parallel/test-stream-readable-setEncoding-null.js index 3810288be0..979d0690e6 100644 --- a/test/parallel/test-stream-readable-setEncoding-null.js +++ b/test/parallel/test-stream-readable-setEncoding-null.js @@ -7,7 +7,7 @@ require('../common'); const assert = require('assert'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); { diff --git a/test/parallel/test-stream-readable-unpipe-resume.js b/test/parallel/test-stream-readable-unpipe-resume.js index 8559e8a743..5ef9c1947e 100644 --- a/test/parallel/test-stream-readable-unpipe-resume.js +++ b/test/parallel/test-stream-readable-unpipe-resume.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const fs = require('fs'); const readStream = fs.createReadStream(process.execPath); diff --git a/test/parallel/test-stream-readable-unshift.js b/test/parallel/test-stream-readable-unshift.js index 0e77bec1f4..73ba9f4668 100644 --- a/test/parallel/test-stream-readable-unshift.js +++ b/test/parallel/test-stream-readable-unshift.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); { // Check that strings are saved as Buffer diff --git a/test/parallel/test-stream-readable-with-unimplemented-_read.js b/test/parallel/test-stream-readable-with-unimplemented-_read.js index d244bc9ac1..8d4ae320d7 100644 --- a/test/parallel/test-stream-readable-with-unimplemented-_read.js +++ b/test/parallel/test-stream-readable-with-unimplemented-_read.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const readable = new Readable(); diff --git a/test/parallel/test-stream-readableListening-state.js b/test/parallel/test-stream-readableListening-state.js index 6738463ade..10abe32369 100644 --- a/test/parallel/test-stream-readableListening-state.js +++ b/test/parallel/test-stream-readableListening-state.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const r = new 
stream.Readable({ read: () => {} diff --git a/test/parallel/test-stream-reduce.js b/test/parallel/test-stream-reduce.js index 58c7c2db92..b585d3d106 100644 --- a/test/parallel/test-stream-reduce.js +++ b/test/parallel/test-stream-reduce.js @@ -8,7 +8,7 @@ const common = require('../common'); const { Readable, -} = require('../../lib'); +} = require('../../lib/ours/index'); const assert = require('assert'); function sum(p, c) { diff --git a/test/parallel/test-stream-some-find-every.mjs b/test/parallel/test-stream-some-find-every.mjs index 4dfd9a8461..34c8e2a8a2 100644 --- a/test/parallel/test-stream-some-find-every.mjs +++ b/test/parallel/test-stream-some-find-every.mjs @@ -1,6 +1,6 @@ import * as common from '../common/index.mjs'; import { setTimeout } from 'timers/promises'; -import { Readable }from '../../lib/index.js'; +import { Readable }from '../../lib/ours/index.js'; import assert from 'assert'; import tap from 'tap'; diff --git a/test/parallel/test-stream-toArray.js b/test/parallel/test-stream-toArray.js index dfaeea8e18..5b0e3d9de9 100644 --- a/test/parallel/test-stream-toArray.js +++ b/test/parallel/test-stream-toArray.js @@ -8,7 +8,7 @@ const common = require('../common'); const { Readable, -} = require('../../lib'); +} = require('../../lib/ours/index'); const assert = require('assert'); { diff --git a/test/parallel/test-stream-transform-callback-twice.js b/test/parallel/test-stream-transform-callback-twice.js index 47c6c85f24..8329911f78 100644 --- a/test/parallel/test-stream-transform-callback-twice.js +++ b/test/parallel/test-stream-transform-callback-twice.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const { Transform } = require('../../lib'); +const { Transform } = require('../../lib/ours/index'); const stream = new Transform({ transform(chunk, enc, cb) { cb(); cb(); } }); diff --git a/test/parallel/test-stream-transform-constructor-set-methods.js b/test/parallel/test-stream-transform-constructor-set-methods.js index 4d73a342aa..3901220672 100644 --- a/test/parallel/test-stream-transform-constructor-set-methods.js +++ b/test/parallel/test-stream-transform-constructor-set-methods.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Transform } = require('../../lib'); +const { Transform } = require('../../lib/ours/index'); const t = new Transform(); diff --git a/test/parallel/test-stream-transform-destroy.js b/test/parallel/test-stream-transform-destroy.js index 092f75fa42..9c27d026b5 100644 --- a/test/parallel/test-stream-transform-destroy.js +++ b/test/parallel/test-stream-transform-destroy.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Transform } = require('../../lib'); +const { Transform } = require('../../lib/ours/index'); const assert = require('assert'); { @@ -122,7 +122,7 @@ const assert = require('assert'); transform.removeListener('end', fail); transform.removeListener('finish', fail); transform.on('end', common.mustCall()); - transform.on('finish', common.mustCall()); + transform.on('finish', common.mustNotCall()); } { diff --git a/test/parallel/test-stream-transform-final-sync.js b/test/parallel/test-stream-transform-final-sync.js index 3e32c118c6..f88816504b 100644 --- a/test/parallel/test-stream-transform-final-sync.js +++ b/test/parallel/test-stream-transform-final-sync.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = 
require('../../lib/ours/index'); let state = 0; diff --git a/test/parallel/test-stream-transform-final.js b/test/parallel/test-stream-transform-final.js index 549a610cc5..9228159d33 100644 --- a/test/parallel/test-stream-transform-final.js +++ b/test/parallel/test-stream-transform-final.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); let state = 0; diff --git a/test/parallel/test-stream-transform-flush-data.js b/test/parallel/test-stream-transform-flush-data.js index e8ad955f25..0c229eb8ad 100644 --- a/test/parallel/test-stream-transform-flush-data.js +++ b/test/parallel/test-stream-transform-flush-data.js @@ -8,7 +8,7 @@ require('../common'); const assert = require('assert'); -const Transform = require('../../lib').Transform; +const Transform = require('../../lib/ours/index').Transform; const expected = 'asdf'; diff --git a/test/parallel/test-stream-transform-objectmode-falsey-value.js b/test/parallel/test-stream-transform-objectmode-falsey-value.js index 6e41f7983e..e9c6d6e96c 100644 --- a/test/parallel/test-stream-transform-objectmode-falsey-value.js +++ b/test/parallel/test-stream-transform-objectmode-falsey-value.js @@ -28,7 +28,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const PassThrough = stream.PassThrough; const src = new PassThrough({ objectMode: true }); diff --git a/test/parallel/test-stream-transform-split-highwatermark.js b/test/parallel/test-stream-transform-split-highwatermark.js index 9241137dcb..bd4fbcd579 100644 --- a/test/parallel/test-stream-transform-split-highwatermark.js +++ b/test/parallel/test-stream-transform-split-highwatermark.js @@ -7,7 +7,7 @@ require('../common'); const assert = require('assert'); -const { Transform, Readable, Writable } = require('../../lib'); +const { Transform, Readable, Writable } = require('../../lib/ours/index'); const DEFAULT = 16 * 1024; diff --git a/test/parallel/test-stream-transform-split-objectmode.js b/test/parallel/test-stream-transform-split-objectmode.js index a50e186225..14461e9fac 100644 --- a/test/parallel/test-stream-transform-split-objectmode.js +++ b/test/parallel/test-stream-transform-split-objectmode.js @@ -28,7 +28,7 @@ require('../common'); const assert = require('assert'); -const Transform = require('../../lib').Transform; +const Transform = require('../../lib/ours/index').Transform; const parser = new Transform({ readableObjectMode: true }); diff --git a/test/parallel/test-stream-uint8array.js b/test/parallel/test-stream-uint8array.js index 93d4444d98..6cd8e19ac8 100644 --- a/test/parallel/test-stream-uint8array.js +++ b/test/parallel/test-stream-uint8array.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Readable, Writable } = require('../../lib'); +const { Readable, Writable } = require('../../lib/ours/index'); const ABC = new Uint8Array([0x41, 0x42, 0x43]); const DEF = new Uint8Array([0x44, 0x45, 0x46]); diff --git a/test/parallel/test-stream-unpipe-event.js b/test/parallel/test-stream-unpipe-event.js index 0f5f2373a8..8bfedef105 100644 --- a/test/parallel/test-stream-unpipe-event.js +++ b/test/parallel/test-stream-unpipe-event.js @@ -6,7 +6,7 @@ ; const common = require('../common'); const assert = require('assert'); -const { Writable, Readable } = require('../../lib'); +const { Writable, Readable } = 
require('../../lib/ours/index'); class NullWriteable extends Writable { _write(chunk, encoding, callback) { return callback(); diff --git a/test/parallel/test-stream-unshift-empty-chunk.js b/test/parallel/test-stream-unshift-empty-chunk.js index 72828aae25..db44b38f79 100644 --- a/test/parallel/test-stream-unshift-empty-chunk.js +++ b/test/parallel/test-stream-unshift-empty-chunk.js @@ -30,7 +30,7 @@ const assert = require('assert'); // This test verifies that stream.unshift(Buffer.alloc(0)) or // stream.unshift('') does not set state.reading=false. -const Readable = require('../../lib').Readable; +const Readable = require('../../lib/ours/index').Readable; const r = new Readable(); let nChunks = 10; diff --git a/test/parallel/test-stream-unshift-read-race.js b/test/parallel/test-stream-unshift-read-race.js index e225c15a3d..15445d121a 100644 --- a/test/parallel/test-stream-unshift-read-race.js +++ b/test/parallel/test-stream-unshift-read-race.js @@ -35,7 +35,7 @@ const assert = require('assert'); // 3. push() after the EOF signaling null is an error. // 4. _read() is not called after pushing the EOF null chunk. -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const hwm = 10; const r = stream.Readable({ highWaterMark: hwm, autoDestroy: false }); const chunks = 10; diff --git a/test/parallel/test-stream-wrap-drain.js b/test/parallel/test-stream-wrap-drain.js deleted file mode 100644 index 1af1449596..0000000000 --- a/test/parallel/test-stream-wrap-drain.js +++ /dev/null @@ -1,65 +0,0 @@ -// Flags: --expose-internals - - 'use strict' - - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { StreamWrap } = require('../../lib/internal/js_stream_socket'); -const { Duplex } = require('../../lib'); -const internalBinding = process.binding -const { ShutdownWrap } = internalBinding('stream_wrap'); - -// This test makes sure that when a wrapped stream is waiting for -// a "drain" event to `doShutdown`, the instance will work correctly when a -// "drain" event emitted. -{ - let resolve = null; - - class TestDuplex extends Duplex { - _write(chunk, encoding, callback) { - // We will resolve the write later. - resolve = () => { - callback(); - }; - } - - _read() {} - } - - const testDuplex = new TestDuplex(); - const socket = new StreamWrap(testDuplex); - - socket.write( - // Make the buffer long enough so that the `Writable` will emit "drain". - Buffer.allocUnsafe(socket.writableHighWaterMark * 2), - common.mustCall() - ); - - // Make sure that the 'drain' events will be emitted. - testDuplex.on('drain', common.mustCall(() => { - silentConsole.log('testDuplex drain'); - })); - - assert.strictEqual(typeof resolve, 'function'); - - const req = new ShutdownWrap(); - req.oncomplete = common.mustCall(); - req.handle = socket._handle; - // Should not throw. 
- socket._handle.shutdown(req); - - resolve(); -} - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ diff --git a/test/parallel/test-stream-wrap-encoding.js b/test/parallel/test-stream-wrap-encoding.js deleted file mode 100644 index cfc69d965d..0000000000 --- a/test/parallel/test-stream-wrap-encoding.js +++ /dev/null @@ -1,58 +0,0 @@ -// Flags: --expose-internals - - 'use strict' - - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); - -const StreamWrap = require('../../lib/internal/js_stream_socket'); -const Duplex = require('../../lib').Duplex; - -{ - const stream = new Duplex({ - read() {}, - write() {} - }); - - stream.setEncoding('ascii'); - - const wrap = new StreamWrap(stream); - - wrap.on('error', common.expectsError({ - name: 'Error', - code: 'ERR_STREAM_WRAP', - message: 'Stream has StringDecoder set or is in objectMode' - })); - - stream.push('ohai'); -} - -{ - const stream = new Duplex({ - read() {}, - write() {}, - objectMode: true - }); - - const wrap = new StreamWrap(stream); - - wrap.on('error', common.expectsError({ - name: 'Error', - code: 'ERR_STREAM_WRAP', - message: 'Stream has StringDecoder set or is in objectMode' - })); - - stream.push(new Error('foo')); -} - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ diff --git a/test/parallel/test-stream-wrap.js b/test/parallel/test-stream-wrap.js deleted file mode 100644 index 9c0074f7cf..0000000000 --- a/test/parallel/test-stream-wrap.js +++ /dev/null @@ -1,48 +0,0 @@ -// Flags: --expose-internals - - 'use strict' - - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); - -const internalBinding = process.binding -const StreamWrap = require('../../lib/internal/js_stream_socket'); -const { Duplex } = require('../../lib'); -const { ShutdownWrap } = internalBinding('stream_wrap'); - -function testShutdown(callback) { - const stream = new Duplex({ - read: function() { - }, - write: function() { - } - }); - - const wrap = new StreamWrap(stream); - - const req = new ShutdownWrap(); - req.oncomplete = function(code) { - assert(code < 0); - callback(); - }; - req.handle = wrap._handle; - - // Close the handle to simulate - wrap.destroy(); - req.handle.shutdown(req); -} - -testShutdown(common.mustCall()); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ diff --git a/test/parallel/test-stream-writable-aborted.js b/test/parallel/test-stream-writable-aborted.js new file mode 100644 index 0000000000..68f74e9858 --- /dev/null +++ b/test/parallel/test-stream-writable-aborted.js @@ -0,0 +1,41 @@ + + 'use strict' + + const tap = require('tap'); + const silentConsole = { log() {}, error() {} }; + ; + +require('../common'); +const assert = require('assert'); +const { Writable } = require('../../lib/ours/index'); + +{ + const writable = new Writable({ + write() { + } + }); + assert.strictEqual(writable.writableAborted, false); + writable.destroy(); + assert.strictEqual(writable.writableAborted, true); +} + 
+{ + const writable = new Writable({ + write() { + } + }); + assert.strictEqual(writable.writableAborted, false); + writable.end(); + writable.destroy(); + assert.strictEqual(writable.writableAborted, true); +} + + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-writable-callback-twice.js b/test/parallel/test-stream-writable-callback-twice.js index 9115216946..819569fa14 100644 --- a/test/parallel/test-stream-writable-callback-twice.js +++ b/test/parallel/test-stream-writable-callback-twice.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const { Writable } = require('../../lib'); +const { Writable } = require('../../lib/ours/index'); const stream = new Writable({ write(chunk, enc, cb) { cb(); cb(); } }); diff --git a/test/parallel/test-stream-writable-change-default-encoding.js b/test/parallel/test-stream-writable-change-default-encoding.js index 15722705f0..f356d28dec 100644 --- a/test/parallel/test-stream-writable-change-default-encoding.js +++ b/test/parallel/test-stream-writable-change-default-encoding.js @@ -28,7 +28,7 @@ require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); class MyWritable extends stream.Writable { constructor(fn, options) { diff --git a/test/parallel/test-stream-writable-clear-buffer.js b/test/parallel/test-stream-writable-clear-buffer.js index 148df0e547..7e2a387c23 100644 --- a/test/parallel/test-stream-writable-clear-buffer.js +++ b/test/parallel/test-stream-writable-clear-buffer.js @@ -9,7 +9,7 @@ // the actual buffered request count are the same. 
const common = require('../common'); -const Stream = require('../../lib'); +const Stream = require('../../lib/ours/index'); const assert = require('assert'); class StreamWritable extends Stream.Writable { diff --git a/test/parallel/test-stream-writable-constructor-set-methods.js b/test/parallel/test-stream-writable-constructor-set-methods.js index a55c552d8b..0353401b4f 100644 --- a/test/parallel/test-stream-writable-constructor-set-methods.js +++ b/test/parallel/test-stream-writable-constructor-set-methods.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Writable } = require('../../lib'); +const { Writable } = require('../../lib/ours/index'); const bufferBlerg = Buffer.from('blerg'); const w = new Writable(); diff --git a/test/parallel/test-stream-writable-decoded-encoding.js b/test/parallel/test-stream-writable-decoded-encoding.js index 8754727ce6..8180205fc3 100644 --- a/test/parallel/test-stream-writable-decoded-encoding.js +++ b/test/parallel/test-stream-writable-decoded-encoding.js @@ -28,7 +28,7 @@ require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); class MyWritable extends stream.Writable { constructor(fn, options) { diff --git a/test/parallel/test-stream-writable-destroy.js b/test/parallel/test-stream-writable-destroy.js index 5c2f0c97c1..52f11fd2a8 100644 --- a/test/parallel/test-stream-writable-destroy.js +++ b/test/parallel/test-stream-writable-destroy.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Writable, addAbortSignal } = require('../../lib'); +const { Writable, addAbortSignal } = require('../../lib/ours/index'); const assert = require('assert'); { @@ -129,8 +129,6 @@ const assert = require('assert'); write.destroy(); - write.removeListener('finish', fail); - write.on('finish', common.mustCall()); assert.strictEqual(write.destroyed, true); } diff --git a/test/parallel/test-stream-writable-end-cb-error.js b/test/parallel/test-stream-writable-end-cb-error.js index b3f3df5927..c69be9a596 100644 --- a/test/parallel/test-stream-writable-end-cb-error.js +++ b/test/parallel/test-stream-writable-end-cb-error.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); { // Invoke end callback on failure. 
diff --git a/test/parallel/test-stream-writable-end-cb-uncaught.js b/test/parallel/test-stream-writable-end-cb-uncaught.js index f02db94897..222fb52546 100644 --- a/test/parallel/test-stream-writable-end-cb-uncaught.js +++ b/test/parallel/test-stream-writable-end-cb-uncaught.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); process.on('uncaughtException', common.mustCall((err) => { assert.strictEqual(err.message, 'kaboom'); diff --git a/test/parallel/test-stream-writable-end-multiple.js b/test/parallel/test-stream-writable-end-multiple.js index 552997402b..6b30345a9b 100644 --- a/test/parallel/test-stream-writable-end-multiple.js +++ b/test/parallel/test-stream-writable-end-multiple.js @@ -8,7 +8,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const writable = new stream.Writable(); writable._write = (chunk, encoding, cb) => { diff --git a/test/parallel/test-stream-writable-ended-state.js b/test/parallel/test-stream-writable-ended-state.js index 0779c3f707..2b5a85b06b 100644 --- a/test/parallel/test-stream-writable-ended-state.js +++ b/test/parallel/test-stream-writable-ended-state.js @@ -8,7 +8,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const writable = new stream.Writable(); diff --git a/test/parallel/test-stream-writable-final-async.js b/test/parallel/test-stream-writable-final-async.js index 822d980950..37dbd08e6f 100644 --- a/test/parallel/test-stream-writable-final-async.js +++ b/test/parallel/test-stream-writable-final-async.js @@ -8,7 +8,7 @@ const common = require('../common'); const { Duplex, -} = require('../../lib'); +} = require('../../lib/ours/index'); const st = require('timers').setTimeout; diff --git a/test/parallel/test-stream-writable-final-destroy.js b/test/parallel/test-stream-writable-final-destroy.js index 7e7c998d33..580040d6b1 100644 --- a/test/parallel/test-stream-writable-final-destroy.js +++ b/test/parallel/test-stream-writable-final-destroy.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Writable } = require('../../lib'); +const { Writable } = require('../../lib/ours/index'); { const w = new Writable({ diff --git a/test/parallel/test-stream-writable-final-throw.js b/test/parallel/test-stream-writable-final-throw.js index 471886557a..848434544c 100644 --- a/test/parallel/test-stream-writable-final-throw.js +++ b/test/parallel/test-stream-writable-final-throw.js @@ -8,7 +8,7 @@ const common = require('../common'); const { Duplex, -} = require('../../lib'); +} = require('../../lib/ours/index'); { class Foo extends Duplex { diff --git a/test/parallel/test-stream-writable-finish-destroyed.js b/test/parallel/test-stream-writable-finish-destroyed.js index ef72823a52..ec2ed6ba4f 100644 --- a/test/parallel/test-stream-writable-finish-destroyed.js +++ b/test/parallel/test-stream-writable-finish-destroyed.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Writable } = require('../../lib'); +const { Writable } = require('../../lib/ours/index'); { const w = new Writable({ @@ -37,6 +37,16 @@ const { Writable } = require('../../lib'); w.destroy(); } +{ + const w = new Writable({ + write() { + } + }); + w.on('finish', common.mustNotCall()); + w.end(); + w.destroy(); +} + /* replacement 
start */ process.on('beforeExit', (code) => { if(code === 0) { diff --git a/test/parallel/test-stream-writable-finished-state.js b/test/parallel/test-stream-writable-finished-state.js index 1ef0928728..5f477ce296 100644 --- a/test/parallel/test-stream-writable-finished-state.js +++ b/test/parallel/test-stream-writable-finished-state.js @@ -8,7 +8,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const writable = new stream.Writable(); diff --git a/test/parallel/test-stream-writable-finished.js b/test/parallel/test-stream-writable-finished.js index 28a2eaa03d..5f4a93df5e 100644 --- a/test/parallel/test-stream-writable-finished.js +++ b/test/parallel/test-stream-writable-finished.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Writable } = require('../../lib'); +const { Writable } = require('../../lib/ours/index'); const assert = require('assert'); // basic diff --git a/test/parallel/test-stream-writable-invalid-chunk.js b/test/parallel/test-stream-writable-invalid-chunk.js index c3394abf9b..6db696bec0 100644 --- a/test/parallel/test-stream-writable-invalid-chunk.js +++ b/test/parallel/test-stream-writable-invalid-chunk.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const assert = require('assert'); function testWriteType(val, objectMode, code) { diff --git a/test/parallel/test-stream-writable-needdrain-state.js b/test/parallel/test-stream-writable-needdrain-state.js index 4ccc328a5e..8a6c12e0f4 100644 --- a/test/parallel/test-stream-writable-needdrain-state.js +++ b/test/parallel/test-stream-writable-needdrain-state.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const assert = require('assert'); const transform = new stream.Transform({ diff --git a/test/parallel/test-stream-writable-null.js b/test/parallel/test-stream-writable-null.js index 17ebd539d3..f9a8783f30 100644 --- a/test/parallel/test-stream-writable-null.js +++ b/test/parallel/test-stream-writable-null.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); class MyWritable extends stream.Writable { constructor(options) { diff --git a/test/parallel/test-stream-writable-properties.js b/test/parallel/test-stream-writable-properties.js index 712673129d..c2b5ce177a 100644 --- a/test/parallel/test-stream-writable-properties.js +++ b/test/parallel/test-stream-writable-properties.js @@ -7,7 +7,7 @@ require('../common'); const assert = require('assert'); -const { Writable } = require('../../lib'); +const { Writable } = require('../../lib/ours/index'); { const w = new Writable(); diff --git a/test/parallel/test-stream-writable-samecb-singletick.js b/test/parallel/test-stream-writable-samecb-singletick.js index d0d594fdaa..6f72e25db4 100644 --- a/test/parallel/test-stream-writable-samecb-singletick.js +++ b/test/parallel/test-stream-writable-samecb-singletick.js @@ -6,7 +6,7 @@ ; const common = require('../common'); const { Console } = require('console'); -const { Writable } = require('../../lib'); +const { Writable } = require('../../lib/ours/index'); const async_hooks = require('async_hooks'); // Make sure that repeated calls to silentConsole.log(), and by extension diff --git 
a/test/parallel/test-stream-writable-writable.js b/test/parallel/test-stream-writable-writable.js index 662e00ceaa..7236b0862c 100644 --- a/test/parallel/test-stream-writable-writable.js +++ b/test/parallel/test-stream-writable-writable.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Writable } = require('../../lib'); +const { Writable } = require('../../lib/ours/index'); { const w = new Writable({ diff --git a/test/parallel/test-stream-writable-write-cb-error.js b/test/parallel/test-stream-writable-write-cb-error.js index 696ae67501..26e998f600 100644 --- a/test/parallel/test-stream-writable-write-cb-error.js +++ b/test/parallel/test-stream-writable-write-cb-error.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const { Writable } = require('../../lib'); +const { Writable } = require('../../lib/ours/index'); const assert = require('assert'); // Ensure callback is always invoked before diff --git a/test/parallel/test-stream-writable-write-cb-twice.js b/test/parallel/test-stream-writable-write-cb-twice.js index 8b605562ed..988206e697 100644 --- a/test/parallel/test-stream-writable-write-cb-twice.js +++ b/test/parallel/test-stream-writable-write-cb-twice.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const { Writable } = require('../../lib'); +const { Writable } = require('../../lib/ours/index'); { // Sync + Sync diff --git a/test/parallel/test-stream-writable-write-error.js b/test/parallel/test-stream-writable-write-error.js index ed44285ee4..92af5af549 100644 --- a/test/parallel/test-stream-writable-write-error.js +++ b/test/parallel/test-stream-writable-write-error.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Writable } = require('../../lib'); +const { Writable } = require('../../lib/ours/index'); function expectError(w, args, code, sync) { if (sync) { diff --git a/test/parallel/test-stream-writable-write-writev-finish.js b/test/parallel/test-stream-writable-write-writev-finish.js index 05f404ebad..043b9bf07c 100644 --- a/test/parallel/test-stream-writable-write-writev-finish.js +++ b/test/parallel/test-stream-writable-write-writev-finish.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); // Ensure consistency between the finish event when using cork() // and writev and when not using them diff --git a/test/parallel/test-stream-writableState-ending.js b/test/parallel/test-stream-writableState-ending.js index 52613a74b0..29d9427e80 100644 --- a/test/parallel/test-stream-writableState-ending.js +++ b/test/parallel/test-stream-writableState-ending.js @@ -8,7 +8,7 @@ require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const writable = new stream.Writable(); diff --git a/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js b/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js index ed329fceac..05edabd29d 100644 --- a/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js +++ b/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = 
require('../../lib/ours/index'); const writable = new stream.Writable(); diff --git a/test/parallel/test-stream-write-destroy.js b/test/parallel/test-stream-write-destroy.js index b89cf454c9..9915c989a3 100644 --- a/test/parallel/test-stream-write-destroy.js +++ b/test/parallel/test-stream-write-destroy.js @@ -6,7 +6,7 @@ ; require('../common'); const assert = require('assert'); -const { Writable } = require('../../lib'); +const { Writable } = require('../../lib/ours/index'); // Test interaction between calling .destroy() on a writable and pending // writes. @@ -25,9 +25,7 @@ for (const withPendingData of [ false, true ]) { let chunksWritten = 0; let drains = 0; - let finished = false; w.on('drain', () => drains++); - w.on('finish', () => finished = true); function onWrite(err) { if (err) { @@ -65,10 +63,6 @@ for (const withPendingData of [ false, true ]) { assert.strictEqual(chunksWritten, useEnd && !withPendingData ? 1 : 2); assert.strictEqual(callbacks.length, 0); assert.strictEqual(drains, 1); - - // When we used `.end()`, we see the 'finished' event if and only if - // we actually finished processing the write queue. - assert.strictEqual(finished, !withPendingData && useEnd); } } diff --git a/test/parallel/test-stream-write-drain.js b/test/parallel/test-stream-write-drain.js index a094bef673..c0bed561f1 100644 --- a/test/parallel/test-stream-write-drain.js +++ b/test/parallel/test-stream-write-drain.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const { Writable } = require('../../lib'); +const { Writable } = require('../../lib/ours/index'); // Don't emit 'drain' if ended diff --git a/test/parallel/test-stream-write-final.js b/test/parallel/test-stream-write-final.js index e7be4673f0..cc802acca1 100644 --- a/test/parallel/test-stream-write-final.js +++ b/test/parallel/test-stream-write-final.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); let shutdown = false; const w = new stream.Writable({ diff --git a/test/parallel/test-stream-writev.js b/test/parallel/test-stream-writev.js index 03b0c1222d..eaae012f52 100644 --- a/test/parallel/test-stream-writev.js +++ b/test/parallel/test-stream-writev.js @@ -28,7 +28,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const queue = []; for (let decode = 0; decode < 2; decode++) { diff --git a/test/parallel/test-stream2-base64-single-char-read-end.js b/test/parallel/test-stream2-base64-single-char-read-end.js index 92708b701f..b11e27e674 100644 --- a/test/parallel/test-stream2-base64-single-char-read-end.js +++ b/test/parallel/test-stream2-base64-single-char-read-end.js @@ -26,7 +26,7 @@ const silentConsole = { log() {}, error() {} }; ; require('../common'); -const { Readable: R, Writable: W } = require('../../lib'); +const { Readable: R, Writable: W } = require('../../lib/ours/index'); const assert = require('assert'); const src = new R({ encoding: 'base64' }); diff --git a/test/parallel/test-stream2-basic.js b/test/parallel/test-stream2-basic.js index ec533a4305..b75f95a100 100644 --- a/test/parallel/test-stream2-basic.js +++ b/test/parallel/test-stream2-basic.js @@ -27,7 +27,7 @@ ; const common = require('../common'); -const { Readable: R, Writable: W } = require('../../lib'); +const { Readable: R, Writable: W } = require('../../lib/ours/index'); 
const assert = require('assert'); const EE = require('events').EventEmitter; diff --git a/test/parallel/test-stream2-compatibility.js b/test/parallel/test-stream2-compatibility.js index 4b07e970b8..3ba2b9393c 100644 --- a/test/parallel/test-stream2-compatibility.js +++ b/test/parallel/test-stream2-compatibility.js @@ -26,7 +26,7 @@ const silentConsole = { log() {}, error() {} }; ; require('../common'); -const { Readable: R, Writable: W } = require('../../lib'); +const { Readable: R, Writable: W } = require('../../lib/ours/index'); const assert = require('assert'); let ondataCalled = 0; diff --git a/test/parallel/test-stream2-decode-partial.js b/test/parallel/test-stream2-decode-partial.js index 7a76fab0d6..1eba765509 100644 --- a/test/parallel/test-stream2-decode-partial.js +++ b/test/parallel/test-stream2-decode-partial.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const assert = require('assert'); let buf = ''; diff --git a/test/parallel/test-stream2-finish-pipe-error.js b/test/parallel/test-stream2-finish-pipe-error.js index c7f79939af..1872b9cd8e 100644 --- a/test/parallel/test-stream2-finish-pipe-error.js +++ b/test/parallel/test-stream2-finish-pipe-error.js @@ -5,7 +5,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); process.on('uncaughtException', common.mustCall()); diff --git a/test/parallel/test-stream2-finish-pipe.js b/test/parallel/test-stream2-finish-pipe.js index 24e014e3f7..5e97159e3e 100644 --- a/test/parallel/test-stream2-finish-pipe.js +++ b/test/parallel/test-stream2-finish-pipe.js @@ -26,7 +26,7 @@ const silentConsole = { log() {}, error() {} }; ; require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const r = new stream.Readable(); r._read = function(size) { diff --git a/test/parallel/test-stream2-large-read-stall.js b/test/parallel/test-stream2-large-read-stall.js index 8caef393ff..87db2aca76 100644 --- a/test/parallel/test-stream2-large-read-stall.js +++ b/test/parallel/test-stream2-large-read-stall.js @@ -36,7 +36,7 @@ const PUSHSIZE = 20; const PUSHCOUNT = 1000; const HWM = 50; -const Readable = require('../../lib').Readable; +const Readable = require('../../lib/ours/index').Readable; const r = new Readable({ highWaterMark: HWM }); diff --git a/test/parallel/test-stream2-objects.js b/test/parallel/test-stream2-objects.js index 51fafbe5bc..260c2349ae 100644 --- a/test/parallel/test-stream2-objects.js +++ b/test/parallel/test-stream2-objects.js @@ -27,7 +27,7 @@ ; const common = require('../common'); -const { Readable, Writable } = require('../../lib'); +const { Readable, Writable } = require('../../lib/ours/index'); const assert = require('assert'); function toArray(callback) { diff --git a/test/parallel/test-stream2-pipe-error-handling.js b/test/parallel/test-stream2-pipe-error-handling.js index e68b35897b..824626e5d5 100644 --- a/test/parallel/test-stream2-pipe-error-handling.js +++ b/test/parallel/test-stream2-pipe-error-handling.js @@ -27,7 +27,7 @@ ; require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); { let count = 1000; diff --git a/test/parallel/test-stream2-pipe-error-once-listener.js b/test/parallel/test-stream2-pipe-error-once-listener.js index 
b199a374e1..9ea1f60850 100644 --- a/test/parallel/test-stream2-pipe-error-once-listener.js +++ b/test/parallel/test-stream2-pipe-error-once-listener.js @@ -27,7 +27,7 @@ ; require('../common'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); class Read extends stream.Readable { _read(size) { diff --git a/test/parallel/test-stream2-push.js b/test/parallel/test-stream2-push.js index e61b58c13b..13271d8cf0 100644 --- a/test/parallel/test-stream2-push.js +++ b/test/parallel/test-stream2-push.js @@ -27,7 +27,7 @@ ; require('../common'); const assert = require('assert'); -const { Readable, Writable } = require('../../lib'); +const { Readable, Writable } = require('../../lib/ours/index'); const EE = require('events').EventEmitter; diff --git a/test/parallel/test-stream2-read-sync-stack.js b/test/parallel/test-stream2-read-sync-stack.js index 24cb41068a..30aac079c5 100644 --- a/test/parallel/test-stream2-read-sync-stack.js +++ b/test/parallel/test-stream2-read-sync-stack.js @@ -26,7 +26,7 @@ const silentConsole = { log() {}, error() {} }; ; const common = require('../common'); -const Readable = require('../../lib').Readable; +const Readable = require('../../lib/ours/index').Readable; // This tests synchronous read callbacks and verifies that even if they nest // heavily the process handles it without an error diff --git a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js index c006c2639a..4ce5f9cfeb 100644 --- a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js +++ b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js @@ -28,7 +28,7 @@ require('../common'); const assert = require('assert'); -const Readable = require('../../lib').Readable; +const Readable = require('../../lib/ours/index').Readable; test1(); test2(); diff --git a/test/parallel/test-stream2-readable-from-list.js b/test/parallel/test-stream2-readable-from-list.js index d706db2158..119faea1e5 100644 --- a/test/parallel/test-stream2-readable-from-list.js +++ b/test/parallel/test-stream2-readable-from-list.js @@ -28,7 +28,7 @@ ; require('../common'); const assert = require('assert'); -const fromList = require('../../lib').Readable._fromList; +const fromList = require('../../lib/ours/index').Readable._fromList; const BufferList = require('../../lib/internal/streams/buffer_list'); const util = require('util'); diff --git a/test/parallel/test-stream2-readable-legacy-drain.js b/test/parallel/test-stream2-readable-legacy-drain.js index 32bd97c68e..ced1a808c9 100644 --- a/test/parallel/test-stream2-readable-legacy-drain.js +++ b/test/parallel/test-stream2-readable-legacy-drain.js @@ -28,7 +28,7 @@ const common = require('../common'); const assert = require('assert'); -const Stream = require('../../lib'); +const Stream = require('../../lib/ours/index'); const Readable = Stream.Readable; const r = new Readable(); diff --git a/test/parallel/test-stream2-readable-non-empty-end.js b/test/parallel/test-stream2-readable-non-empty-end.js index 865487199c..051da2eea7 100644 --- a/test/parallel/test-stream2-readable-non-empty-end.js +++ b/test/parallel/test-stream2-readable-non-empty-end.js @@ -27,7 +27,7 @@ ; const common = require('../common'); const assert = require('assert'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); let len = 0; const chunks = new Array(10); diff --git a/test/parallel/test-stream2-readable-wrap-destroy.js 
b/test/parallel/test-stream2-readable-wrap-destroy.js index c7352e6523..77dd8e34f0 100644 --- a/test/parallel/test-stream2-readable-wrap-destroy.js +++ b/test/parallel/test-stream2-readable-wrap-destroy.js @@ -6,7 +6,7 @@ ; const common = require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const EE = require('events').EventEmitter; const oldStream = new EE(); diff --git a/test/parallel/test-stream2-readable-wrap-empty.js b/test/parallel/test-stream2-readable-wrap-empty.js index 5fe3f04983..1b05e77cb2 100644 --- a/test/parallel/test-stream2-readable-wrap-empty.js +++ b/test/parallel/test-stream2-readable-wrap-empty.js @@ -27,7 +27,7 @@ ; const common = require('../common'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const EE = require('events').EventEmitter; const oldStream = new EE(); diff --git a/test/parallel/test-stream2-readable-wrap-error.js b/test/parallel/test-stream2-readable-wrap-error.js index 9ac221ba8b..ecf6d28a0d 100644 --- a/test/parallel/test-stream2-readable-wrap-error.js +++ b/test/parallel/test-stream2-readable-wrap-error.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const { Readable } = require('../../lib'); +const { Readable } = require('../../lib/ours/index'); const EE = require('events').EventEmitter; class LegacyStream extends EE { diff --git a/test/parallel/test-stream2-readable-wrap.js b/test/parallel/test-stream2-readable-wrap.js index 3c78ddde95..2d127c5104 100644 --- a/test/parallel/test-stream2-readable-wrap.js +++ b/test/parallel/test-stream2-readable-wrap.js @@ -27,7 +27,7 @@ ; const common = require('../common'); const assert = require('assert'); -const { Readable, Writable } = require('../../lib'); +const { Readable, Writable } = require('../../lib/ours/index'); const EE = require('events').EventEmitter; function runTest(highWaterMark, objectMode, produce) { diff --git a/test/parallel/test-stream2-set-encoding.js b/test/parallel/test-stream2-set-encoding.js index 75ddb1c4f9..046791a5c8 100644 --- a/test/parallel/test-stream2-set-encoding.js +++ b/test/parallel/test-stream2-set-encoding.js @@ -27,7 +27,7 @@ ; const common = require('../common'); const assert = require('assert'); -const { Readable: R } = require('../../lib'); +const { Readable: R } = require('../../lib/ours/index'); class TestReader extends R { constructor(n, opts) { diff --git a/test/parallel/test-stream2-transform.js b/test/parallel/test-stream2-transform.js index a086977206..872daf5062 100644 --- a/test/parallel/test-stream2-transform.js +++ b/test/parallel/test-stream2-transform.js @@ -27,7 +27,7 @@ ; const common = require('../common'); const assert = require('assert'); -const { PassThrough, Transform } = require('../../lib'); +const { PassThrough, Transform } = require('../../lib/ours/index'); { // Verify writable side consumption diff --git a/test/parallel/test-stream2-unpipe-drain.js b/test/parallel/test-stream2-unpipe-drain.js index 5557077f57..53f0a32197 100644 --- a/test/parallel/test-stream2-unpipe-drain.js +++ b/test/parallel/test-stream2-unpipe-drain.js @@ -28,7 +28,7 @@ require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); class TestWriter extends stream.Writable { _write(buffer, encoding, callback) { diff --git a/test/parallel/test-stream2-unpipe-leak.js b/test/parallel/test-stream2-unpipe-leak.js index 
af96915cac..f4c6ac8ed1 100644 --- a/test/parallel/test-stream2-unpipe-leak.js +++ b/test/parallel/test-stream2-unpipe-leak.js @@ -27,7 +27,7 @@ ; require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const chunk = Buffer.from('hallo'); diff --git a/test/parallel/test-stream2-writable.js b/test/parallel/test-stream2-writable.js index bb5c04a777..2977adeba6 100644 --- a/test/parallel/test-stream2-writable.js +++ b/test/parallel/test-stream2-writable.js @@ -27,7 +27,7 @@ ; const common = require('../common'); -const { Writable: W, Duplex: D } = require('../../lib'); +const { Writable: W, Duplex: D } = require('../../lib/ours/index'); const assert = require('assert'); class TestWriter extends W { diff --git a/test/parallel/test-stream3-cork-end.js b/test/parallel/test-stream3-cork-end.js index 48daff946c..665391f69a 100644 --- a/test/parallel/test-stream3-cork-end.js +++ b/test/parallel/test-stream3-cork-end.js @@ -6,7 +6,7 @@ ; require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const Writable = stream.Writable; // Test the buffering behavior of Writable streams. diff --git a/test/parallel/test-stream3-cork-uncork.js b/test/parallel/test-stream3-cork-uncork.js index da60e247b6..d146ed1431 100644 --- a/test/parallel/test-stream3-cork-uncork.js +++ b/test/parallel/test-stream3-cork-uncork.js @@ -6,7 +6,7 @@ ; require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const Writable = stream.Writable; // Test the buffering behavior of Writable streams. diff --git a/test/parallel/test-stream3-pause-then-read.js b/test/parallel/test-stream3-pause-then-read.js index b03b4e7c37..84a2173e87 100644 --- a/test/parallel/test-stream3-pause-then-read.js +++ b/test/parallel/test-stream3-pause-then-read.js @@ -28,7 +28,7 @@ require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const Readable = stream.Readable; const Writable = stream.Writable; diff --git a/test/parallel/test-streams-highwatermark.js b/test/parallel/test-streams-highwatermark.js index daab8f3eae..b5d04577bf 100644 --- a/test/parallel/test-streams-highwatermark.js +++ b/test/parallel/test-streams-highwatermark.js @@ -7,7 +7,7 @@ const common = require('../common'); const assert = require('assert'); -const stream = require('../../lib'); +const stream = require('../../lib/ours/index'); const { inspect } = require('util'); { diff --git a/test/test-browser.js b/test/test-browser.js index e7ad511125..b555996722 100644 --- a/test/test-browser.js +++ b/test/test-browser.js @@ -2,33 +2,6 @@ const test = require('tape') -// if (!global.console) { -// global.console = {} -// } -// if (!global.console.log) { -// global.console.log = function () {} -// } -// if (!global.console.error) { -// global.console.error = global.console.log -// } -// if (!global.console.info) { -// global.console.info = global.console.log -// } - -// // TODO: add replacements instead -// global.process = { -// env: {}, -// on: function () {}, -// cwd: function () { -// return '/' -// }, -// binding: function () { -// return { -// hasTracing: false -// } -// } -// } - test('streams', function (t) { require('./browser/test-stream-big-packet')(t) require('./browser/test-stream-big-push')(t) @@ -68,8 +41,8 @@ test('streams 2', function 
(t) { require('./browser/test-stream2-pipe-error-once-listener')(t) require('./browser/test-stream2-push')(t) require('./browser/test-stream2-readable-empty-buffer-no-eof')(t) - // require('./browser/test-stream2-readable-from-list')(t); - // require('./browser/test-stream2-transform')(t); + require('./browser/test-stream2-readable-from-list')(t) + require('./browser/test-stream2-transform')(t) require('./browser/test-stream2-set-encoding')(t) require('./browser/test-stream2-readable-legacy-drain')(t) require('./browser/test-stream2-readable-wrap-empty')(t) From 87983b534cb44d44981566eb2a16844d8a8fc9de Mon Sep 17 00:00:00 2001 From: Shogun Date: Thu, 21 Apr 2022 07:04:41 +0200 Subject: [PATCH 04/19] test: Drop SauceLabs in favor of Playwright. --- .airtap.yml | 49 ++-------------- .github/workflows/browsers.yml | 36 ++++++++++++ .github/workflows/{ci.yml => node.yml} | 6 +- .github/workflows/sauce.yml | 37 ------------ README.md | 12 +--- build/build.mjs | 7 +-- package.json | 6 +- src/test/browser/test-stream-big-packet.js | 2 +- src/test/browser/test-stream-big-push.js | 2 +- src/test/browser/test-stream-duplex.js | 2 +- src/test/browser/test-stream-end-paused.js | 2 +- src/test/browser/test-stream-finished.js | 2 +- src/test/browser/test-stream-ispaused.js | 2 +- .../browser/test-stream-pipe-after-end.js | 2 +- .../browser/test-stream-pipe-cleanup-pause.js | 2 +- src/test/browser/test-stream-pipe-cleanup.js | 2 +- .../test-stream-pipe-error-handling.js | 2 +- src/test/browser/test-stream-pipe-event.js | 2 +- .../test-stream-pipe-without-listenerCount.js | 2 +- src/test/browser/test-stream-pipeline.js | 2 +- src/test/browser/test-stream-push-order.js | 2 +- src/test/browser/test-stream-push-strings.js | 2 +- ...stream-readable-constructor-set-methods.js | 2 +- .../browser/test-stream-readable-event.js | 2 +- src/test/browser/test-stream-sync-write.js | 2 +- ...tream-transform-constructor-set-methods.js | 2 +- ...tream-transform-objectmode-falsey-value.js | 2 +- .../test-stream-transform-split-objectmode.js | 2 +- .../test-stream-unshift-empty-chunk.js | 2 +- .../browser/test-stream-unshift-read-race.js | 2 +- ...stream-writable-change-default-encoding.js | 2 +- ...stream-writable-constructor-set-methods.js | 2 +- .../test-stream-writable-decoded-encoding.js | 2 +- src/test/browser/test-stream-writev.js | 2 +- ...est-stream2-base64-single-char-read-end.js | 2 +- .../browser/test-stream2-compatibility.js | 2 +- .../browser/test-stream2-large-read-stall.js | 2 +- src/test/browser/test-stream2-objects.js | 2 +- .../test-stream2-pipe-error-handling.js | 2 +- .../test-stream2-pipe-error-once-listener.js | 2 +- src/test/browser/test-stream2-push.js | 2 +- ...st-stream2-readable-empty-buffer-no-eof.js | 2 +- .../test-stream2-readable-legacy-drain.js | 2 +- .../test-stream2-readable-non-empty-end.js | 2 +- .../test-stream2-readable-wrap-empty.js | 2 +- .../browser/test-stream2-readable-wrap.js | 2 +- src/test/browser/test-stream2-set-encoding.js | 2 +- src/test/browser/test-stream2-transform.js | 2 +- src/test/browser/test-stream2-unpipe-drain.js | 2 +- src/test/browser/test-stream2-writable.js | 2 +- .../browser/test-stream3-pause-then-read.js | 2 +- src/test/test-browser.js | 57 ------------------- test/browser/test-stream-big-packet.js | 2 +- test/browser/test-stream-big-push.js | 2 +- test/browser/test-stream-duplex.js | 2 +- test/browser/test-stream-end-paused.js | 2 +- test/browser/test-stream-finished.js | 2 +- test/browser/test-stream-ispaused.js | 2 +- 
test/browser/test-stream-pipe-after-end.js | 2 +- .../browser/test-stream-pipe-cleanup-pause.js | 2 +- test/browser/test-stream-pipe-cleanup.js | 2 +- .../test-stream-pipe-error-handling.js | 2 +- test/browser/test-stream-pipe-event.js | 2 +- .../test-stream-pipe-without-listenerCount.js | 2 +- test/browser/test-stream-pipeline.js | 2 +- test/browser/test-stream-push-order.js | 2 +- test/browser/test-stream-push-strings.js | 2 +- ...stream-readable-constructor-set-methods.js | 2 +- test/browser/test-stream-readable-event.js | 2 +- test/browser/test-stream-sync-write.js | 2 +- ...tream-transform-constructor-set-methods.js | 2 +- ...tream-transform-objectmode-falsey-value.js | 2 +- .../test-stream-transform-split-objectmode.js | 2 +- .../test-stream-unshift-empty-chunk.js | 2 +- test/browser/test-stream-unshift-read-race.js | 2 +- ...stream-writable-change-default-encoding.js | 2 +- ...stream-writable-constructor-set-methods.js | 2 +- .../test-stream-writable-decoded-encoding.js | 2 +- test/browser/test-stream-writev.js | 2 +- ...est-stream2-base64-single-char-read-end.js | 2 +- test/browser/test-stream2-compatibility.js | 2 +- test/browser/test-stream2-large-read-stall.js | 2 +- test/browser/test-stream2-objects.js | 2 +- .../test-stream2-pipe-error-handling.js | 2 +- .../test-stream2-pipe-error-once-listener.js | 2 +- test/browser/test-stream2-push.js | 2 +- ...st-stream2-readable-empty-buffer-no-eof.js | 2 +- .../test-stream2-readable-legacy-drain.js | 2 +- .../test-stream2-readable-non-empty-end.js | 2 +- .../test-stream2-readable-wrap-empty.js | 2 +- test/browser/test-stream2-readable-wrap.js | 2 +- test/browser/test-stream2-set-encoding.js | 2 +- test/browser/test-stream2-transform.js | 2 +- test/browser/test-stream2-unpipe-drain.js | 2 +- test/browser/test-stream2-writable.js | 2 +- test/browser/test-stream3-pause-then-read.js | 2 +- test/test-browser.js | 57 ------------------- 97 files changed, 139 insertions(+), 304 deletions(-) create mode 100644 .github/workflows/browsers.yml rename .github/workflows/{ci.yml => node.yml} (91%) delete mode 100644 .github/workflows/sauce.yml delete mode 100644 src/test/test-browser.js delete mode 100644 test/test-browser.js diff --git a/.airtap.yml b/.airtap.yml index 6f17246646..86bbb2e747 100644 --- a/.airtap.yml +++ b/.airtap.yml @@ -1,42 +1,5 @@ -sauce_connect: true - presets: - sauce: - providers: - - airtap-sauce - browsers: - - name: chrome - - name: firefox - # Testing on Safari disabled due to https://github.com/airtap/sauce/issues/11 - # - name: safari - - name: edge - - sauce-chrome: - providers: - - airtap-sauce - browsers: - - name: chrome - - sauce-firefox: - providers: - - airtap-sauce - browsers: - - name: firefox - - # Testing on Safari disabled due to https://github.com/airtap/sauce/issues/11 - # sauce-safari: - # providers: - # - airtap-sauce - # browsers: - # - name: safari - - sauce-edge: - providers: - - airtap-sauce - browsers: - - name: edge - - local: + all: providers: - airtap-playwright browsers: @@ -48,25 +11,25 @@ presets: launch: channel: msedge - local-chrome: + chrome: providers: - airtap-playwright browsers: - - name: chrome + - name: chromium - local-firefox: + firefox: providers: - airtap-playwright browsers: - name: firefox - local-safari: + safari: providers: - airtap-playwright browsers: - name: webkit - local-edge: + edge: providers: - airtap-playwright browsers: diff --git a/.github/workflows/browsers.yml b/.github/workflows/browsers.yml new file mode 100644 index 0000000000..4b7146a51e --- /dev/null +++ 
b/.github/workflows/browsers.yml @@ -0,0 +1,36 @@ +name: Browsers + +on: [push, pull_request] + +jobs: + build: + name: Browsers + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: ['ubuntu-latest', 'windows-latest', 'macos-latest'] + browser: ['chrome', 'firefox', 'safari', 'edge'] + exclude: + - os: ubuntu-latest + browser: safari + - os: windows-latest + browser: safari + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Use Node.js 16 + uses: actions/setup-node@v3 + with: + node-version: 16 + - name: Restore cached dependencies + uses: actions/cache@v3 + with: + path: node_modules + key: node-modules-${{ hashFiles('package.json') }} + - name: Install dependencies + run: npm install + - name: Install Browser + run: ./node_modules/.bin/playwright install ${{ fromJSON('{"chrome":"chromium","edge":"msedge","firefox":"firefox","safari":"webkit"}')[matrix.browser] }} + - name: Run Tests on Browsers + run: ./node_modules/.bin/airtap -p ${{ matrix.browser }} test/browser/test-*.js diff --git a/.github/workflows/ci.yml b/.github/workflows/node.yml similarity index 91% rename from .github/workflows/ci.yml rename to .github/workflows/node.yml index 003edcd6fe..78c07dfc8c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/node.yml @@ -1,10 +1,10 @@ -name: Node.js Build +name: Node.js on: [push, pull_request] jobs: build: - name: Node.js Build + name: Node.js runs-on: ${{ matrix.os }} strategy: fail-fast: false @@ -26,4 +26,4 @@ jobs: - name: Install dependencies run: npm install - name: Run Tests - run: npm run test + run: npm run coverage diff --git a/.github/workflows/sauce.yml b/.github/workflows/sauce.yml deleted file mode 100644 index a8750eb257..0000000000 --- a/.github/workflows/sauce.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: Sauce Labs Build -on: push -jobs: - test: - name: Sauce Labs Build - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - browser: ['chrome', 'firefox', 'edge'] - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Use Node.js 16 - uses: actions/setup-node@v3 - with: - node-version: 16 - - name: Restore cached dependencies - uses: actions/cache@v3 - with: - path: node_modules - key: node-modules-${{ hashFiles('package.json') }} - - name: Install dependencies - run: npm install - env: - # Download Sauce Connect binary now instead of on first run - SAUCE_CONNECT_DOWNLOAD_ON_INSTALL: true - - name: Add airtap.local to /etc/hosts - run: echo "127.0.0.1 airtap.local" | sudo tee -a /etc/hosts - - name: Pause to avoid Sauce Labs timeouts - run: sleep 30s - shell: bash - - name: Run Test - run: ./node_modules/.bin/airtap -p sauce-${{ matrix.browser }} test/browser/test-*.js - env: - SAUCE_USERNAME: ${{ secrets.SAUCE_USERNAME }} - SAUCE_ACCESS_KEY: ${{ secrets.SAUCE_ACCESS_KEY }} diff --git a/README.md b/README.md index 0db3f64ef8..6731b46a47 100644 --- a/README.md +++ b/README.md @@ -4,10 +4,8 @@ [![npm status](https://img.shields.io/npm/v/readable-stream.svg)](https://npm.im/readable-stream) [![node](https://img.shields.io/node/v/readable-stream.svg)](https://www.npmjs.org/package/readable-stream) -[![Node.js Build](https://github.com/nodejs/readable-stream/workflows/Node.js%20Build/badge.svg)](https://github.com/nodejs/readable-stream/actions?query=workflow%3ANode.js%20Build) -[![Sauce Labs Build](https://github.com/nodejs/readable-stream/workflows/Sauce%20Labs%20Build/badge.svg)](https://github.com/nodejs/readable-stream/actions?query=workflow%3ASauce%20Labs%20Build) - -[![Sauce Test 
Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) +[![Node.js Build](https://github.com/nodejs/readable-stream/workflows/Node.js/badge.svg)](https://github.com/nodejs/readable-stream/actions?query=workflow%3ANode.js) +[![Browsers Build](https://github.com/nodejs/readable-stream/workflows/Browsers/badge.svg)](https://github.com/nodejs/readable-stream/actions?query=workflow%3ABrowsers) ```bash npm install --save readable-stream @@ -51,10 +49,6 @@ v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. -### Big Thanks - -Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce] - # Usage You can swap your `require('stream')` with `require('readable-stream')` @@ -119,5 +113,3 @@ Node.js. The responsibilities of the Streams Working Group include: - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E - **Robert Nagy** ([@ronag](https://github.com/ronag)) <ronagy@icloud.com> - **Vincent Weevers** ([@vweevers](https://github.com/vweevers)) <mail@vincentweevers.nl> - -[sauce]: https://saucelabs.com diff --git a/build/build.mjs b/build/build.mjs index 337f43b23e..4176b7431c 100644 --- a/build/build.mjs +++ b/build/build.mjs @@ -183,13 +183,10 @@ async function main() { console.log(`Copying template to file ${highlightFile('lib/ours/util.js', 32)} ...`) await copyFile('src/util.js', 'lib/ours/util.js') - console.log(`Copying template to file ${highlightFile('test/test-browser.js', 32)} ...`) - await copyFile('src/test/test-browser.js', 'test/test-browser.js') - - console.log(`Copying template to file ${highlightFile('test/browser', 32)} ...`) + console.log(`Copying folder ${highlightFile('test/browser', 32)} ...`) await cp('src/test/browser', 'test/browser', { recursive: true }) - console.log(`Copying template to file ${highlightFile('test/ours', 32)} ...`) + console.log(`Copying folder ${highlightFile('test/ours', 32)} ...`) await cp('src/test/ours', 'test/ours', { recursive: true }) } diff --git a/package.json b/package.json index 9c1d4c69e7..f4df05ddec 100644 --- a/package.json +++ b/package.json @@ -30,14 +30,13 @@ ], "browser": { "util": "./lib/ours/util.js", - "./lib/index.js": "./lib/ours/browser.js" + "./lib/ours/index.js": "./lib/ours/browser.js" }, "scripts": { "build": "node build/build.mjs", "postbuild": "prettier -w lib test", "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", - "test:browsers": "airtap -p sauce test/browser/test-*.js", - "test:browsers:local": "airtap -p local test/browser/test-*.js", + "test:browsers": "airtap -p all test/browser/test-*.js", "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", "format": "prettier -w src", "lint": "eslint src" @@ -51,7 +50,6 @@ "@sinonjs/fake-timers": "^9.1.1", "airtap": "^4.0.4", "airtap-playwright": "^1.0.1", - "airtap-sauce": "^1.1.2", "c8": "^7.11.0", "eslint": "^7.32.0", "eslint-config-standard": "^16.0.3", diff --git a/src/test/browser/test-stream-big-packet.js b/src/test/browser/test-stream-big-packet.js index 38e4b2e2ac..5c096e5115 100644 --- a/src/test/browser/test-stream-big-packet.js +++ b/src/test/browser/test-stream-big-packet.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { 
Transform } = require('../../lib') +const { Transform } = require('../../lib/ours/index') test('big packet', function (t) { t.plan(3) diff --git a/src/test/browser/test-stream-big-push.js b/src/test/browser/test-stream-big-push.js index 46b2524df6..97a5cff3a6 100644 --- a/src/test/browser/test-stream-big-push.js +++ b/src/test/browser/test-stream-big-push.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('big push', function (t) { t.plan(10) diff --git a/src/test/browser/test-stream-duplex.js b/src/test/browser/test-stream-duplex.js index ec17de6c4e..d35e641c3d 100644 --- a/src/test/browser/test-stream-duplex.js +++ b/src/test/browser/test-stream-duplex.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Duplex } = require('../../lib') +const { Duplex } = require('../../lib/ours/index') test('duplex', function (t) { t.plan(4) diff --git a/src/test/browser/test-stream-end-paused.js b/src/test/browser/test-stream-end-paused.js index 927fa24943..7a8909674f 100644 --- a/src/test/browser/test-stream-end-paused.js +++ b/src/test/browser/test-stream-end-paused.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('end pause', function (t) { t.plan(2) diff --git a/src/test/browser/test-stream-finished.js b/src/test/browser/test-stream-finished.js index 8b9190d8c9..6c688c0a22 100644 --- a/src/test/browser/test-stream-finished.js +++ b/src/test/browser/test-stream-finished.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Writable, Readable, Transform, finished } = require('../../lib') +const { Writable, Readable, Transform, finished } = require('../../lib/ours/index') test('readable finished', function (t) { t.plan(1) diff --git a/src/test/browser/test-stream-ispaused.js b/src/test/browser/test-stream-ispaused.js index 27cb33ad11..f9cf113f1c 100644 --- a/src/test/browser/test-stream-ispaused.js +++ b/src/test/browser/test-stream-ispaused.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') test('is paused', function (t) { t.plan(4) diff --git a/src/test/browser/test-stream-pipe-after-end.js b/src/test/browser/test-stream-pipe-after-end.js index 24401fb140..dc33a4e645 100644 --- a/src/test/browser/test-stream-pipe-after-end.js +++ b/src/test/browser/test-stream-pipe-after-end.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Readable, Writable } = require('../../lib') +const { Readable, Writable } = require('../../lib/ours/index') test('pipe after end', function (t) { t.plan(4) diff --git a/src/test/browser/test-stream-pipe-cleanup-pause.js b/src/test/browser/test-stream-pipe-cleanup-pause.js index e49cf4c5f1..2ca267511b 100644 --- a/src/test/browser/test-stream-pipe-cleanup-pause.js +++ b/src/test/browser/test-stream-pipe-cleanup-pause.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') test('pipe cleanup pause', function (t) { t.plan(3) diff --git a/src/test/browser/test-stream-pipe-cleanup.js b/src/test/browser/test-stream-pipe-cleanup.js index 8350a8297a..ee07304d6b 100644 --- a/src/test/browser/test-stream-pipe-cleanup.js +++ b/src/test/browser/test-stream-pipe-cleanup.js @@ -4,7 +4,7 @@ const test = 
require('tape') const inherits = require('inherits') -const { Stream } = require('../../lib') +const { Stream } = require('../../lib/ours/index') test('pipe cleanup', function (t) { t.plan(27) diff --git a/src/test/browser/test-stream-pipe-error-handling.js b/src/test/browser/test-stream-pipe-error-handling.js index 005167679c..55455805a9 100644 --- a/src/test/browser/test-stream-pipe-error-handling.js +++ b/src/test/browser/test-stream-pipe-error-handling.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable, Writable, Stream } = require('../../lib') +const { Readable, Writable, Stream } = require('../../lib/ours/index') test('Error Listener Catches', function (t) { t.plan(1) diff --git a/src/test/browser/test-stream-pipe-event.js b/src/test/browser/test-stream-pipe-event.js index e39109f645..272a23cbca 100644 --- a/src/test/browser/test-stream-pipe-event.js +++ b/src/test/browser/test-stream-pipe-event.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Stream } = require('../../lib') +const { Stream } = require('../../lib/ours/index') test('pipe event', function (t) { t.plan(1) diff --git a/src/test/browser/test-stream-pipe-without-listenerCount.js b/src/test/browser/test-stream-pipe-without-listenerCount.js index 448d362a7c..36fa85e462 100644 --- a/src/test/browser/test-stream-pipe-without-listenerCount.js +++ b/src/test/browser/test-stream-pipe-without-listenerCount.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Stream } = require('../../lib') +const { Stream } = require('../../lib/ours/index') test('pipe without listenerCount on read', function (t) { t.plan(1) diff --git a/src/test/browser/test-stream-pipeline.js b/src/test/browser/test-stream-pipeline.js index 0e1180a7f2..26bba1a1c4 100644 --- a/src/test/browser/test-stream-pipeline.js +++ b/src/test/browser/test-stream-pipeline.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable, Writable, pipeline } = require('../../lib') +const { Readable, Writable, pipeline } = require('../../lib/ours/index') test('pipeline', function (t) { t.plan(3) diff --git a/src/test/browser/test-stream-push-order.js b/src/test/browser/test-stream-push-order.js index e5aef44618..6867c874cb 100644 --- a/src/test/browser/test-stream-push-order.js +++ b/src/test/browser/test-stream-push-order.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('push order', function (t) { t.plan(1) diff --git a/src/test/browser/test-stream-push-strings.js b/src/test/browser/test-stream-push-strings.js index 5344cdf1e0..d2bad89d61 100644 --- a/src/test/browser/test-stream-push-strings.js +++ b/src/test/browser/test-stream-push-strings.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('push strings', function (t) { t.plan(2) diff --git a/src/test/browser/test-stream-readable-constructor-set-methods.js b/src/test/browser/test-stream-readable-constructor-set-methods.js index 9d1fd3f234..895dfd1d1a 100644 --- a/src/test/browser/test-stream-readable-constructor-set-methods.js +++ b/src/test/browser/test-stream-readable-constructor-set-methods.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('readable constructor 
set methods', function (t) { t.plan(2) diff --git a/src/test/browser/test-stream-readable-event.js b/src/test/browser/test-stream-readable-event.js index ae611f6b83..dc2a9ea65c 100644 --- a/src/test/browser/test-stream-readable-event.js +++ b/src/test/browser/test-stream-readable-event.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('readable events - first', (t) => { t.plan(3) diff --git a/src/test/browser/test-stream-sync-write.js b/src/test/browser/test-stream-sync-write.js index 92e327eed5..06fbebab84 100644 --- a/src/test/browser/test-stream-sync-write.js +++ b/src/test/browser/test-stream-sync-write.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Writable } = require('../../lib') +const { Writable } = require('../../lib/ours/index') test('should bea ble to write sync', function (t) { t.plan(2) diff --git a/src/test/browser/test-stream-transform-constructor-set-methods.js b/src/test/browser/test-stream-transform-constructor-set-methods.js index 4cefa63dff..2ce4a0ea08 100644 --- a/src/test/browser/test-stream-transform-constructor-set-methods.js +++ b/src/test/browser/test-stream-transform-constructor-set-methods.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Transform } = require('../../lib') +const { Transform } = require('../../lib/ours/index') test('transform constructor set methods', function (t) { t.plan(4) diff --git a/src/test/browser/test-stream-transform-objectmode-falsey-value.js b/src/test/browser/test-stream-transform-objectmode-falsey-value.js index b496acb6de..bd2359bb51 100644 --- a/src/test/browser/test-stream-transform-objectmode-falsey-value.js +++ b/src/test/browser/test-stream-transform-objectmode-falsey-value.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { PassThrough } = require('../../lib') +const { PassThrough } = require('../../lib/ours/index') test('transform objectmode falsey value', function (t) { t.plan(13) diff --git a/src/test/browser/test-stream-transform-split-objectmode.js b/src/test/browser/test-stream-transform-split-objectmode.js index e23beb53ed..21515858ca 100644 --- a/src/test/browser/test-stream-transform-split-objectmode.js +++ b/src/test/browser/test-stream-transform-split-objectmode.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Transform } = require('../../lib') +const { Transform } = require('../../lib/ours/index') test('transform split objectmode', function (t) { t.plan(10) diff --git a/src/test/browser/test-stream-unshift-empty-chunk.js b/src/test/browser/test-stream-unshift-empty-chunk.js index b95f11bae5..6580116231 100644 --- a/src/test/browser/test-stream-unshift-empty-chunk.js +++ b/src/test/browser/test-stream-unshift-empty-chunk.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('unshift empty chunk', function (t) { t.plan(1) diff --git a/src/test/browser/test-stream-unshift-read-race.js b/src/test/browser/test-stream-unshift-read-race.js index baa6477878..d7e17d143e 100644 --- a/src/test/browser/test-stream-unshift-read-race.js +++ b/src/test/browser/test-stream-unshift-read-race.js @@ -8,7 +8,7 @@ // 4. _read() is not called after pushing the EOF null chunk. 
const test = require('tape') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') test('unshift read race', function (t) { t.plan(139) diff --git a/src/test/browser/test-stream-writable-change-default-encoding.js b/src/test/browser/test-stream-writable-change-default-encoding.js index 5f664be178..ef8ec00184 100644 --- a/src/test/browser/test-stream-writable-change-default-encoding.js +++ b/src/test/browser/test-stream-writable-change-default-encoding.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') inherits(MyWritable, stream.Writable) diff --git a/src/test/browser/test-stream-writable-constructor-set-methods.js b/src/test/browser/test-stream-writable-constructor-set-methods.js index 71c6f55e6e..7eb357e6e6 100644 --- a/src/test/browser/test-stream-writable-constructor-set-methods.js +++ b/src/test/browser/test-stream-writable-constructor-set-methods.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Writable } = require('../../lib') +const { Writable } = require('../../lib/ours/index') test('writable constructor set methods', function (t) { t.plan(5) diff --git a/src/test/browser/test-stream-writable-decoded-encoding.js b/src/test/browser/test-stream-writable-decoded-encoding.js index 4dbba262d8..608e7a89fc 100644 --- a/src/test/browser/test-stream-writable-decoded-encoding.js +++ b/src/test/browser/test-stream-writable-decoded-encoding.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') function MyWritable(fn, options) { stream.Writable.call(this, options) diff --git a/src/test/browser/test-stream-writev.js b/src/test/browser/test-stream-writev.js index a100517f3f..66022703a2 100644 --- a/src/test/browser/test-stream-writev.js +++ b/src/test/browser/test-stream-writev.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') const queue = [] for (let decode = 0; decode < 2; decode++) { diff --git a/src/test/browser/test-stream2-base64-single-char-read-end.js b/src/test/browser/test-stream2-base64-single-char-read-end.js index dd5dc5bf61..6e09201f67 100644 --- a/src/test/browser/test-stream2-base64-single-char-read-end.js +++ b/src/test/browser/test-stream2-base64-single-char-read-end.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable, Writable } = require('../../lib') +const { Readable, Writable } = require('../../lib/ours/index') test('base64 single char read end', function (t) { t.plan(1) diff --git a/src/test/browser/test-stream2-compatibility.js b/src/test/browser/test-stream2-compatibility.js index d9abbba6c0..bed96d8903 100644 --- a/src/test/browser/test-stream2-compatibility.js +++ b/src/test/browser/test-stream2-compatibility.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('compatibility', function (t) { t.plan(1) diff --git a/src/test/browser/test-stream2-large-read-stall.js b/src/test/browser/test-stream2-large-read-stall.js index 42c8e66cc7..25e64b503d 100644 --- a/src/test/browser/test-stream2-large-read-stall.js +++ b/src/test/browser/test-stream2-large-read-stall.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable 
} = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('large object read stall', function (t) { t.plan(1) diff --git a/src/test/browser/test-stream2-objects.js b/src/test/browser/test-stream2-objects.js index 75881abf8d..986d9d57dc 100644 --- a/src/test/browser/test-stream2-objects.js +++ b/src/test/browser/test-stream2-objects.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable, Writable } = require('../../lib') +const { Readable, Writable } = require('../../lib/ours/index') function toArray(callback) { const stream = new Writable({ objectMode: true }) diff --git a/src/test/browser/test-stream2-pipe-error-handling.js b/src/test/browser/test-stream2-pipe-error-handling.js index 2eef740e9c..d198505229 100644 --- a/src/test/browser/test-stream2-pipe-error-handling.js +++ b/src/test/browser/test-stream2-pipe-error-handling.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') test('Error Listener Catches', function (t) { t.plan(3) diff --git a/src/test/browser/test-stream2-pipe-error-once-listener.js b/src/test/browser/test-stream2-pipe-error-once-listener.js index afb29324b6..071295d03a 100644 --- a/src/test/browser/test-stream2-pipe-error-once-listener.js +++ b/src/test/browser/test-stream2-pipe-error-once-listener.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') test('pipe error once listener', function (t) { t.plan(1) diff --git a/src/test/browser/test-stream2-push.js b/src/test/browser/test-stream2-push.js index ba7c4eb39e..f826852c46 100644 --- a/src/test/browser/test-stream2-push.js +++ b/src/test/browser/test-stream2-push.js @@ -2,7 +2,7 @@ const test = require('tape') const { EventEmitter: EE } = require('events') -const { Readable, Writable } = require('../../lib') +const { Readable, Writable } = require('../../lib/ours/index') test('push', function (t) { t.plan(33) diff --git a/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js b/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js index aa2fce315c..794f43d8d5 100644 --- a/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js +++ b/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('readable empty buffer no eof 1', function (t) { t.plan(1) diff --git a/src/test/browser/test-stream2-readable-legacy-drain.js b/src/test/browser/test-stream2-readable-legacy-drain.js index 40a042e72e..ab72df33f7 100644 --- a/src/test/browser/test-stream2-readable-legacy-drain.js +++ b/src/test/browser/test-stream2-readable-legacy-drain.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Stream, Readable } = require('../../lib') +const { Stream, Readable } = require('../../lib/ours/index') test('readable legacy drain', function (t) { t.plan(3) diff --git a/src/test/browser/test-stream2-readable-non-empty-end.js b/src/test/browser/test-stream2-readable-non-empty-end.js index bd1c29bc5d..dd7bbbc6e3 100644 --- a/src/test/browser/test-stream2-readable-non-empty-end.js +++ b/src/test/browser/test-stream2-readable-non-empty-end.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') 
test('non empty end', function (t) { t.plan(4) diff --git a/src/test/browser/test-stream2-readable-wrap-empty.js b/src/test/browser/test-stream2-readable-wrap-empty.js index ea870bc99a..2f86c95ce0 100644 --- a/src/test/browser/test-stream2-readable-wrap-empty.js +++ b/src/test/browser/test-stream2-readable-wrap-empty.js @@ -2,7 +2,7 @@ const test = require('tape') const { EventEmitter: EE } = require('events') -const Readable = require('../../lib') +const Readable = require('../../lib/ours/index') test('wrap empty', function (t) { t.plan(1) diff --git a/src/test/browser/test-stream2-readable-wrap.js b/src/test/browser/test-stream2-readable-wrap.js index e98f039f40..e3edc32253 100644 --- a/src/test/browser/test-stream2-readable-wrap.js +++ b/src/test/browser/test-stream2-readable-wrap.js @@ -2,7 +2,7 @@ const test = require('tape') const { EventEmitter: EE } = require('events') -const { Readable, Writable } = require('../../lib') +const { Readable, Writable } = require('../../lib/ours/index') let run = 0 diff --git a/src/test/browser/test-stream2-set-encoding.js b/src/test/browser/test-stream2-set-encoding.js index 2b0de36be2..783a424c88 100644 --- a/src/test/browser/test-stream2-set-encoding.js +++ b/src/test/browser/test-stream2-set-encoding.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') inherits(TestReader, Readable) diff --git a/src/test/browser/test-stream2-transform.js b/src/test/browser/test-stream2-transform.js index f32063c4c7..43a1fc5afc 100644 --- a/src/test/browser/test-stream2-transform.js +++ b/src/test/browser/test-stream2-transform.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { PassThrough, Transform } = require('../../lib') +const { PassThrough, Transform } = require('../../lib/ours/index') test('writable side consumption', function (t) { t.plan(3) diff --git a/src/test/browser/test-stream2-unpipe-drain.js b/src/test/browser/test-stream2-unpipe-drain.js index 88ab66e5a4..72b137a0c1 100644 --- a/src/test/browser/test-stream2-unpipe-drain.js +++ b/src/test/browser/test-stream2-unpipe-drain.js @@ -3,7 +3,7 @@ const test = require('tape') const crypto = require('crypto') const inherits = require('inherits') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') test('unpipe drain', function (t) { try { diff --git a/src/test/browser/test-stream2-writable.js b/src/test/browser/test-stream2-writable.js index 3cce73b00f..834ab94523 100644 --- a/src/test/browser/test-stream2-writable.js +++ b/src/test/browser/test-stream2-writable.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Duplex, Writable } = require('../../lib') +const { Duplex, Writable } = require('../../lib/ours/index') inherits(TestWriter, Writable) diff --git a/src/test/browser/test-stream3-pause-then-read.js b/src/test/browser/test-stream3-pause-then-read.js index 34aa272d88..f7b22f7b46 100644 --- a/src/test/browser/test-stream3-pause-then-read.js +++ b/src/test/browser/test-stream3-pause-then-read.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable, Writable } = require('../../lib') +const { Readable, Writable } = require('../../lib/ours/index') test('pause then read', function (t) { t.plan(7) diff --git a/src/test/test-browser.js b/src/test/test-browser.js deleted file mode 100644 index b555996722..0000000000 --- a/src/test/test-browser.js +++ /dev/null @@ -1,57 
+0,0 @@ -'use strict' - -const test = require('tape') - -test('streams', function (t) { - require('./browser/test-stream-big-packet')(t) - require('./browser/test-stream-big-push')(t) - require('./browser/test-stream-duplex')(t) - require('./browser/test-stream-end-paused')(t) - require('./browser/test-stream-ispaused')(t) - require('./browser/test-stream-finished')(t) - require('./browser/test-stream-pipeline')(t) - require('./browser/test-stream-pipe-after-end')(t) - require('./browser/test-stream-pipe-cleanup')(t) - require('./browser/test-stream-pipe-cleanup-pause')(t) - require('./browser/test-stream-pipe-error-handling')(t) - require('./browser/test-stream-pipe-event')(t) - require('./browser/test-stream-push-order')(t) - require('./browser/test-stream-push-strings')(t) - require('./browser/test-stream-readable-constructor-set-methods')(t) - require('./browser/test-stream-readable-event')(t) - require('./browser/test-stream-transform-constructor-set-methods')(t) - require('./browser/test-stream-transform-objectmode-falsey-value')(t) - require('./browser/test-stream-transform-split-objectmode')(t) - require('./browser/test-stream-unshift-empty-chunk')(t) - require('./browser/test-stream-unshift-read-race')(t) - require('./browser/test-stream-writable-change-default-encoding')(t) - require('./browser/test-stream-writable-constructor-set-methods')(t) - require('./browser/test-stream-writable-decoded-encoding')(t) - require('./browser/test-stream-writev')(t) - require('./browser/test-stream-sync-write')(t) - require('./browser/test-stream-pipe-without-listenerCount') -}) - -test('streams 2', function (t) { - require('./browser/test-stream2-base64-single-char-read-end')(t) - require('./browser/test-stream2-compatibility')(t) - require('./browser/test-stream2-large-read-stall')(t) - require('./browser/test-stream2-objects')(t) - require('./browser/test-stream2-pipe-error-handling')(t) - require('./browser/test-stream2-pipe-error-once-listener')(t) - require('./browser/test-stream2-push')(t) - require('./browser/test-stream2-readable-empty-buffer-no-eof')(t) - require('./browser/test-stream2-readable-from-list')(t) - require('./browser/test-stream2-transform')(t) - require('./browser/test-stream2-set-encoding')(t) - require('./browser/test-stream2-readable-legacy-drain')(t) - require('./browser/test-stream2-readable-wrap-empty')(t) - require('./browser/test-stream2-readable-non-empty-end')(t) - require('./browser/test-stream2-readable-wrap')(t) - require('./browser/test-stream2-unpipe-drain')(t) - require('./browser/test-stream2-writable')(t) -}) - -test('streams 3', function (t) { - require('./browser/test-stream3-pause-then-read')(t) -}) diff --git a/test/browser/test-stream-big-packet.js b/test/browser/test-stream-big-packet.js index 38e4b2e2ac..5c096e5115 100644 --- a/test/browser/test-stream-big-packet.js +++ b/test/browser/test-stream-big-packet.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Transform } = require('../../lib') +const { Transform } = require('../../lib/ours/index') test('big packet', function (t) { t.plan(3) diff --git a/test/browser/test-stream-big-push.js b/test/browser/test-stream-big-push.js index 46b2524df6..97a5cff3a6 100644 --- a/test/browser/test-stream-big-push.js +++ b/test/browser/test-stream-big-push.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('big push', function (t) { t.plan(10) diff --git 
a/test/browser/test-stream-duplex.js b/test/browser/test-stream-duplex.js index ec17de6c4e..d35e641c3d 100644 --- a/test/browser/test-stream-duplex.js +++ b/test/browser/test-stream-duplex.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Duplex } = require('../../lib') +const { Duplex } = require('../../lib/ours/index') test('duplex', function (t) { t.plan(4) diff --git a/test/browser/test-stream-end-paused.js b/test/browser/test-stream-end-paused.js index 927fa24943..7a8909674f 100644 --- a/test/browser/test-stream-end-paused.js +++ b/test/browser/test-stream-end-paused.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('end pause', function (t) { t.plan(2) diff --git a/test/browser/test-stream-finished.js b/test/browser/test-stream-finished.js index 8b9190d8c9..6c688c0a22 100644 --- a/test/browser/test-stream-finished.js +++ b/test/browser/test-stream-finished.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Writable, Readable, Transform, finished } = require('../../lib') +const { Writable, Readable, Transform, finished } = require('../../lib/ours/index') test('readable finished', function (t) { t.plan(1) diff --git a/test/browser/test-stream-ispaused.js b/test/browser/test-stream-ispaused.js index 27cb33ad11..f9cf113f1c 100644 --- a/test/browser/test-stream-ispaused.js +++ b/test/browser/test-stream-ispaused.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') test('is paused', function (t) { t.plan(4) diff --git a/test/browser/test-stream-pipe-after-end.js b/test/browser/test-stream-pipe-after-end.js index 24401fb140..dc33a4e645 100644 --- a/test/browser/test-stream-pipe-after-end.js +++ b/test/browser/test-stream-pipe-after-end.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Readable, Writable } = require('../../lib') +const { Readable, Writable } = require('../../lib/ours/index') test('pipe after end', function (t) { t.plan(4) diff --git a/test/browser/test-stream-pipe-cleanup-pause.js b/test/browser/test-stream-pipe-cleanup-pause.js index e49cf4c5f1..2ca267511b 100644 --- a/test/browser/test-stream-pipe-cleanup-pause.js +++ b/test/browser/test-stream-pipe-cleanup-pause.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') test('pipe cleanup pause', function (t) { t.plan(3) diff --git a/test/browser/test-stream-pipe-cleanup.js b/test/browser/test-stream-pipe-cleanup.js index 8350a8297a..ee07304d6b 100644 --- a/test/browser/test-stream-pipe-cleanup.js +++ b/test/browser/test-stream-pipe-cleanup.js @@ -4,7 +4,7 @@ const test = require('tape') const inherits = require('inherits') -const { Stream } = require('../../lib') +const { Stream } = require('../../lib/ours/index') test('pipe cleanup', function (t) { t.plan(27) diff --git a/test/browser/test-stream-pipe-error-handling.js b/test/browser/test-stream-pipe-error-handling.js index 005167679c..55455805a9 100644 --- a/test/browser/test-stream-pipe-error-handling.js +++ b/test/browser/test-stream-pipe-error-handling.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable, Writable, Stream } = require('../../lib') +const { Readable, Writable, Stream } = require('../../lib/ours/index') test('Error Listener Catches', function (t) { t.plan(1) 
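Every browser test touched above and below makes the same one-line change: the bare `../../lib` entry point becomes the explicit `../../lib/ours/index` wrapper around the vendored Node.js streams. A minimal sketch of what one of these tests looks like after the change, assuming the same tape harness used throughout this suite; the test name and assertions here are illustrative only and do not come from the patch:

    'use strict'

    const test = require('tape')
    // The "ours" wrapper re-exports the vendored stream classes.
    const { Readable } = require('../../lib/ours/index')

    test('readable emits a pushed chunk (illustrative)', function (t) {
      t.plan(1)
      const r = new Readable({ read() {} })
      r.on('data', (chunk) => t.equal(chunk.toString(), 'hello'))
      r.push('hello')
    })
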
diff --git a/test/browser/test-stream-pipe-event.js b/test/browser/test-stream-pipe-event.js index e39109f645..272a23cbca 100644 --- a/test/browser/test-stream-pipe-event.js +++ b/test/browser/test-stream-pipe-event.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Stream } = require('../../lib') +const { Stream } = require('../../lib/ours/index') test('pipe event', function (t) { t.plan(1) diff --git a/test/browser/test-stream-pipe-without-listenerCount.js b/test/browser/test-stream-pipe-without-listenerCount.js index 448d362a7c..36fa85e462 100644 --- a/test/browser/test-stream-pipe-without-listenerCount.js +++ b/test/browser/test-stream-pipe-without-listenerCount.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Stream } = require('../../lib') +const { Stream } = require('../../lib/ours/index') test('pipe without listenerCount on read', function (t) { t.plan(1) diff --git a/test/browser/test-stream-pipeline.js b/test/browser/test-stream-pipeline.js index 0e1180a7f2..26bba1a1c4 100644 --- a/test/browser/test-stream-pipeline.js +++ b/test/browser/test-stream-pipeline.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable, Writable, pipeline } = require('../../lib') +const { Readable, Writable, pipeline } = require('../../lib/ours/index') test('pipeline', function (t) { t.plan(3) diff --git a/test/browser/test-stream-push-order.js b/test/browser/test-stream-push-order.js index e5aef44618..6867c874cb 100644 --- a/test/browser/test-stream-push-order.js +++ b/test/browser/test-stream-push-order.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('push order', function (t) { t.plan(1) diff --git a/test/browser/test-stream-push-strings.js b/test/browser/test-stream-push-strings.js index 5344cdf1e0..d2bad89d61 100644 --- a/test/browser/test-stream-push-strings.js +++ b/test/browser/test-stream-push-strings.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('push strings', function (t) { t.plan(2) diff --git a/test/browser/test-stream-readable-constructor-set-methods.js b/test/browser/test-stream-readable-constructor-set-methods.js index 9d1fd3f234..895dfd1d1a 100644 --- a/test/browser/test-stream-readable-constructor-set-methods.js +++ b/test/browser/test-stream-readable-constructor-set-methods.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('readable constructor set methods', function (t) { t.plan(2) diff --git a/test/browser/test-stream-readable-event.js b/test/browser/test-stream-readable-event.js index ae611f6b83..dc2a9ea65c 100644 --- a/test/browser/test-stream-readable-event.js +++ b/test/browser/test-stream-readable-event.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('readable events - first', (t) => { t.plan(3) diff --git a/test/browser/test-stream-sync-write.js b/test/browser/test-stream-sync-write.js index 92e327eed5..06fbebab84 100644 --- a/test/browser/test-stream-sync-write.js +++ b/test/browser/test-stream-sync-write.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Writable } = 
require('../../lib') +const { Writable } = require('../../lib/ours/index') test('should bea ble to write sync', function (t) { t.plan(2) diff --git a/test/browser/test-stream-transform-constructor-set-methods.js b/test/browser/test-stream-transform-constructor-set-methods.js index 4cefa63dff..2ce4a0ea08 100644 --- a/test/browser/test-stream-transform-constructor-set-methods.js +++ b/test/browser/test-stream-transform-constructor-set-methods.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Transform } = require('../../lib') +const { Transform } = require('../../lib/ours/index') test('transform constructor set methods', function (t) { t.plan(4) diff --git a/test/browser/test-stream-transform-objectmode-falsey-value.js b/test/browser/test-stream-transform-objectmode-falsey-value.js index b496acb6de..bd2359bb51 100644 --- a/test/browser/test-stream-transform-objectmode-falsey-value.js +++ b/test/browser/test-stream-transform-objectmode-falsey-value.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { PassThrough } = require('../../lib') +const { PassThrough } = require('../../lib/ours/index') test('transform objectmode falsey value', function (t) { t.plan(13) diff --git a/test/browser/test-stream-transform-split-objectmode.js b/test/browser/test-stream-transform-split-objectmode.js index e23beb53ed..21515858ca 100644 --- a/test/browser/test-stream-transform-split-objectmode.js +++ b/test/browser/test-stream-transform-split-objectmode.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Transform } = require('../../lib') +const { Transform } = require('../../lib/ours/index') test('transform split objectmode', function (t) { t.plan(10) diff --git a/test/browser/test-stream-unshift-empty-chunk.js b/test/browser/test-stream-unshift-empty-chunk.js index b95f11bae5..6580116231 100644 --- a/test/browser/test-stream-unshift-empty-chunk.js +++ b/test/browser/test-stream-unshift-empty-chunk.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('unshift empty chunk', function (t) { t.plan(1) diff --git a/test/browser/test-stream-unshift-read-race.js b/test/browser/test-stream-unshift-read-race.js index baa6477878..d7e17d143e 100644 --- a/test/browser/test-stream-unshift-read-race.js +++ b/test/browser/test-stream-unshift-read-race.js @@ -8,7 +8,7 @@ // 4. _read() is not called after pushing the EOF null chunk. 
const test = require('tape') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') test('unshift read race', function (t) { t.plan(139) diff --git a/test/browser/test-stream-writable-change-default-encoding.js b/test/browser/test-stream-writable-change-default-encoding.js index 5f664be178..ef8ec00184 100644 --- a/test/browser/test-stream-writable-change-default-encoding.js +++ b/test/browser/test-stream-writable-change-default-encoding.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') inherits(MyWritable, stream.Writable) diff --git a/test/browser/test-stream-writable-constructor-set-methods.js b/test/browser/test-stream-writable-constructor-set-methods.js index 71c6f55e6e..7eb357e6e6 100644 --- a/test/browser/test-stream-writable-constructor-set-methods.js +++ b/test/browser/test-stream-writable-constructor-set-methods.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Writable } = require('../../lib') +const { Writable } = require('../../lib/ours/index') test('writable constructor set methods', function (t) { t.plan(5) diff --git a/test/browser/test-stream-writable-decoded-encoding.js b/test/browser/test-stream-writable-decoded-encoding.js index 4dbba262d8..608e7a89fc 100644 --- a/test/browser/test-stream-writable-decoded-encoding.js +++ b/test/browser/test-stream-writable-decoded-encoding.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') function MyWritable(fn, options) { stream.Writable.call(this, options) diff --git a/test/browser/test-stream-writev.js b/test/browser/test-stream-writev.js index a100517f3f..66022703a2 100644 --- a/test/browser/test-stream-writev.js +++ b/test/browser/test-stream-writev.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') const queue = [] for (let decode = 0; decode < 2; decode++) { diff --git a/test/browser/test-stream2-base64-single-char-read-end.js b/test/browser/test-stream2-base64-single-char-read-end.js index dd5dc5bf61..6e09201f67 100644 --- a/test/browser/test-stream2-base64-single-char-read-end.js +++ b/test/browser/test-stream2-base64-single-char-read-end.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable, Writable } = require('../../lib') +const { Readable, Writable } = require('../../lib/ours/index') test('base64 single char read end', function (t) { t.plan(1) diff --git a/test/browser/test-stream2-compatibility.js b/test/browser/test-stream2-compatibility.js index d9abbba6c0..bed96d8903 100644 --- a/test/browser/test-stream2-compatibility.js +++ b/test/browser/test-stream2-compatibility.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('compatibility', function (t) { t.plan(1) diff --git a/test/browser/test-stream2-large-read-stall.js b/test/browser/test-stream2-large-read-stall.js index 42c8e66cc7..25e64b503d 100644 --- a/test/browser/test-stream2-large-read-stall.js +++ b/test/browser/test-stream2-large-read-stall.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('large object read stall', 
function (t) { t.plan(1) diff --git a/test/browser/test-stream2-objects.js b/test/browser/test-stream2-objects.js index 75881abf8d..986d9d57dc 100644 --- a/test/browser/test-stream2-objects.js +++ b/test/browser/test-stream2-objects.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable, Writable } = require('../../lib') +const { Readable, Writable } = require('../../lib/ours/index') function toArray(callback) { const stream = new Writable({ objectMode: true }) diff --git a/test/browser/test-stream2-pipe-error-handling.js b/test/browser/test-stream2-pipe-error-handling.js index 2eef740e9c..d198505229 100644 --- a/test/browser/test-stream2-pipe-error-handling.js +++ b/test/browser/test-stream2-pipe-error-handling.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') test('Error Listener Catches', function (t) { t.plan(3) diff --git a/test/browser/test-stream2-pipe-error-once-listener.js b/test/browser/test-stream2-pipe-error-once-listener.js index afb29324b6..071295d03a 100644 --- a/test/browser/test-stream2-pipe-error-once-listener.js +++ b/test/browser/test-stream2-pipe-error-once-listener.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') test('pipe error once listener', function (t) { t.plan(1) diff --git a/test/browser/test-stream2-push.js b/test/browser/test-stream2-push.js index ba7c4eb39e..f826852c46 100644 --- a/test/browser/test-stream2-push.js +++ b/test/browser/test-stream2-push.js @@ -2,7 +2,7 @@ const test = require('tape') const { EventEmitter: EE } = require('events') -const { Readable, Writable } = require('../../lib') +const { Readable, Writable } = require('../../lib/ours/index') test('push', function (t) { t.plan(33) diff --git a/test/browser/test-stream2-readable-empty-buffer-no-eof.js b/test/browser/test-stream2-readable-empty-buffer-no-eof.js index aa2fce315c..794f43d8d5 100644 --- a/test/browser/test-stream2-readable-empty-buffer-no-eof.js +++ b/test/browser/test-stream2-readable-empty-buffer-no-eof.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('readable empty buffer no eof 1', function (t) { t.plan(1) diff --git a/test/browser/test-stream2-readable-legacy-drain.js b/test/browser/test-stream2-readable-legacy-drain.js index 40a042e72e..ab72df33f7 100644 --- a/test/browser/test-stream2-readable-legacy-drain.js +++ b/test/browser/test-stream2-readable-legacy-drain.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Stream, Readable } = require('../../lib') +const { Stream, Readable } = require('../../lib/ours/index') test('readable legacy drain', function (t) { t.plan(3) diff --git a/test/browser/test-stream2-readable-non-empty-end.js b/test/browser/test-stream2-readable-non-empty-end.js index bd1c29bc5d..dd7bbbc6e3 100644 --- a/test/browser/test-stream2-readable-non-empty-end.js +++ b/test/browser/test-stream2-readable-non-empty-end.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') test('non empty end', function (t) { t.plan(4) diff --git a/test/browser/test-stream2-readable-wrap-empty.js b/test/browser/test-stream2-readable-wrap-empty.js index ea870bc99a..2f86c95ce0 100644 --- 
a/test/browser/test-stream2-readable-wrap-empty.js +++ b/test/browser/test-stream2-readable-wrap-empty.js @@ -2,7 +2,7 @@ const test = require('tape') const { EventEmitter: EE } = require('events') -const Readable = require('../../lib') +const Readable = require('../../lib/ours/index') test('wrap empty', function (t) { t.plan(1) diff --git a/test/browser/test-stream2-readable-wrap.js b/test/browser/test-stream2-readable-wrap.js index e98f039f40..e3edc32253 100644 --- a/test/browser/test-stream2-readable-wrap.js +++ b/test/browser/test-stream2-readable-wrap.js @@ -2,7 +2,7 @@ const test = require('tape') const { EventEmitter: EE } = require('events') -const { Readable, Writable } = require('../../lib') +const { Readable, Writable } = require('../../lib/ours/index') let run = 0 diff --git a/test/browser/test-stream2-set-encoding.js b/test/browser/test-stream2-set-encoding.js index 2b0de36be2..783a424c88 100644 --- a/test/browser/test-stream2-set-encoding.js +++ b/test/browser/test-stream2-set-encoding.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Readable } = require('../../lib') +const { Readable } = require('../../lib/ours/index') inherits(TestReader, Readable) diff --git a/test/browser/test-stream2-transform.js b/test/browser/test-stream2-transform.js index f32063c4c7..43a1fc5afc 100644 --- a/test/browser/test-stream2-transform.js +++ b/test/browser/test-stream2-transform.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { PassThrough, Transform } = require('../../lib') +const { PassThrough, Transform } = require('../../lib/ours/index') test('writable side consumption', function (t) { t.plan(3) diff --git a/test/browser/test-stream2-unpipe-drain.js b/test/browser/test-stream2-unpipe-drain.js index 88ab66e5a4..72b137a0c1 100644 --- a/test/browser/test-stream2-unpipe-drain.js +++ b/test/browser/test-stream2-unpipe-drain.js @@ -3,7 +3,7 @@ const test = require('tape') const crypto = require('crypto') const inherits = require('inherits') -const stream = require('../../lib') +const stream = require('../../lib/ours/index') test('unpipe drain', function (t) { try { diff --git a/test/browser/test-stream2-writable.js b/test/browser/test-stream2-writable.js index 3cce73b00f..834ab94523 100644 --- a/test/browser/test-stream2-writable.js +++ b/test/browser/test-stream2-writable.js @@ -2,7 +2,7 @@ const test = require('tape') const inherits = require('inherits') -const { Duplex, Writable } = require('../../lib') +const { Duplex, Writable } = require('../../lib/ours/index') inherits(TestWriter, Writable) diff --git a/test/browser/test-stream3-pause-then-read.js b/test/browser/test-stream3-pause-then-read.js index 34aa272d88..f7b22f7b46 100644 --- a/test/browser/test-stream3-pause-then-read.js +++ b/test/browser/test-stream3-pause-then-read.js @@ -1,7 +1,7 @@ 'use strict' const test = require('tape') -const { Readable, Writable } = require('../../lib') +const { Readable, Writable } = require('../../lib/ours/index') test('pause then read', function (t) { t.plan(7) diff --git a/test/test-browser.js b/test/test-browser.js deleted file mode 100644 index b555996722..0000000000 --- a/test/test-browser.js +++ /dev/null @@ -1,57 +0,0 @@ -'use strict' - -const test = require('tape') - -test('streams', function (t) { - require('./browser/test-stream-big-packet')(t) - require('./browser/test-stream-big-push')(t) - require('./browser/test-stream-duplex')(t) - require('./browser/test-stream-end-paused')(t) - 
require('./browser/test-stream-ispaused')(t) - require('./browser/test-stream-finished')(t) - require('./browser/test-stream-pipeline')(t) - require('./browser/test-stream-pipe-after-end')(t) - require('./browser/test-stream-pipe-cleanup')(t) - require('./browser/test-stream-pipe-cleanup-pause')(t) - require('./browser/test-stream-pipe-error-handling')(t) - require('./browser/test-stream-pipe-event')(t) - require('./browser/test-stream-push-order')(t) - require('./browser/test-stream-push-strings')(t) - require('./browser/test-stream-readable-constructor-set-methods')(t) - require('./browser/test-stream-readable-event')(t) - require('./browser/test-stream-transform-constructor-set-methods')(t) - require('./browser/test-stream-transform-objectmode-falsey-value')(t) - require('./browser/test-stream-transform-split-objectmode')(t) - require('./browser/test-stream-unshift-empty-chunk')(t) - require('./browser/test-stream-unshift-read-race')(t) - require('./browser/test-stream-writable-change-default-encoding')(t) - require('./browser/test-stream-writable-constructor-set-methods')(t) - require('./browser/test-stream-writable-decoded-encoding')(t) - require('./browser/test-stream-writev')(t) - require('./browser/test-stream-sync-write')(t) - require('./browser/test-stream-pipe-without-listenerCount') -}) - -test('streams 2', function (t) { - require('./browser/test-stream2-base64-single-char-read-end')(t) - require('./browser/test-stream2-compatibility')(t) - require('./browser/test-stream2-large-read-stall')(t) - require('./browser/test-stream2-objects')(t) - require('./browser/test-stream2-pipe-error-handling')(t) - require('./browser/test-stream2-pipe-error-once-listener')(t) - require('./browser/test-stream2-push')(t) - require('./browser/test-stream2-readable-empty-buffer-no-eof')(t) - require('./browser/test-stream2-readable-from-list')(t) - require('./browser/test-stream2-transform')(t) - require('./browser/test-stream2-set-encoding')(t) - require('./browser/test-stream2-readable-legacy-drain')(t) - require('./browser/test-stream2-readable-wrap-empty')(t) - require('./browser/test-stream2-readable-non-empty-end')(t) - require('./browser/test-stream2-readable-wrap')(t) - require('./browser/test-stream2-unpipe-drain')(t) - require('./browser/test-stream2-writable')(t) -}) - -test('streams 3', function (t) { - require('./browser/test-stream3-pause-then-read')(t) -}) From 1deef552606c77270a0765d2ff3f10dfd84f02f4 Mon Sep 17 00:00:00 2001 From: Shogun Date: Thu, 21 Apr 2022 08:37:45 +0200 Subject: [PATCH 05/19] feat: Restore compatibility with Node 12.x. 
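The gist of this patch: Node 12 ships a V8 release without the `?.` and `??` operators and a `buffer` module without `Blob`, so the build pipeline now runs the vendored sources through Babel (configured by the new babel.config.cjs below) and then Prettier, and the test footers fall back to `blob-polyfill` when `buffer.Blob` is missing. A minimal sketch of that transform step, using the same `@babel/core` and `prettier` calls the updated build/build.mjs relies on; the `downlevelForNode12` helper name is made up for illustration, and this is an outline rather than the exact implementation:

    import { transform } from '@babel/core'
    import prettier from 'prettier'

    // babel.config.cjs enables @babel/proposal-optional-chaining and
    // @babel/proposal-nullish-coalescing-operator, so for example
    //   writableObjectMode: !!head?.writableObjectMode
    // becomes
    //   writableObjectMode: !!(head !== null && head !== undefined && head.writableObjectMode)
    function downlevelForNode12(source, prettierConfig) {
      // Picks up babel.config.cjs from the project root.
      const { code } = transform(source)
      // Prettier then normalizes the generated output, using the 'babel' parser
      // as the build script does.
      return prettier.format(code.replaceAll('void 0', 'undefined'), {
        ...prettierConfig,
        parser: 'babel'
      })
    }
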
--- .github/workflows/node.yml | 2 +- babel.config.cjs | 3 + build/build.mjs | 52 +- build/footers.mjs | 11 + build/replacements.mjs | 14 +- lib/_stream_duplex.js | 6 +- lib/_stream_passthrough.js | 6 +- lib/_stream_readable.js | 6 +- lib/_stream_transform.js | 6 +- lib/_stream_writable.js | 6 +- lib/internal/streams/add-abort-signal.js | 58 +- lib/internal/streams/buffer_list.js | 227 ++- lib/internal/streams/compose.js | 181 +-- lib/internal/streams/destroy.js | 286 ++-- lib/internal/streams/duplex.js | 130 +- lib/internal/streams/duplexify.js | 435 +++--- lib/internal/streams/end-of-stream.js | 251 +-- lib/internal/streams/from.js | 111 +- lib/internal/streams/lazy_transform.js | 43 +- lib/internal/streams/legacy.js | 122 +- lib/internal/streams/operators.js | 496 +++--- lib/internal/streams/passthrough.js | 27 +- lib/internal/streams/pipeline.js | 451 +++--- lib/internal/streams/readable.js | 1378 ++++++++--------- lib/internal/streams/state.js | 31 +- lib/internal/streams/transform.js | 113 +- lib/internal/streams/utils.js | 328 ++-- lib/internal/streams/writable.js | 874 +++++------ lib/internal/validators.js | 247 ++- lib/ours/browser.js | 12 +- lib/ours/errors.js | 73 +- lib/ours/index.js | 18 +- lib/ours/primordials.js | 31 +- lib/ours/util.js | 34 +- lib/stream.js | 156 +- lib/stream/promises.js | 58 +- package.json | 19 +- .../browser/test-stream2-large-read-stall.js | 3 +- src/util.js | 14 + test/browser/test-stream-big-packet.js | 27 +- test/browser/test-stream-big-push.js | 16 +- test/browser/test-stream-duplex.js | 18 +- test/browser/test-stream-end-paused.js | 4 +- test/browser/test-stream-finished.js | 15 +- test/browser/test-stream-ispaused.js | 15 +- test/browser/test-stream-pipe-after-end.js | 22 +- .../browser/test-stream-pipe-cleanup-pause.js | 9 +- test/browser/test-stream-pipe-cleanup.js | 21 +- .../test-stream-pipe-error-handling.js | 21 +- test/browser/test-stream-pipe-event.js | 10 +- .../test-stream-pipe-without-listenerCount.js | 4 +- test/browser/test-stream-pipeline.js | 14 +- test/browser/test-stream-push-order.js | 5 +- test/browser/test-stream-push-strings.js | 10 +- ...stream-readable-constructor-set-methods.js | 7 +- test/browser/test-stream-readable-event.js | 38 +- test/browser/test-stream-sync-write.js | 6 +- ...tream-transform-constructor-set-methods.js | 6 +- ...tream-transform-objectmode-falsey-value.js | 18 +- .../test-stream-transform-split-objectmode.js | 27 +- .../test-stream-unshift-empty-chunk.js | 9 +- test/browser/test-stream-unshift-read-race.js | 34 +- ...stream-writable-change-default-encoding.js | 25 +- ...stream-writable-constructor-set-methods.js | 15 +- .../test-stream-writable-decoded-encoding.js | 19 +- test/browser/test-stream-writev.js | 66 +- ...est-stream2-base64-single-char-read-end.js | 8 +- test/browser/test-stream2-compatibility.js | 5 +- test/browser/test-stream2-large-read-stall.js | 17 +- test/browser/test-stream2-objects.js | 167 +- .../test-stream2-pipe-error-handling.js | 18 +- .../test-stream2-pipe-error-once-listener.js | 6 +- test/browser/test-stream2-push.js | 21 +- ...st-stream2-readable-empty-buffer-no-eof.js | 32 +- .../test-stream2-readable-from-list.js | 97 +- .../test-stream2-readable-legacy-drain.js | 6 +- .../test-stream2-readable-non-empty-end.js | 15 +- .../test-stream2-readable-wrap-empty.js | 7 +- test/browser/test-stream2-readable-wrap.js | 56 +- test/browser/test-stream2-set-encoding.js | 65 +- test/browser/test-stream2-transform.js | 198 ++- test/browser/test-stream2-unpipe-drain.js | 9 +- 
test/browser/test-stream2-writable.js | 88 +- test/browser/test-stream3-pause-then-read.js | 46 +- test/common/fixtures.js | 29 +- test/common/fixtures.mjs | 18 +- test/common/index.js | 945 +++++------ test/common/index.mjs | 10 +- test/common/tmpdir.js | 62 +- test/ours/test-errors.js | 21 +- test/ours/test-fake-timers.js | 11 +- test/ours/test-stream-sync-write.js | 9 +- .../test-readable-from-iterator-closing.js | 223 +-- test/parallel/test-readable-from.js | 245 ++- test/parallel/test-readable-large-hwm.js | 55 +- test/parallel/test-readable-single-end.js | 47 +- test/parallel/test-stream-add-abort-signal.js | 59 +- test/parallel/test-stream-aliases-legacy.js | 45 +- test/parallel/test-stream-asIndexedPairs.mjs | 100 +- test/parallel/test-stream-auto-destroy.js | 186 +-- ...riters-in-synchronously-recursion-write.js | 63 +- test/parallel/test-stream-backpressure.js | 75 +- ...-base-prototype-accessors-enumerability.js | 48 +- .../parallel/test-stream-base-typechecking.js | 72 +- test/parallel/test-stream-big-packet.js | 89 +- test/parallel/test-stream-big-push.js | 89 +- test/parallel/test-stream-buffer-list.js | 139 +- test/parallel/test-stream-catch-rejections.js | 100 +- test/parallel/test-stream-compose.js | 593 +++---- test/parallel/test-stream-construct.js | 393 ++--- .../test-stream-decoder-objectmode.js | 56 +- .../test-stream-destroy-event-order.js | 72 +- test/parallel/test-stream-drop-take.js | 195 ++- test/parallel/test-stream-duplex-destroy.js | 417 ++--- test/parallel/test-stream-duplex-end.js | 78 +- test/parallel/test-stream-duplex-from.js | 461 +++--- test/parallel/test-stream-duplex-props.js | 61 +- .../test-stream-duplex-readable-end.js | 70 +- .../test-stream-duplex-readable-writable.js | 88 +- .../test-stream-duplex-writable-finished.js | 73 +- test/parallel/test-stream-duplex.js | 80 +- test/parallel/test-stream-end-of-streams.js | 55 +- test/parallel/test-stream-end-paused.js | 76 +- test/parallel/test-stream-error-once.js | 55 +- test/parallel/test-stream-events-prepend.js | 53 +- test/parallel/test-stream-filter.js | 285 ++-- test/parallel/test-stream-finished.js | 1072 +++++++------ test/parallel/test-stream-flatMap.js | 252 +-- test/parallel/test-stream-forEach.js | 261 ++-- test/parallel/test-stream-inheritance.js | 124 +- test/parallel/test-stream-ispaused.js | 57 +- ...-stream-iterator-helpers-test262-tests.mjs | 159 +- .../test-stream-objectmode-undefined.js | 79 +- .../test-stream-once-readable-pipe.js | 118 +- .../parallel/test-stream-passthrough-drain.js | 44 +- test/parallel/test-stream-pipe-after-end.js | 87 +- ...t-stream-pipe-await-drain-manual-resume.js | 147 +- ...tream-pipe-await-drain-push-while-write.js | 75 +- test/parallel/test-stream-pipe-await-drain.js | 95 +- .../test-stream-pipe-cleanup-pause.js | 93 +- test/parallel/test-stream-pipe-cleanup.js | 195 +-- .../test-stream-pipe-error-handling.js | 191 +-- .../test-stream-pipe-error-unhandled.js | 59 +- test/parallel/test-stream-pipe-event.js | 72 +- .../test-stream-pipe-flow-after-unpipe.js | 61 +- test/parallel/test-stream-pipe-flow.js | 148 +- .../test-stream-pipe-manual-resume.js | 67 +- .../test-stream-pipe-multiple-pipes.js | 101 +- test/parallel/test-stream-pipe-needDrain.js | 61 +- ...test-stream-pipe-same-destination-twice.js | 134 +- .../test-stream-pipe-unpipe-streams.js | 186 +-- .../test-stream-pipe-without-listenerCount.js | 54 +- .../test-stream-pipeline-async-iterator.js | 67 +- test/parallel/test-stream-pipeline-http2.js | 84 +- .../test-stream-pipeline-listeners.js | 136 
+- test/parallel/test-stream-pipeline-process.js | 64 +- ...t-stream-pipeline-queued-end-in-destroy.js | 73 +- .../parallel/test-stream-pipeline-uncaught.js | 70 +- .../test-stream-pipeline-with-empty-string.js | 45 +- test/parallel/test-stream-preprocess.js | 136 +- test/parallel/test-stream-promises.js | 173 +-- test/parallel/test-stream-push-order.js | 66 +- test/parallel/test-stream-push-strings.js | 82 +- test/parallel/test-stream-readable-aborted.js | 118 +- ...t-stream-readable-add-chunk-during-data.js | 65 +- ...stream-readable-constructor-set-methods.js | 43 +- test/parallel/test-stream-readable-data.js | 52 +- test/parallel/test-stream-readable-destroy.js | 642 ++++---- test/parallel/test-stream-readable-didRead.js | 143 +- ...eam-readable-emit-readable-short-stream.js | 252 ++- .../test-stream-readable-emittedReadable.js | 155 +- .../test-stream-readable-end-destroyed.js | 52 +- test/parallel/test-stream-readable-ended.js | 98 +- .../test-stream-readable-error-end.js | 50 +- test/parallel/test-stream-readable-event.js | 147 +- .../test-stream-readable-flow-recursion.js | 105 +- .../test-stream-readable-hwm-0-async.js | 60 +- ...test-stream-readable-hwm-0-no-flow-data.js | 146 +- test/parallel/test-stream-readable-hwm-0.js | 69 +- .../test-stream-readable-infinite-read.js | 74 +- .../test-stream-readable-invalid-chunk.js | 77 +- .../test-stream-readable-needReadable.js | 219 +-- .../test-stream-readable-next-no-null.js | 61 +- ...st-stream-readable-no-unneeded-readable.js | 98 +- ...stream-readable-object-multi-push-async.js | 236 +-- .../test-stream-readable-pause-and-resume.js | 117 +- ...st-stream-readable-readable-then-resume.js | 75 +- .../parallel/test-stream-readable-readable.js | 90 +- ...est-stream-readable-reading-readingMore.js | 253 ++- .../test-stream-readable-resume-hwm.js | 52 +- .../test-stream-readable-resumeScheduled.js | 139 +- ...m-readable-setEncoding-existing-buffers.js | 112 +- .../test-stream-readable-setEncoding-null.js | 49 +- .../test-stream-readable-unpipe-resume.js | 53 +- test/parallel/test-stream-readable-unshift.js | 272 ++-- ...tream-readable-with-unimplemented-_read.js | 52 +- .../test-stream-readableListening-state.js | 85 +- test/parallel/test-stream-reduce.js | 252 +-- test/parallel/test-stream-some-find-every.mjs | 258 +-- test/parallel/test-stream-toArray.js | 162 +- .../test-stream-transform-callback-twice.js | 57 +- ...tream-transform-constructor-set-methods.js | 69 +- .../parallel/test-stream-transform-destroy.js | 232 ++- .../test-stream-transform-final-sync.js | 144 +- test/parallel/test-stream-transform-final.js | 149 +- .../test-stream-transform-flush-data.js | 52 +- ...tream-transform-objectmode-falsey-value.js | 85 +- ...st-stream-transform-split-highwatermark.js | 175 ++- .../test-stream-transform-split-objectmode.js | 149 +- test/parallel/test-stream-uint8array.js | 141 +- test/parallel/test-stream-unpipe-event.js | 143 +- .../test-stream-unshift-empty-chunk.js | 116 +- .../parallel/test-stream-unshift-read-race.js | 202 +-- test/parallel/test-stream-writable-aborted.js | 61 +- .../test-stream-writable-callback-twice.js | 57 +- ...stream-writable-change-default-encoding.js | 132 +- .../test-stream-writable-clear-buffer.js | 68 +- ...stream-writable-constructor-set-methods.js | 80 +- .../test-stream-writable-decoded-encoding.js | 82 +- test/parallel/test-stream-writable-destroy.js | 811 +++++----- .../test-stream-writable-end-cb-error.js | 168 +- .../test-stream-writable-end-cb-uncaught.js | 72 +- 
.../test-stream-writable-end-multiple.js | 71 +- .../test-stream-writable-ended-state.js | 84 +- .../test-stream-writable-final-async.js | 62 +- .../test-stream-writable-final-destroy.js | 51 +- .../test-stream-writable-final-throw.js | 52 +- .../test-stream-writable-finish-destroyed.js | 86 +- .../test-stream-writable-finished-state.js | 66 +- .../parallel/test-stream-writable-finished.js | 150 +- .../test-stream-writable-invalid-chunk.js | 85 +- .../test-stream-writable-needdrain-state.js | 61 +- test/parallel/test-stream-writable-null.js | 107 +- .../test-stream-writable-properties.js | 64 +- .../test-stream-writable-samecb-singletick.js | 86 +- .../parallel/test-stream-writable-writable.js | 79 +- .../test-stream-writable-write-cb-error.js | 106 +- .../test-stream-writable-write-cb-twice.js | 97 +- .../test-stream-writable-write-error.js | 107 +- ...est-stream-writable-write-writev-finish.js | 246 +-- .../test-stream-writableState-ending.js | 68 +- ...ableState-uncorked-bufferedRequestCount.js | 102 +- test/parallel/test-stream-write-destroy.js | 95 +- test/parallel/test-stream-write-drain.js | 46 +- test/parallel/test-stream-write-final.js | 69 +- test/parallel/test-stream-writev.js | 208 +-- ...est-stream2-base64-single-char-read-end.js | 86 +- test/parallel/test-stream2-basic.js | 646 ++++---- test/parallel/test-stream2-compatibility.js | 84 +- test/parallel/test-stream2-decode-partial.js | 66 +- .../test-stream2-finish-pipe-error.js | 58 +- test/parallel/test-stream2-finish-pipe.js | 60 +- .../test-stream2-httpclient-response-end.js | 98 +- .../parallel/test-stream2-large-read-stall.js | 96 +- test/parallel/test-stream2-objects.js | 490 +++--- .../test-stream2-pipe-error-handling.js | 169 +- .../test-stream2-pipe-error-once-listener.js | 63 +- test/parallel/test-stream2-push.js | 187 ++- test/parallel/test-stream2-read-sync-stack.js | 62 +- ...st-stream2-readable-empty-buffer-no-eof.js | 155 +- .../test-stream2-readable-from-list.js | 173 ++- .../test-stream2-readable-legacy-drain.js | 79 +- .../test-stream2-readable-non-empty-end.js | 106 +- .../test-stream2-readable-wrap-destroy.js | 55 +- .../test-stream2-readable-wrap-empty.js | 49 +- .../test-stream2-readable-wrap-error.js | 87 +- test/parallel/test-stream2-readable-wrap.js | 173 ++- test/parallel/test-stream2-set-encoding.js | 540 +++---- test/parallel/test-stream2-transform.js | 784 +++++----- test/parallel/test-stream2-unpipe-drain.js | 81 +- test/parallel/test-stream2-unpipe-leak.js | 85 +- test/parallel/test-stream2-writable.js | 692 +++++---- test/parallel/test-stream3-cork-end.js | 135 +- test/parallel/test-stream3-cork-uncork.js | 126 +- test/parallel/test-stream3-pause-then-read.js | 250 +-- test/parallel/test-streams-highwatermark.js | 132 +- 276 files changed, 18218 insertions(+), 16675 deletions(-) create mode 100644 babel.config.cjs diff --git a/.github/workflows/node.yml b/.github/workflows/node.yml index 78c07dfc8c..a8877354bc 100644 --- a/.github/workflows/node.yml +++ b/.github/workflows/node.yml @@ -10,7 +10,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macOS-latest] - node-version: [14.x, 16.x, 17.x] + node-version: [12.x, 14.x, 16.x, 17.x] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/babel.config.cjs b/babel.config.cjs new file mode 100644 index 0000000000..d675db2f15 --- /dev/null +++ b/babel.config.cjs @@ -0,0 +1,3 @@ +module.exports = { + plugins: ['@babel/proposal-nullish-coalescing-operator', '@babel/proposal-optional-chaining'] +} diff --git 
a/build/build.mjs b/build/build.mjs index 4176b7431c..4123afda5b 100644 --- a/build/build.mjs +++ b/build/build.mjs @@ -1,10 +1,13 @@ +import { transform } from '@babel/core' import { createReadStream } from 'node:fs' -import { copyFile, cp, mkdir, readFile, rm, writeFile } from 'node:fs/promises' +import { mkdir, readdir, readFile, rm, writeFile } from 'node:fs/promises' import { dirname, resolve } from 'node:path' import process from 'node:process' import { finished } from 'node:stream/promises' +import prettier from 'prettier' import { Parse } from 'tar' import { request } from 'undici' +import prettierConfig from '../prettier.config.cjs' import { aliases, skippedSources, sources } from './files.mjs' import { footers } from './footers.mjs' import { replacements } from './replacements.mjs' @@ -66,6 +69,8 @@ async function processFiles(contents) { const replacementsKeys = Object.keys(replacements) const footersKeys = Object.keys(footers) + prettierConfig.parser = 'babel' + for (let [path, content] of contents) { const modifications = [] const matchingReplacements = replacementsKeys.filter((k) => new RegExp(k).test(path)) @@ -93,6 +98,13 @@ async function processFiles(contents) { } } + // Process the file through babel and prettier + if (path.endsWith('.js')) { + modifications.push(highlightFile('babel', 33), highlightFile('prettier', 33)) + console.log(prettierConfig) + content = prettier.format(await transform(content).code.replaceAll('void 0', 'undefined'), prettierConfig) + } + if (!modifications.length) { modifications.push('no modifications') } @@ -152,12 +164,26 @@ async function main() { replacements['README.md'][0][1] = replacements['README.md'][0][1].replace('$2', nodeVersion) replacements['README.md'][1][1] = replacements['README.md'][1][1].replace('$2', nodeVersion) + // Add custom files + contents.push(['lib/ours/browser.js', await readFile('src/browser.js', 'utf-8')]) + contents.push(['lib/ours/index.js', await readFile('src/index.js', 'utf-8')]) + contents.push(['lib/ours/errors.js', await readFile('src/errors.js', 'utf-8')]) + contents.push(['lib/ours/primordials.js', await readFile('src/primordials.js', 'utf-8')]) + contents.push(['lib/ours/util.js', await readFile('src/util.js', 'utf-8')]) + + for (const file of await readdir('src/test/ours')) { + contents.push([`test/ours/${file}`, await readFile(`src/test/ours/${file}`, 'utf-8')]) + } + + for (const file of await readdir('src/test/browser')) { + contents.push([`test/browser/${file}`, await readFile(`src/test/browser/${file}`, 'utf-8')]) + } + contents.push(['README.md', await readFile('./README.md', 'utf-8')]) // Create paths const paths = new Set(contents.map((c) => dirname(c[0]))) paths.delete('.') - paths.add('lib/ours') for (const path of paths.values()) { console.log(`Creating directory ${highlightFile(path, 32)} ...`) @@ -166,28 +192,6 @@ async function main() { // Perform replacements await processFiles(contents) - - // Copy template files - console.log(`Copying template to file ${highlightFile('lib/ours/browser.js', 32)} ...`) - await copyFile('src/browser.js', 'lib/ours/browser.js') - - console.log(`Copying template to file ${highlightFile('lib/ours/index.js', 32)} ...`) - await copyFile('src/index.js', 'lib/ours/index.js') - - console.log(`Copying template to file ${highlightFile('lib/ours/errors.js', 32)} ...`) - await copyFile('src/errors.js', 'lib/ours/errors.js') - - console.log(`Copying template to file ${highlightFile('lib/ours/primordials.js', 32)} ...`) - await copyFile('src/primordials.js', 
'lib/ours/primordials.js') - - console.log(`Copying template to file ${highlightFile('lib/ours/util.js', 32)} ...`) - await copyFile('src/util.js', 'lib/ours/util.js') - - console.log(`Copying folder ${highlightFile('test/browser', 32)} ...`) - await cp('src/test/browser', 'test/browser', { recursive: true }) - - console.log(`Copying folder ${highlightFile('test/ours', 32)} ...`) - await cp('src/test/ours', 'test/ours', { recursive: true }) } await main() diff --git a/build/footers.mjs b/build/footers.mjs index 9e3b314f49..1980fbb7ed 100644 --- a/build/footers.mjs +++ b/build/footers.mjs @@ -1,5 +1,16 @@ const testPolyfills = ` /* replacement start */ + if (typeof Blob === 'undefined') { + let { Blob } = require('buffer') + + if (typeof Blob === 'undefined') { + Blob = require('blob-polyfill').Blob + } + + globalThis.Blob = Blob + allowGlobals(Blob) + } + if (typeof EventTarget === 'undefined') { globalThis.EventTarget = require('event-target-shim').EventTarget; } diff --git a/build/replacements.mjs b/build/replacements.mjs index 0fa985b7dc..aaaaab365c 100644 --- a/build/replacements.mjs +++ b/build/replacements.mjs @@ -8,8 +8,6 @@ const internalStreamsBlob = [ return b instanceof Blob } } - - const { Blob } = require('buffer'); ` ] @@ -137,6 +135,17 @@ const testParallelImportStreamInMjs = [" from 'stream';", "from '../../lib/ours/ const testParallelImportTapInMjs = ["(from 'assert';)", "$1\nimport tap from 'tap';"] +const testParallelDuplexFromBlob = [ + "const \\{ Blob \\} = require\\('buffer'\\)", + ` + let {Blob} = require('buffer'); + + if (typeof Blob === 'undefined') { + Blob = require('blob-polyfill').Blob; + } + ` +] + const testParallelFinishedEvent = ["res.on\\('close", "res.on('finish"] const testParallelFlatMapWinLineSeparator = [ @@ -247,6 +256,7 @@ export const replacements = { testParallelSilentConsole, testParallelTimersPromises ], + 'test/parallel/test-stream-duplex-from.js': [testParallelDuplexFromBlob], 'test/parallel/test-stream-finished.js': [testParallelFinishedEvent], 'test/parallel/test-stream-flatMap.js': [testParallelFlatMapWinLineSeparator], 'test/parallel/test-stream-preprocess.js': [testParallelPreprocessWinLineSeparator], diff --git a/lib/_stream_duplex.js b/lib/_stream_duplex.js index ae4d1e0b9c..105eebbd1c 100644 --- a/lib/_stream_duplex.js +++ b/lib/_stream_duplex.js @@ -1,5 +1,3 @@ -'use strict'; +'use strict' // Keep this file as an alias for the full stream module. -// Keep this file as an alias for the full stream module. - -module.exports = require('./stream').Duplex; +module.exports = require('./stream').Duplex diff --git a/lib/_stream_passthrough.js b/lib/_stream_passthrough.js index 31b74aeb3d..31358e6d12 100644 --- a/lib/_stream_passthrough.js +++ b/lib/_stream_passthrough.js @@ -1,5 +1,3 @@ -'use strict'; +'use strict' // Keep this file as an alias for the full stream module. -// Keep this file as an alias for the full stream module. - -module.exports = require('./stream').PassThrough; +module.exports = require('./stream').PassThrough diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js index 5db2154a12..abd53db4ca 100644 --- a/lib/_stream_readable.js +++ b/lib/_stream_readable.js @@ -1,5 +1,3 @@ -'use strict'; +'use strict' // Keep this file as an alias for the full stream module. -// Keep this file as an alias for the full stream module. 
- -module.exports = require('./stream').Readable; +module.exports = require('./stream').Readable diff --git a/lib/_stream_transform.js b/lib/_stream_transform.js index f8e475fa6e..98ea338248 100644 --- a/lib/_stream_transform.js +++ b/lib/_stream_transform.js @@ -1,5 +1,3 @@ -'use strict'; +'use strict' // Keep this file as an alias for the full stream module. -// Keep this file as an alias for the full stream module. - -module.exports = require('./stream').Transform; +module.exports = require('./stream').Transform diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js index 70cfe234dd..07204c4295 100644 --- a/lib/_stream_writable.js +++ b/lib/_stream_writable.js @@ -1,5 +1,3 @@ -'use strict'; +'use strict' // Keep this file as an alias for the full stream module. -// Keep this file as an alias for the full stream module. - -module.exports = require('./stream').Writable; +module.exports = require('./stream').Writable diff --git a/lib/internal/streams/add-abort-signal.js b/lib/internal/streams/add-abort-signal.js index 0cfda5d75a..8d5a840f70 100644 --- a/lib/internal/streams/add-abort-signal.js +++ b/lib/internal/streams/add-abort-signal.js @@ -1,46 +1,52 @@ -'use strict'; +'use strict' -const { - AbortError, - codes, -} = require('../../ours/errors'); +const { AbortError, codes } = require('../../ours/errors') -const eos = require('./end-of-stream'); -const { ERR_INVALID_ARG_TYPE } = codes; +const eos = require('./end-of-stream') -// This method is inlined here for readable-stream +const { ERR_INVALID_ARG_TYPE } = codes // This method is inlined here for readable-stream // It also does not allow for signal to not exist on the stream // https://github.com/nodejs/node/pull/36061#discussion_r533718029 + const validateAbortSignal = (signal, name) => { - if (typeof signal !== 'object' || - !('aborted' in signal)) { - throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal); + if (typeof signal !== 'object' || !('aborted' in signal)) { + throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal) } -}; +} function isNodeStream(obj) { - return !!(obj && typeof obj.pipe === 'function'); + return !!(obj && typeof obj.pipe === 'function') } module.exports.addAbortSignal = function addAbortSignal(signal, stream) { - validateAbortSignal(signal, 'signal'); + validateAbortSignal(signal, 'signal') + if (!isNodeStream(stream)) { - throw new ERR_INVALID_ARG_TYPE('stream', 'stream.Stream', stream); + throw new ERR_INVALID_ARG_TYPE('stream', 'stream.Stream', stream) } - return module.exports.addAbortSignalNoValidate(signal, stream); -}; -module.exports.addAbortSignalNoValidate = function(signal, stream) { + + return module.exports.addAbortSignalNoValidate(signal, stream) +} + +module.exports.addAbortSignalNoValidate = function (signal, stream) { if (typeof signal !== 'object' || !('aborted' in signal)) { - return stream; + return stream } + const onAbort = () => { - stream.destroy(new AbortError(undefined, { cause: signal.reason })); - }; + stream.destroy( + new AbortError(undefined, { + cause: signal.reason + }) + ) + } + if (signal.aborted) { - onAbort(); + onAbort() } else { - signal.addEventListener('abort', onAbort); - eos(stream, () => signal.removeEventListener('abort', onAbort)); + signal.addEventListener('abort', onAbort) + eos(stream, () => signal.removeEventListener('abort', onAbort)) } - return stream; -}; + + return stream +} diff --git a/lib/internal/streams/buffer_list.js b/lib/internal/streams/buffer_list.js index bef1a9cb3b..01fd48ecf3 100644 --- 
a/lib/internal/streams/buffer_list.js +++ b/lib/internal/streams/buffer_list.js @@ -1,174 +1,173 @@ -'use strict'; +'use strict' -const { - StringPrototypeSlice, - SymbolIterator, - TypedArrayPrototypeSet, - Uint8Array, -} = require('../../ours/primordials'); +const { StringPrototypeSlice, SymbolIterator, TypedArrayPrototypeSet, Uint8Array } = require('../../ours/primordials') -const { Buffer } = require('buffer'); -const { inspect } = require('util'); +const { Buffer } = require('buffer') + +const { inspect } = require('util') module.exports = class BufferList { constructor() { - this.head = null; - this.tail = null; - this.length = 0; + this.head = null + this.tail = null + this.length = 0 } push(v) { - const entry = { data: v, next: null }; - if (this.length > 0) - this.tail.next = entry; - else - this.head = entry; - this.tail = entry; - ++this.length; + const entry = { + data: v, + next: null + } + if (this.length > 0) this.tail.next = entry + else this.head = entry + this.tail = entry + ++this.length } unshift(v) { - const entry = { data: v, next: this.head }; - if (this.length === 0) - this.tail = entry; - this.head = entry; - ++this.length; + const entry = { + data: v, + next: this.head + } + if (this.length === 0) this.tail = entry + this.head = entry + ++this.length } shift() { - if (this.length === 0) - return; - const ret = this.head.data; - if (this.length === 1) - this.head = this.tail = null; - else - this.head = this.head.next; - --this.length; - return ret; + if (this.length === 0) return + const ret = this.head.data + if (this.length === 1) this.head = this.tail = null + else this.head = this.head.next + --this.length + return ret } clear() { - this.head = this.tail = null; - this.length = 0; + this.head = this.tail = null + this.length = 0 } join(s) { - if (this.length === 0) - return ''; - let p = this.head; - let ret = '' + p.data; - while ((p = p.next) !== null) - ret += s + p.data; - return ret; + if (this.length === 0) return '' + let p = this.head + let ret = '' + p.data + + while ((p = p.next) !== null) ret += s + p.data + + return ret } concat(n) { - if (this.length === 0) - return Buffer.alloc(0); - const ret = Buffer.allocUnsafe(n >>> 0); - let p = this.head; - let i = 0; + if (this.length === 0) return Buffer.alloc(0) + const ret = Buffer.allocUnsafe(n >>> 0) + let p = this.head + let i = 0 + while (p) { - TypedArrayPrototypeSet(ret, p.data, i); - i += p.data.length; - p = p.next; + TypedArrayPrototypeSet(ret, p.data, i) + i += p.data.length + p = p.next } - return ret; - } - // Consumes a specified amount of bytes or characters from the buffered data. + return ret + } // Consumes a specified amount of bytes or characters from the buffered data. + consume(n, hasStrings) { - const data = this.head.data; + const data = this.head.data + if (n < data.length) { // `slice` is the same for buffers and strings. - const slice = data.slice(0, n); - this.head.data = data.slice(n); - return slice; + const slice = data.slice(0, n) + this.head.data = data.slice(n) + return slice } + if (n === data.length) { // First chunk is a perfect match. - return this.shift(); - } - // Result spans more than one buffer. - return hasStrings ? this._getString(n) : this._getBuffer(n); + return this.shift() + } // Result spans more than one buffer. + + return hasStrings ? 
this._getString(n) : this._getBuffer(n) } first() { - return this.head.data; + return this.head.data } *[SymbolIterator]() { for (let p = this.head; p; p = p.next) { - yield p.data; + yield p.data } - } + } // Consumes a specified amount of characters from the buffered data. - // Consumes a specified amount of characters from the buffered data. _getString(n) { - let ret = ''; - let p = this.head; - let c = 0; + let ret = '' + let p = this.head + let c = 0 + do { - const str = p.data; + const str = p.data + if (n > str.length) { - ret += str; - n -= str.length; + ret += str + n -= str.length } else { if (n === str.length) { - ret += str; - ++c; - if (p.next) - this.head = p.next; - else - this.head = this.tail = null; + ret += str + ++c + if (p.next) this.head = p.next + else this.head = this.tail = null } else { - ret += StringPrototypeSlice(str, 0, n); - this.head = p; - p.data = StringPrototypeSlice(str, n); + ret += StringPrototypeSlice(str, 0, n) + this.head = p + p.data = StringPrototypeSlice(str, n) } - break; + + break } - ++c; - } while ((p = p.next) !== null); - this.length -= c; - return ret; - } - // Consumes a specified amount of bytes from the buffered data. + ++c + } while ((p = p.next) !== null) + + this.length -= c + return ret + } // Consumes a specified amount of bytes from the buffered data. + _getBuffer(n) { - const ret = Buffer.allocUnsafe(n); - const retLen = n; - let p = this.head; - let c = 0; + const ret = Buffer.allocUnsafe(n) + const retLen = n + let p = this.head + let c = 0 + do { - const buf = p.data; + const buf = p.data + if (n > buf.length) { - TypedArrayPrototypeSet(ret, buf, retLen - n); - n -= buf.length; + TypedArrayPrototypeSet(ret, buf, retLen - n) + n -= buf.length } else { if (n === buf.length) { - TypedArrayPrototypeSet(ret, buf, retLen - n); - ++c; - if (p.next) - this.head = p.next; - else - this.head = this.tail = null; + TypedArrayPrototypeSet(ret, buf, retLen - n) + ++c + if (p.next) this.head = p.next + else this.head = this.tail = null } else { - TypedArrayPrototypeSet(ret, - new Uint8Array(buf.buffer, buf.byteOffset, n), - retLen - n); - this.head = p; - p.data = buf.slice(n); + TypedArrayPrototypeSet(ret, new Uint8Array(buf.buffer, buf.byteOffset, n), retLen - n) + this.head = p + p.data = buf.slice(n) } - break; + + break } - ++c; - } while ((p = p.next) !== null); - this.length -= c; - return ret; - } - // Make sure the linked list only shows the minimal necessary information. + ++c + } while ((p = p.next) !== null) + + this.length -= c + return ret + } // Make sure the linked list only shows the minimal necessary information. + [Symbol.for('nodejs.util.inspect.custom')](_, options) { return inspect(this, { ...options, @@ -176,6 +175,6 @@ module.exports = class BufferList { depth: 0, // It should not recurse. 
customInspect: false - }); + }) } -}; +} diff --git a/lib/internal/streams/compose.js b/lib/internal/streams/compose.js index 47f1c547c3..0a2e810a3e 100644 --- a/lib/internal/streams/compose.js +++ b/lib/internal/streams/compose.js @@ -1,174 +1,161 @@ -'use strict'; +'use strict' + +const { pipeline } = require('./pipeline') + +const Duplex = require('./duplex') + +const { destroyer } = require('./destroy') + +const { isNodeStream, isReadable, isWritable } = require('./utils') -const { pipeline } = require('./pipeline'); -const Duplex = require('./duplex'); -const { destroyer } = require('./destroy'); -const { - isNodeStream, - isReadable, - isWritable, -} = require('./utils'); const { AbortError, - codes: { - ERR_INVALID_ARG_VALUE, - ERR_MISSING_ARGS, - }, -} = require('../../ours/errors'); + codes: { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS } +} = require('../../ours/errors') module.exports = function compose(...streams) { if (streams.length === 0) { - throw new ERR_MISSING_ARGS('streams'); + throw new ERR_MISSING_ARGS('streams') } if (streams.length === 1) { - return Duplex.from(streams[0]); + return Duplex.from(streams[0]) } - const orgStreams = [...streams]; + const orgStreams = [...streams] if (typeof streams[0] === 'function') { - streams[0] = Duplex.from(streams[0]); + streams[0] = Duplex.from(streams[0]) } if (typeof streams[streams.length - 1] === 'function') { - const idx = streams.length - 1; - streams[idx] = Duplex.from(streams[idx]); + const idx = streams.length - 1 + streams[idx] = Duplex.from(streams[idx]) } for (let n = 0; n < streams.length; ++n) { if (!isNodeStream(streams[n])) { // TODO(ronag): Add checks for non streams. - continue; + continue } + if (n < streams.length - 1 && !isReadable(streams[n])) { - throw new ERR_INVALID_ARG_VALUE( - `streams[${n}]`, - orgStreams[n], - 'must be readable' - ); + throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be readable') } + if (n > 0 && !isWritable(streams[n])) { - throw new ERR_INVALID_ARG_VALUE( - `streams[${n}]`, - orgStreams[n], - 'must be writable' - ); + throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be writable') } } - let ondrain; - let onfinish; - let onreadable; - let onclose; - let d; + let ondrain + let onfinish + let onreadable + let onclose + let d function onfinished(err) { - const cb = onclose; - onclose = null; + const cb = onclose + onclose = null if (cb) { - cb(err); + cb(err) } else if (err) { - d.destroy(err); + d.destroy(err) } else if (!readable && !writable) { - d.destroy(); + d.destroy() } } - const head = streams[0]; - const tail = pipeline(streams, onfinished); - - const writable = !!isWritable(head); - const readable = !!isReadable(tail); - - // TODO(ronag): Avoid double buffering. + const head = streams[0] + const tail = pipeline(streams, onfinished) + const writable = !!isWritable(head) + const readable = !!isReadable(tail) // TODO(ronag): Avoid double buffering. // Implement Writable/Readable/Duplex traits. // See, https://github.com/nodejs/node/pull/33515. + d = new Duplex({ // TODO (ronag): highWaterMark? 
- writableObjectMode: !!head?.writableObjectMode, - readableObjectMode: !!tail?.writableObjectMode, + writableObjectMode: !!(head !== null && head !== undefined && head.writableObjectMode), + readableObjectMode: !!(tail !== null && tail !== undefined && tail.writableObjectMode), writable, - readable, - }); + readable + }) if (writable) { - d._write = function(chunk, encoding, callback) { + d._write = function (chunk, encoding, callback) { if (head.write(chunk, encoding)) { - callback(); + callback() } else { - ondrain = callback; + ondrain = callback } - }; + } - d._final = function(callback) { - head.end(); - onfinish = callback; - }; + d._final = function (callback) { + head.end() + onfinish = callback + } - head.on('drain', function() { + head.on('drain', function () { if (ondrain) { - const cb = ondrain; - ondrain = null; - cb(); + const cb = ondrain + ondrain = null + cb() } - }); - - tail.on('finish', function() { + }) + tail.on('finish', function () { if (onfinish) { - const cb = onfinish; - onfinish = null; - cb(); + const cb = onfinish + onfinish = null + cb() } - }); + }) } if (readable) { - tail.on('readable', function() { + tail.on('readable', function () { if (onreadable) { - const cb = onreadable; - onreadable = null; - cb(); + const cb = onreadable + onreadable = null + cb() } - }); - - tail.on('end', function() { - d.push(null); - }); + }) + tail.on('end', function () { + d.push(null) + }) - d._read = function() { + d._read = function () { while (true) { - const buf = tail.read(); + const buf = tail.read() if (buf === null) { - onreadable = d._read; - return; + onreadable = d._read + return } if (!d.push(buf)) { - return; + return } } - }; + } } - d._destroy = function(err, callback) { + d._destroy = function (err, callback) { if (!err && onclose !== null) { - err = new AbortError(); + err = new AbortError() } - onreadable = null; - ondrain = null; - onfinish = null; + onreadable = null + ondrain = null + onfinish = null if (onclose === null) { - callback(err); + callback(err) } else { - onclose = callback; - destroyer(tail, err); + onclose = callback + destroyer(tail, err) } - }; + } - return d; -}; + return d +} diff --git a/lib/internal/streams/destroy.js b/lib/internal/streams/destroy.js index 6f1787953a..7eeda13615 100644 --- a/lib/internal/streams/destroy.js +++ b/lib/internal/streams/destroy.js @@ -1,184 +1,178 @@ -'use strict'; +'use strict' const { aggregateTwoErrors, - codes: { - ERR_MULTIPLE_CALLBACK, - }, - AbortError, -} = require('../../ours/errors'); -const { - Symbol, -} = require('../../ours/primordials'); -const { - kDestroyed, - isDestroyed, - isFinished, - isServerRequest -} = require('./utils'); + codes: { ERR_MULTIPLE_CALLBACK }, + AbortError +} = require('../../ours/errors') + +const { Symbol } = require('../../ours/primordials') + +const { kDestroyed, isDestroyed, isFinished, isServerRequest } = require('./utils') -const kDestroy = Symbol('kDestroy'); -const kConstruct = Symbol('kConstruct'); +const kDestroy = Symbol('kDestroy') +const kConstruct = Symbol('kConstruct') function checkError(err, w, r) { if (err) { // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 - err.stack; // eslint-disable-line no-unused-expressions + err.stack // eslint-disable-line no-unused-expressions if (w && !w.errored) { - w.errored = err; + w.errored = err } + if (r && !r.errored) { - r.errored = err; + r.errored = err } } -} - -// Backwards compat. cb() is undocumented and unused in core but +} // Backwards compat. 
cb() is undocumented and unused in core but // unfortunately might be used by modules. + function destroy(err, cb) { - const r = this._readableState; - const w = this._writableState; - // With duplex streams we use the writable side for state. - const s = w || r; + const r = this._readableState + const w = this._writableState // With duplex streams we use the writable side for state. + + const s = w || r if ((w && w.destroyed) || (r && r.destroyed)) { if (typeof cb === 'function') { - cb(); + cb() } - return this; - } - - - // We set destroyed to true before firing error callbacks in order + return this + } // We set destroyed to true before firing error callbacks in order // to make it re-entrance safe in case destroy() is called within callbacks - checkError(err, w, r); + + checkError(err, w, r) if (w) { - w.destroyed = true; + w.destroyed = true } + if (r) { - r.destroyed = true; - } + r.destroyed = true + } // If still constructing then defer calling _destroy. - // If still constructing then defer calling _destroy. if (!s.constructed) { - this.once(kDestroy, function(er) { - _destroy(this, aggregateTwoErrors(er, err), cb); - }); + this.once(kDestroy, function (er) { + _destroy(this, aggregateTwoErrors(er, err), cb) + }) } else { - _destroy(this, err, cb); + _destroy(this, err, cb) } - return this; + return this } function _destroy(self, err, cb) { - let called = false; + let called = false function onDestroy(err) { if (called) { - return; + return } - called = true; - - const r = self._readableState; - const w = self._writableState; - checkError(err, w, r); + called = true + const r = self._readableState + const w = self._writableState + checkError(err, w, r) if (w) { - w.closed = true; + w.closed = true } + if (r) { - r.closed = true; + r.closed = true } if (typeof cb === 'function') { - cb(err); + cb(err) } if (err) { - process.nextTick(emitErrorCloseNT, self, err); + process.nextTick(emitErrorCloseNT, self, err) } else { - process.nextTick(emitCloseNT, self); + process.nextTick(emitCloseNT, self) } } + try { - self._destroy(err || null, onDestroy); + self._destroy(err || null, onDestroy) } catch (err) { - onDestroy(err); + onDestroy(err) } } function emitErrorCloseNT(self, err) { - emitErrorNT(self, err); - emitCloseNT(self); + emitErrorNT(self, err) + emitCloseNT(self) } function emitCloseNT(self) { - const r = self._readableState; - const w = self._writableState; + const r = self._readableState + const w = self._writableState if (w) { - w.closeEmitted = true; + w.closeEmitted = true } + if (r) { - r.closeEmitted = true; + r.closeEmitted = true } if ((w && w.emitClose) || (r && r.emitClose)) { - self.emit('close'); + self.emit('close') } } function emitErrorNT(self, err) { - const r = self._readableState; - const w = self._writableState; + const r = self._readableState + const w = self._writableState if ((w && w.errorEmitted) || (r && r.errorEmitted)) { - return; + return } if (w) { - w.errorEmitted = true; + w.errorEmitted = true } + if (r) { - r.errorEmitted = true; + r.errorEmitted = true } - self.emit('error', err); + self.emit('error', err) } function undestroy() { - const r = this._readableState; - const w = this._writableState; + const r = this._readableState + const w = this._writableState if (r) { - r.constructed = true; - r.closed = false; - r.closeEmitted = false; - r.destroyed = false; - r.errored = null; - r.errorEmitted = false; - r.reading = false; - r.ended = r.readable === false; - r.endEmitted = r.readable === false; + r.constructed = true + r.closed = false + 
r.closeEmitted = false + r.destroyed = false + r.errored = null + r.errorEmitted = false + r.reading = false + r.ended = r.readable === false + r.endEmitted = r.readable === false } if (w) { - w.constructed = true; - w.destroyed = false; - w.closed = false; - w.closeEmitted = false; - w.errored = null; - w.errorEmitted = false; - w.finalCalled = false; - w.prefinished = false; - w.ended = w.writable === false; - w.ending = w.writable === false; - w.finished = w.writable === false; + w.constructed = true + w.destroyed = false + w.closed = false + w.closeEmitted = false + w.errored = null + w.errorEmitted = false + w.finalCalled = false + w.prefinished = false + w.ended = w.writable === false + w.ending = w.writable === false + w.finished = w.writable === false } } @@ -188,144 +182,144 @@ function errorOrDestroy(stream, err, sync) { // For now when you opt-in to autoDestroy we allow // the error to be emitted nextTick. In a future // semver major update we should change the default to this. - - const r = stream._readableState; - const w = stream._writableState; + const r = stream._readableState + const w = stream._writableState if ((w && w.destroyed) || (r && r.destroyed)) { - return this; + return this } - if ((r && r.autoDestroy) || (w && w.autoDestroy)) - stream.destroy(err); + if ((r && r.autoDestroy) || (w && w.autoDestroy)) stream.destroy(err) else if (err) { // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 - err.stack; // eslint-disable-line no-unused-expressions + err.stack // eslint-disable-line no-unused-expressions if (w && !w.errored) { - w.errored = err; + w.errored = err } + if (r && !r.errored) { - r.errored = err; + r.errored = err } + if (sync) { - process.nextTick(emitErrorNT, stream, err); + process.nextTick(emitErrorNT, stream, err) } else { - emitErrorNT(stream, err); + emitErrorNT(stream, err) } } } function construct(stream, cb) { if (typeof stream._construct !== 'function') { - return; + return } - const r = stream._readableState; - const w = stream._writableState; + const r = stream._readableState + const w = stream._writableState if (r) { - r.constructed = false; + r.constructed = false } + if (w) { - w.constructed = false; + w.constructed = false } - stream.once(kConstruct, cb); + stream.once(kConstruct, cb) if (stream.listenerCount(kConstruct) > 1) { // Duplex - return; + return } - process.nextTick(constructNT, stream); + process.nextTick(constructNT, stream) } function constructNT(stream) { - let called = false; + let called = false function onConstruct(err) { if (called) { - errorOrDestroy(stream, err ?? new ERR_MULTIPLE_CALLBACK()); - return; + errorOrDestroy(stream, err !== null && err !== undefined ? 
err : new ERR_MULTIPLE_CALLBACK()) + return } - called = true; - const r = stream._readableState; - const w = stream._writableState; - const s = w || r; + called = true + const r = stream._readableState + const w = stream._writableState + const s = w || r if (r) { - r.constructed = true; + r.constructed = true } + if (w) { - w.constructed = true; + w.constructed = true } if (s.destroyed) { - stream.emit(kDestroy, err); + stream.emit(kDestroy, err) } else if (err) { - errorOrDestroy(stream, err, true); + errorOrDestroy(stream, err, true) } else { - process.nextTick(emitConstructNT, stream); + process.nextTick(emitConstructNT, stream) } } try { - stream._construct(onConstruct); + stream._construct(onConstruct) } catch (err) { - onConstruct(err); + onConstruct(err) } } function emitConstructNT(stream) { - stream.emit(kConstruct); + stream.emit(kConstruct) } function isRequest(stream) { - return stream && stream.setHeader && typeof stream.abort === 'function'; + return stream && stream.setHeader && typeof stream.abort === 'function' } function emitCloseLegacy(stream) { - stream.emit('close'); + stream.emit('close') } function emitErrorCloseLegacy(stream, err) { - stream.emit('error', err); - process.nextTick(emitCloseLegacy, stream); -} + stream.emit('error', err) + process.nextTick(emitCloseLegacy, stream) +} // Normalize destroy for legacy. -// Normalize destroy for legacy. function destroyer(stream, err) { if (!stream || isDestroyed(stream)) { - return; + return } if (!err && !isFinished(stream)) { - err = new AbortError(); - } + err = new AbortError() + } // TODO: Remove isRequest branches. - // TODO: Remove isRequest branches. if (isServerRequest(stream)) { - stream.socket = null; - stream.destroy(err); + stream.socket = null + stream.destroy(err) } else if (isRequest(stream)) { - stream.abort(); + stream.abort() } else if (isRequest(stream.req)) { - stream.req.abort(); + stream.req.abort() } else if (typeof stream.destroy === 'function') { - stream.destroy(err); + stream.destroy(err) } else if (typeof stream.close === 'function') { // TODO: Don't lose err? - stream.close(); + stream.close() } else if (err) { - process.nextTick(emitErrorCloseLegacy, stream); + process.nextTick(emitErrorCloseLegacy, stream) } else { - process.nextTick(emitCloseLegacy, stream); + process.nextTick(emitCloseLegacy, stream) } if (!stream.destroyed) { - stream[kDestroyed] = true; + stream[kDestroyed] = true } } @@ -335,4 +329,4 @@ module.exports = { destroy, undestroy, errorOrDestroy -}; +} diff --git a/lib/internal/streams/duplex.js b/lib/internal/streams/duplex.js index 9588ddc58e..a0d6f6bd73 100644 --- a/lib/internal/streams/duplex.js +++ b/lib/internal/streams/duplex.js @@ -18,129 +18,111 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. - // a duplex stream is just a stream that is both readable and writable. // Since JS doesn't have multiple prototype inheritance, this class // prototypically inherits from Readable, and then parasitically from // Writable. 
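// Usage sketch, for illustration only: the options form of the Duplex constructor
// defined below, with independent behaviour on the readable and writable sides.
// `DuplexSketch`, `chunks` and the string payloads are hypothetical names; the API
// shown is the documented stream.Duplex contract.
const { Duplex: DuplexSketch } = require('stream')

const chunks = []
const d = new DuplexSketch({
  read() {
    // Readable side: push one value, then signal end-of-stream.
    this.push('pong')
    this.push(null)
  },
  write(chunk, encoding, callback) {
    // Writable side: collect incoming chunks.
    chunks.push(chunk.toString())
    callback()
  }
})

d.on('data', (data) => console.log('readable side emitted:', data.toString()))
d.write('ping')
d.end()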
- -'use strict'; +'use strict' const { ObjectDefineProperties, ObjectGetOwnPropertyDescriptor, ObjectKeys, - ObjectSetPrototypeOf, -} = require('../../ours/primordials'); + ObjectSetPrototypeOf +} = require('../../ours/primordials') -module.exports = Duplex; +module.exports = Duplex -const Readable = require('./readable'); -const Writable = require('./writable'); +const Readable = require('./readable') -ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype); -ObjectSetPrototypeOf(Duplex, Readable); +const Writable = require('./writable') +ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype) +ObjectSetPrototypeOf(Duplex, Readable) { - const keys = ObjectKeys(Writable.prototype); - // Allow the keys array to be GC'ed. + const keys = ObjectKeys(Writable.prototype) // Allow the keys array to be GC'ed. + for (let i = 0; i < keys.length; i++) { - const method = keys[i]; - if (!Duplex.prototype[method]) - Duplex.prototype[method] = Writable.prototype[method]; + const method = keys[i] + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method] } } function Duplex(options) { - if (!(this instanceof Duplex)) - return new Duplex(options); - - Readable.call(this, options); - Writable.call(this, options); + if (!(this instanceof Duplex)) return new Duplex(options) + Readable.call(this, options) + Writable.call(this, options) if (options) { - this.allowHalfOpen = options.allowHalfOpen !== false; + this.allowHalfOpen = options.allowHalfOpen !== false if (options.readable === false) { - this._readableState.readable = false; - this._readableState.ended = true; - this._readableState.endEmitted = true; + this._readableState.readable = false + this._readableState.ended = true + this._readableState.endEmitted = true } if (options.writable === false) { - this._writableState.writable = false; - this._writableState.ending = true; - this._writableState.ended = true; - this._writableState.finished = true; + this._writableState.writable = false + this._writableState.ending = true + this._writableState.ended = true + this._writableState.finished = true } } else { - this.allowHalfOpen = true; + this.allowHalfOpen = true } } ObjectDefineProperties(Duplex.prototype, { - writable: - ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writable'), - writableHighWaterMark: - ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableHighWaterMark'), - writableObjectMode: - ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableObjectMode'), - writableBuffer: - ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableBuffer'), - writableLength: - ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableLength'), - writableFinished: - ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableFinished'), - writableCorked: - ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableCorked'), - writableEnded: - ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableEnded'), - writableNeedDrain: - ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableNeedDrain'), - + writable: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writable'), + writableHighWaterMark: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableHighWaterMark'), + writableObjectMode: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableObjectMode'), + writableBuffer: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableBuffer'), + writableLength: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableLength'), + writableFinished: 
ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableFinished'), + writableCorked: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableCorked'), + writableEnded: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableEnded'), + writableNeedDrain: ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableNeedDrain'), destroyed: { get() { - if (this._readableState === undefined || - this._writableState === undefined) { - return false; + if (this._readableState === undefined || this._writableState === undefined) { + return false } - return this._readableState.destroyed && this._writableState.destroyed; + + return this._readableState.destroyed && this._writableState.destroyed }, + set(value) { // Backward compatibility, the user is explicitly // managing destroyed. if (this._readableState && this._writableState) { - this._readableState.destroyed = value; - this._writableState.destroyed = value; + this._readableState.destroyed = value + this._writableState.destroyed = value } } } -}); - -let webStreamsAdapters; +}) +let webStreamsAdapters // Lazy to avoid circular references -// Lazy to avoid circular references function lazyWebStreams() { - if (webStreamsAdapters === undefined) - webStreamsAdapters = {}; - return webStreamsAdapters; + if (webStreamsAdapters === undefined) webStreamsAdapters = {} + return webStreamsAdapters } -Duplex.fromWeb = function(pair, options) { - return lazyWebStreams().newStreamDuplexFromReadableWritablePair( - pair, - options); -}; +Duplex.fromWeb = function (pair, options) { + return lazyWebStreams().newStreamDuplexFromReadableWritablePair(pair, options) +} -Duplex.toWeb = function(duplex) { - return lazyWebStreams().newReadableWritablePairFromDuplex(duplex); -}; +Duplex.toWeb = function (duplex) { + return lazyWebStreams().newReadableWritablePairFromDuplex(duplex) +} -let duplexify; +let duplexify -Duplex.from = function(body) { +Duplex.from = function (body) { if (!duplexify) { - duplexify = require('./duplexify'); + duplexify = require('./duplexify') } - return duplexify(body, 'body'); -}; + + return duplexify(body, 'body') +} diff --git a/lib/internal/streams/duplexify.js b/lib/internal/streams/duplexify.js index 0d90dc9405..085729cfde 100644 --- a/lib/internal/streams/duplexify.js +++ b/lib/internal/streams/duplexify.js @@ -1,4 +1,4 @@ -'use strict'; +'use strict' const { isReadable, @@ -7,94 +7,90 @@ const { isNodeStream, isReadableNodeStream, isWritableNodeStream, - isDuplexNodeStream, -} = require('./utils'); -const eos = require('./end-of-stream'); + isDuplexNodeStream +} = require('./utils') + +const eos = require('./end-of-stream') + const { AbortError, - codes: { - ERR_INVALID_ARG_TYPE, - ERR_INVALID_RETURN_VALUE, - }, -} = require('../../ours/errors'); -const { destroyer } = require('./destroy'); -const Duplex = require('./duplex'); -const Readable = require('./readable'); -const { createDeferredPromise } = require('../../ours/util'); -const from = require('./from'); + codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE } +} = require('../../ours/errors') -const { - isBlob, -} = - { - isBlob(b) { - return b instanceof Blob - } - } +const { destroyer } = require('./destroy') - const { Blob } = require('buffer'); - +const Duplex = require('./duplex') - if (typeof AbortController === 'undefined') { - globalThis.AbortController = require('abort-controller').AbortController; - } - +const Readable = require('./readable') -const { - FunctionPrototypeCall -} = require('../../ours/primordials'); +const { createDeferredPromise } = 
require('../../ours/util') + +const from = require('./from') + +const { isBlob } = { + isBlob(b) { + return b instanceof Blob + } +} + +if (typeof AbortController === 'undefined') { + globalThis.AbortController = require('abort-controller').AbortController +} + +const { FunctionPrototypeCall } = require('../../ours/primordials') // This is needed for pre node 17. -// This is needed for pre node 17. class Duplexify extends Duplex { constructor(options) { - super(options); + super(options) // https://github.com/nodejs/node/pull/34385 - // https://github.com/nodejs/node/pull/34385 - - if (options?.readable === false) { - this._readableState.readable = false; - this._readableState.ended = true; - this._readableState.endEmitted = true; + if ((options === null || options === undefined ? undefined : options.readable) === false) { + this._readableState.readable = false + this._readableState.ended = true + this._readableState.endEmitted = true } - if (options?.writable === false) { - this._writableState.writable = false; - this._writableState.ending = true; - this._writableState.ended = true; - this._writableState.finished = true; + if ((options === null || options === undefined ? undefined : options.writable) === false) { + this._writableState.writable = false + this._writableState.ending = true + this._writableState.ended = true + this._writableState.finished = true } } } module.exports = function duplexify(body, name) { if (isDuplexNodeStream(body)) { - return body; + return body } if (isReadableNodeStream(body)) { - return _duplexify({ readable: body }); + return _duplexify({ + readable: body + }) } if (isWritableNodeStream(body)) { - return _duplexify({ writable: body }); + return _duplexify({ + writable: body + }) } if (isNodeStream(body)) { - return _duplexify({ writable: false, readable: false }); - } - - // TODO: Webstreams + return _duplexify({ + writable: false, + readable: false + }) + } // TODO: Webstreams // if (isReadableStream(body)) { // return _duplexify({ readable: Readable.fromWeb(body) }); // } - // TODO: Webstreams // if (isWritableStream(body)) { // return _duplexify({ writable: Writable.fromWeb(body) }); // } if (typeof body === 'function') { - const { value, write, final, destroy } = fromAsyncGen(body); + const { value, write, final, destroy } = fromAsyncGen(body) if (isIterable(value)) { return from(Duplexify, value, { @@ -103,51 +99,51 @@ module.exports = function duplexify(body, name) { write, final, destroy - }); + }) } - const then = value?.then; - if (typeof then === 'function') { - let d; + const then = value === null || value === undefined ? undefined : value.then + if (typeof then === 'function') { + let d const promise = FunctionPrototypeCall( then, value, (val) => { if (val != null) { - throw new ERR_INVALID_RETURN_VALUE('nully', 'body', val); + throw new ERR_INVALID_RETURN_VALUE('nully', 'body', val) } }, (err) => { - destroyer(d, err); + destroyer(d, err) } - ); - - return d = new Duplexify({ + ) + return (d = new Duplexify({ // TODO (ronag): highWaterMark? 
objectMode: true, readable: false, write, + final(cb) { final(async () => { try { - await promise; - process.nextTick(cb, null); + await promise + process.nextTick(cb, null) } catch (err) { - process.nextTick(cb, err); + process.nextTick(cb, err) } - }); + }) }, + destroy - }); + })) } - throw new ERR_INVALID_RETURN_VALUE( - 'Iterable, AsyncIterable or AsyncFunction', name, value); + throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or AsyncFunction', name, value) } if (isBlob(body)) { - return duplexify(body.arrayBuffer()); + return duplexify(body.arrayBuffer()) } if (isIterable(body)) { @@ -155,10 +151,8 @@ module.exports = function duplexify(body, name) { // TODO (ronag): highWaterMark? objectMode: true, writable: false - }); - } - - // TODO: Webstreams. + }) + } // TODO: Webstreams. // if ( // isReadableStream(body?.readable) && // isWritableStream(body?.writable) @@ -167,222 +161,257 @@ module.exports = function duplexify(body, name) { // } if ( - typeof body?.writable === 'object' || - typeof body?.readable === 'object' + typeof (body === null || body === undefined ? undefined : body.writable) === 'object' || + typeof (body === null || body === undefined ? undefined : body.readable) === 'object' ) { - const readable = body?.readable ? - isReadableNodeStream(body?.readable) ? body?.readable : - duplexify(body.readable) : - undefined; - - const writable = body?.writable ? - isWritableNodeStream(body?.writable) ? body?.writable : - duplexify(body.writable) : - undefined; - - return _duplexify({ readable, writable }); + const readable = + body !== null && body !== undefined && body.readable + ? isReadableNodeStream(body === null || body === undefined ? undefined : body.readable) + ? body === null || body === undefined + ? undefined + : body.readable + : duplexify(body.readable) + : undefined + const writable = + body !== null && body !== undefined && body.writable + ? isWritableNodeStream(body === null || body === undefined ? undefined : body.writable) + ? body === null || body === undefined + ? undefined + : body.writable + : duplexify(body.writable) + : undefined + return _duplexify({ + readable, + writable + }) } - const then = body?.then; - if (typeof then === 'function') { - let d; + const then = body === null || body === undefined ? 
undefined : body.then + if (typeof then === 'function') { + let d FunctionPrototypeCall( then, body, (val) => { if (val != null) { - d.push(val); + d.push(val) } - d.push(null); + + d.push(null) }, (err) => { - destroyer(d, err); + destroyer(d, err) } - ); - - return d = new Duplexify({ + ) + return (d = new Duplexify({ objectMode: true, writable: false, + read() {} - }); + })) } throw new ERR_INVALID_ARG_TYPE( name, - ['Blob', 'ReadableStream', 'WritableStream', 'Stream', 'Iterable', - 'AsyncIterable', 'Function', '{ readable, writable } pair', 'Promise'], - body); -}; + [ + 'Blob', + 'ReadableStream', + 'WritableStream', + 'Stream', + 'Iterable', + 'AsyncIterable', + 'Function', + '{ readable, writable } pair', + 'Promise' + ], + body + ) +} function fromAsyncGen(fn) { - let { promise, resolve } = createDeferredPromise(); - const ac = new AbortController(); - const signal = ac.signal; - const value = fn(async function*() { - while (true) { - const _promise = promise; - promise = null; - const { chunk, done, cb } = await _promise; - process.nextTick(cb); - if (done) return; - if (signal.aborted) - throw new AbortError(undefined, { cause: signal.reason }); - ({ promise, resolve } = createDeferredPromise()); - yield chunk; + let { promise, resolve } = createDeferredPromise() + const ac = new AbortController() + const signal = ac.signal + const value = fn( + (async function* () { + while (true) { + const _promise = promise + promise = null + const { chunk, done, cb } = await _promise + process.nextTick(cb) + if (done) return + if (signal.aborted) + throw new AbortError(undefined, { + cause: signal.reason + }) + ;({ promise, resolve } = createDeferredPromise()) + yield chunk + } + })(), + { + signal } - }(), { signal }); - + ) return { value, + write(chunk, encoding, cb) { - const _resolve = resolve; - resolve = null; - _resolve({ chunk, done: false, cb }); + const _resolve = resolve + resolve = null + + _resolve({ + chunk, + done: false, + cb + }) }, + final(cb) { - const _resolve = resolve; - resolve = null; - _resolve({ done: true, cb }); + const _resolve = resolve + resolve = null + + _resolve({ + done: true, + cb + }) }, + destroy(err, cb) { - ac.abort(); - cb(err); + ac.abort() + cb(err) } - }; + } } function _duplexify(pair) { - const r = pair.readable && typeof pair.readable.read !== 'function' ? - Readable.wrap(pair.readable) : pair.readable; - const w = pair.writable; - - let readable = !!isReadable(r); - let writable = !!isWritable(w); - - let ondrain; - let onfinish; - let onreadable; - let onclose; - let d; + const r = pair.readable && typeof pair.readable.read !== 'function' ? Readable.wrap(pair.readable) : pair.readable + const w = pair.writable + let readable = !!isReadable(r) + let writable = !!isWritable(w) + let ondrain + let onfinish + let onreadable + let onclose + let d function onfinished(err) { - const cb = onclose; - onclose = null; + const cb = onclose + onclose = null if (cb) { - cb(err); + cb(err) } else if (err) { - d.destroy(err); + d.destroy(err) } else if (!readable && !writable) { - d.destroy(); + d.destroy() } - } - - // TODO(ronag): Avoid double buffering. + } // TODO(ronag): Avoid double buffering. // Implement Writable/Readable/Duplex traits. // See, https://github.com/nodejs/node/pull/33515. + d = new Duplexify({ // TODO (ronag): highWaterMark? 
- readableObjectMode: !!r?.readableObjectMode, - writableObjectMode: !!w?.writableObjectMode, + readableObjectMode: !!(r !== null && r !== undefined && r.readableObjectMode), + writableObjectMode: !!(w !== null && w !== undefined && w.writableObjectMode), readable, - writable, - }); + writable + }) if (writable) { eos(w, (err) => { - writable = false; + writable = false + if (err) { - destroyer(r, err); + destroyer(r, err) } - onfinished(err); - }); - d._write = function(chunk, encoding, callback) { + onfinished(err) + }) + + d._write = function (chunk, encoding, callback) { if (w.write(chunk, encoding)) { - callback(); + callback() } else { - ondrain = callback; + ondrain = callback } - }; + } - d._final = function(callback) { - w.end(); - onfinish = callback; - }; + d._final = function (callback) { + w.end() + onfinish = callback + } - w.on('drain', function() { + w.on('drain', function () { if (ondrain) { - const cb = ondrain; - ondrain = null; - cb(); + const cb = ondrain + ondrain = null + cb() } - }); - - w.on('finish', function() { + }) + w.on('finish', function () { if (onfinish) { - const cb = onfinish; - onfinish = null; - cb(); + const cb = onfinish + onfinish = null + cb() } - }); + }) } if (readable) { eos(r, (err) => { - readable = false; + readable = false + if (err) { - destroyer(r, err); + destroyer(r, err) } - onfinished(err); - }); - r.on('readable', function() { + onfinished(err) + }) + r.on('readable', function () { if (onreadable) { - const cb = onreadable; - onreadable = null; - cb(); + const cb = onreadable + onreadable = null + cb() } - }); + }) + r.on('end', function () { + d.push(null) + }) - r.on('end', function() { - d.push(null); - }); - - d._read = function() { + d._read = function () { while (true) { - const buf = r.read(); + const buf = r.read() if (buf === null) { - onreadable = d._read; - return; + onreadable = d._read + return } if (!d.push(buf)) { - return; + return } } - }; + } } - d._destroy = function(err, callback) { + d._destroy = function (err, callback) { if (!err && onclose !== null) { - err = new AbortError(); + err = new AbortError() } - onreadable = null; - ondrain = null; - onfinish = null; + onreadable = null + ondrain = null + onfinish = null if (onclose === null) { - callback(err); + callback(err) } else { - onclose = callback; - destroyer(w, err); - destroyer(r, err); + onclose = callback + destroyer(w, err) + destroyer(r, err) } - }; + } - return d; + return d } diff --git a/lib/internal/streams/end-of-stream.js b/lib/internal/streams/end-of-stream.js index 488f4fbe97..3bafd53b4c 100644 --- a/lib/internal/streams/end-of-stream.js +++ b/lib/internal/streams/end-of-stream.js @@ -1,24 +1,16 @@ // Ported from https://github.com/mafintosh/end-of-stream with // permission from the author, Mathias Buus (@mafintosh). 
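// Usage sketch, for orientation: eos()/finished() attach 'end', 'finish', 'error'
// and 'close' listeners and invoke the callback exactly once when the stream is
// fully done. This mirrors the documented stream.finished() API; the file path
// below is hypothetical.
const fs = require('fs')
const { finished } = require('stream')

const rs = fs.createReadStream('archive.tar')
rs.resume() // drain the readable so it can reach 'end'

const cleanup = finished(rs, (err) => {
  if (err) {
    console.error('stream failed:', err)
  } else {
    console.log('stream is done reading')
  }
  cleanup() // remove the listeners that finished() installed
})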
+'use strict' -'use strict'; +const { AbortError, codes } = require('../../ours/errors') -const { - AbortError, - codes, -} = require('../../ours/errors'); -const { - ERR_INVALID_ARG_TYPE, - ERR_STREAM_PREMATURE_CLOSE -} = codes; -const { once } = require('../../ours/util'); -const { - validateAbortSignal, - validateFunction, - validateObject, -} = require('../validators'); +const { ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes + +const { once } = require('../../ours/util') -const { Promise } = require('../../ours/primordials'); +const { validateAbortSignal, validateFunction, validateObject } = require('../validators') + +const { Promise } = require('../../ours/primordials') const { isClosed, @@ -31,225 +23,236 @@ const { isWritableFinished, isWritableErrored, isNodeStream, - willEmitClose: _willEmitClose, -} = require('./utils'); + willEmitClose: _willEmitClose +} = require('./utils') function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; + return stream.setHeader && typeof stream.abort === 'function' } -const nop = () => {}; +const nop = () => {} function eos(stream, options, callback) { + var _options$readable, _options$writable + if (arguments.length === 2) { - callback = options; - options = {}; + callback = options + options = {} } else if (options == null) { - options = {}; + options = {} } else { - validateObject(options, 'options'); + validateObject(options, 'options') } - validateFunction(callback, 'callback'); - validateAbortSignal(options.signal, 'options.signal'); - callback = once(callback); - - const readable = options.readable ?? isReadableNodeStream(stream); - const writable = options.writable ?? isWritableNodeStream(stream); + validateFunction(callback, 'callback') + validateAbortSignal(options.signal, 'options.signal') + callback = once(callback) + const readable = + (_options$readable = options.readable) !== null && _options$readable !== undefined + ? _options$readable + : isReadableNodeStream(stream) + const writable = + (_options$writable = options.writable) !== null && _options$writable !== undefined + ? _options$writable + : isWritableNodeStream(stream) if (!isNodeStream(stream)) { // TODO: Webstreams. - throw new ERR_INVALID_ARG_TYPE('stream', 'Stream', stream); + throw new ERR_INVALID_ARG_TYPE('stream', 'Stream', stream) } - const wState = stream._writableState; - const rState = stream._readableState; + const wState = stream._writableState + const rState = stream._readableState const onlegacyfinish = () => { if (!stream.writable) { - onfinish(); + onfinish() } - }; - - // TODO (ronag): Improve soft detection to include core modules and + } // TODO (ronag): Improve soft detection to include core modules and // common ecosystem modules that do properly emit 'close' but fail // this generic check. - let willEmitClose = ( - _willEmitClose(stream) && - isReadableNodeStream(stream) === readable && - isWritableNodeStream(stream) === writable - ); - let writableFinished = isWritableFinished(stream, false); + let willEmitClose = + _willEmitClose(stream) && isReadableNodeStream(stream) === readable && isWritableNodeStream(stream) === writable + let writableFinished = isWritableFinished(stream, false) + const onfinish = () => { - writableFinished = true; - // Stream should not be destroyed here. If it is that + writableFinished = true // Stream should not be destroyed here. If it is that // means that user space is doing something differently and // we cannot trust willEmitClose. 
+ if (stream.destroyed) { - willEmitClose = false; + willEmitClose = false } if (willEmitClose && (!stream.readable || readable)) { - return; + return } if (!readable || readableFinished) { - callback.call(stream); + callback.call(stream) } - }; + } + + let readableFinished = isReadableFinished(stream, false) - let readableFinished = isReadableFinished(stream, false); const onend = () => { - readableFinished = true; - // Stream should not be destroyed here. If it is that + readableFinished = true // Stream should not be destroyed here. If it is that // means that user space is doing something differently and // we cannot trust willEmitClose. + if (stream.destroyed) { - willEmitClose = false; + willEmitClose = false } if (willEmitClose && (!stream.writable || writable)) { - return; + return } if (!writable || writableFinished) { - callback.call(stream); + callback.call(stream) } - }; + } const onerror = (err) => { - callback.call(stream, err); - }; + callback.call(stream, err) + } - let closed = isClosed(stream); + let closed = isClosed(stream) const onclose = () => { - closed = true; - - const errored = isWritableErrored(stream) || isReadableErrored(stream); + closed = true + const errored = isWritableErrored(stream) || isReadableErrored(stream) if (errored && typeof errored !== 'boolean') { - return callback.call(stream, errored); + return callback.call(stream, errored) } if (readable && !readableFinished && isReadableNodeStream(stream, true)) { - if (!isReadableFinished(stream, false)) - return callback.call(stream, - new ERR_STREAM_PREMATURE_CLOSE()); + if (!isReadableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()) } + if (writable && !writableFinished) { - if (!isWritableFinished(stream, false)) - return callback.call(stream, - new ERR_STREAM_PREMATURE_CLOSE()); + if (!isWritableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()) } - callback.call(stream); - }; + callback.call(stream) + } const onrequest = () => { - stream.req.on('finish', onfinish); - }; + stream.req.on('finish', onfinish) + } if (isRequest(stream)) { - stream.on('complete', onfinish); + stream.on('complete', onfinish) + if (!willEmitClose) { - stream.on('abort', onclose); + stream.on('abort', onclose) } + if (stream.req) { - onrequest(); + onrequest() } else { - stream.on('request', onrequest); + stream.on('request', onrequest) } - } else if (writable && !wState) { // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } + } else if (writable && !wState) { + // legacy streams + stream.on('end', onlegacyfinish) + stream.on('close', onlegacyfinish) + } // Not all streams will emit 'close' after 'aborted'. - // Not all streams will emit 'close' after 'aborted'. 
if (!willEmitClose && typeof stream.aborted === 'boolean') { - stream.on('aborted', onclose); + stream.on('aborted', onclose) } - stream.on('end', onend); - stream.on('finish', onfinish); + stream.on('end', onend) + stream.on('finish', onfinish) + if (options.error !== false) { - stream.on('error', onerror); + stream.on('error', onerror) } - stream.on('close', onclose); + + stream.on('close', onclose) if (closed) { - process.nextTick(onclose); - } else if (wState?.errorEmitted || rState?.errorEmitted) { + process.nextTick(onclose) + } else if ( + (wState !== null && wState !== undefined && wState.errorEmitted) || + (rState !== null && rState !== undefined && rState.errorEmitted) + ) { if (!willEmitClose) { - process.nextTick(onclose); + process.nextTick(onclose) } } else if ( !readable && (!willEmitClose || isReadable(stream)) && (writableFinished || isWritable(stream) === false) ) { - process.nextTick(onclose); + process.nextTick(onclose) } else if ( !writable && (!willEmitClose || isWritable(stream)) && (readableFinished || isReadable(stream) === false) ) { - process.nextTick(onclose); - } else if ((rState && stream.req && stream.aborted)) { - process.nextTick(onclose); + process.nextTick(onclose) + } else if (rState && stream.req && stream.aborted) { + process.nextTick(onclose) } const cleanup = () => { - callback = nop; - stream.removeListener('aborted', onclose); - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; + callback = nop + stream.removeListener('aborted', onclose) + stream.removeListener('complete', onfinish) + stream.removeListener('abort', onclose) + stream.removeListener('request', onrequest) + if (stream.req) stream.req.removeListener('finish', onfinish) + stream.removeListener('end', onlegacyfinish) + stream.removeListener('close', onlegacyfinish) + stream.removeListener('finish', onfinish) + stream.removeListener('end', onend) + stream.removeListener('error', onerror) + stream.removeListener('close', onclose) + } if (options.signal && !closed) { const abort = () => { // Keep it because cleanup removes it. 
- const endCallback = callback; - cleanup(); + const endCallback = callback + cleanup() endCallback.call( stream, - new AbortError(undefined, { cause: options.signal.reason })); - }; + new AbortError(undefined, { + cause: options.signal.reason + }) + ) + } + if (options.signal.aborted) { - process.nextTick(abort); + process.nextTick(abort) } else { - const originalCallback = callback; + const originalCallback = callback callback = once((...args) => { - options.signal.removeEventListener('abort', abort); - originalCallback.apply(stream, args); - }); - options.signal.addEventListener('abort', abort); + options.signal.removeEventListener('abort', abort) + originalCallback.apply(stream, args) + }) + options.signal.addEventListener('abort', abort) } } - return cleanup; + return cleanup } function finished(stream, opts) { return new Promise((resolve, reject) => { eos(stream, opts, (err) => { if (err) { - reject(err); + reject(err) } else { - resolve(); + resolve() } - }); - }); + }) + }) } -module.exports = eos; -module.exports.finished = finished; +module.exports = eos +module.exports.finished = finished diff --git a/lib/internal/streams/from.js b/lib/internal/streams/from.js index 051d563b69..93fa992888 100644 --- a/lib/internal/streams/from.js +++ b/lib/internal/streams/from.js @@ -1,113 +1,110 @@ -'use strict'; +'use strict' -const { - PromisePrototypeThen, - SymbolAsyncIterator, - SymbolIterator, -} = require('../../ours/primordials'); -const { Buffer } = require('buffer'); +const { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = require('../../ours/primordials') -const { - ERR_INVALID_ARG_TYPE, - ERR_STREAM_NULL_VALUES -} = require('../../ours/errors').codes; +const { Buffer } = require('buffer') + +const { ERR_INVALID_ARG_TYPE, ERR_STREAM_NULL_VALUES } = require('../../ours/errors').codes function from(Readable, iterable, opts) { - let iterator; + let iterator + if (typeof iterable === 'string' || iterable instanceof Buffer) { return new Readable({ objectMode: true, ...opts, + read() { - this.push(iterable); - this.push(null); + this.push(iterable) + this.push(null) } - }); + }) } - let isAsync; + let isAsync + if (iterable && iterable[SymbolAsyncIterator]) { - isAsync = true; - iterator = iterable[SymbolAsyncIterator](); + isAsync = true + iterator = iterable[SymbolAsyncIterator]() } else if (iterable && iterable[SymbolIterator]) { - isAsync = false; - iterator = iterable[SymbolIterator](); + isAsync = false + iterator = iterable[SymbolIterator]() } else { - throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable) } const readable = new Readable({ objectMode: true, highWaterMark: 1, // TODO(ronag): What options should be allowed? - ...opts, - }); - - // Flag to protect against _read + ...opts + }) // Flag to protect against _read // being called before last iteration completion. 
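// Usage sketch: from() is the helper behind Readable.from(), which turns any
// iterable or async iterable into an object-mode readable. The generator and the
// yielded values here are illustrative only.
const { Readable } = require('stream')

async function* generate() {
  yield 'hello'
  yield 'world'
}

Readable.from(generate()).on('data', (chunk) => {
  console.log(chunk) // 'hello', then 'world'
})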
- let reading = false; - readable._read = function() { + let reading = false + + readable._read = function () { if (!reading) { - reading = true; - next(); + reading = true + next() } - }; + } - readable._destroy = function(error, cb) { + readable._destroy = function (error, cb) { PromisePrototypeThen( close(error), () => process.nextTick(cb, error), // nextTick is here in case cb throws - (e) => process.nextTick(cb, e || error), - ); - }; + (e) => process.nextTick(cb, e || error) + ) + } async function close(error) { - const hadError = (error !== undefined) && (error !== null); - const hasThrow = typeof iterator.throw === 'function'; + const hadError = error !== undefined && error !== null + const hasThrow = typeof iterator.throw === 'function' + if (hadError && hasThrow) { - const { value, done } = await iterator.throw(error); - await value; + const { value, done } = await iterator.throw(error) + await value + if (done) { - return; + return } } + if (typeof iterator.return === 'function') { - const { value } = await iterator.return(); - await value; + const { value } = await iterator.return() + await value } } async function next() { for (;;) { try { - const { value, done } = isAsync ? - await iterator.next() : - iterator.next(); + const { value, done } = isAsync ? await iterator.next() : iterator.next() if (done) { - readable.push(null); + readable.push(null) } else { - const res = (value && - typeof value.then === 'function') ? - await value : - value; + const res = value && typeof value.then === 'function' ? await value : value + if (res === null) { - reading = false; - throw new ERR_STREAM_NULL_VALUES(); + reading = false + throw new ERR_STREAM_NULL_VALUES() } else if (readable.push(res)) { - continue; + continue } else { - reading = false; + reading = false } } } catch (err) { - readable.destroy(err); + readable.destroy(err) } - break; + + break } } - return readable; + + return readable } -module.exports = from; +module.exports = from diff --git a/lib/internal/streams/lazy_transform.js b/lib/internal/streams/lazy_transform.js index d6a7632b82..12bdff88b9 100644 --- a/lib/internal/streams/lazy_transform.js +++ b/lib/internal/streams/lazy_transform.js @@ -1,50 +1,45 @@ // LazyTransform is a special type of Transform stream that is lazily loaded. // This is used for performance with bi-API-ship: when two APIs are available // for the stream, one conventional and one non-conventional. 
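// Sketch of the general lazy-initialization pattern that makeGetter()/makeSetter()
// below apply to stream state: the property starts out as a getter that performs
// the expensive setup, then redefines itself as a plain data property so later
// accesses pay nothing. `defineLazy`, `holder` and `table` are illustrative names.
function defineLazy(obj, name, init) {
  Object.defineProperty(obj, name, {
    configurable: true,
    enumerable: true,
    get() {
      const value = init.call(this)
      // Replace the accessor with a normal data property after first use.
      Object.defineProperty(this, name, {
        value,
        writable: true,
        configurable: true,
        enumerable: true
      })
      return value
    }
  })
}

const holder = {}
defineLazy(holder, 'table', () => {
  console.log('building the table once')
  return new Map()
})
holder.table.set('a', 1) // triggers the build
holder.table.set('b', 2) // reuses the cached Map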
-'use strict'; +'use strict' -const { - ObjectDefineProperties, - ObjectDefineProperty, - ObjectSetPrototypeOf, -} = require('../../ours/primordials'); +const { ObjectDefineProperties, ObjectDefineProperty, ObjectSetPrototypeOf } = require('../../ours/primordials') -const stream = require('stream'); +const stream = require('stream') -const { - getDefaultEncoding -} = require('../crypto/util'); +const { getDefaultEncoding } = require('../crypto/util') -module.exports = LazyTransform; +module.exports = LazyTransform function LazyTransform(options) { - this._options = options; + this._options = options } -ObjectSetPrototypeOf(LazyTransform.prototype, stream.Transform.prototype); -ObjectSetPrototypeOf(LazyTransform, stream.Transform); + +ObjectSetPrototypeOf(LazyTransform.prototype, stream.Transform.prototype) +ObjectSetPrototypeOf(LazyTransform, stream.Transform) function makeGetter(name) { - return function() { - stream.Transform.call(this, this._options); - this._writableState.decodeStrings = false; + return function () { + stream.Transform.call(this, this._options) + this._writableState.decodeStrings = false if (!this._options || !this._options.defaultEncoding) { - this._writableState.defaultEncoding = getDefaultEncoding(); + this._writableState.defaultEncoding = getDefaultEncoding() } - return this[name]; - }; + return this[name] + } } function makeSetter(name) { - return function(val) { + return function (val) { ObjectDefineProperty(this, name, { value: val, enumerable: true, configurable: true, writable: true - }); - }; + }) + } } ObjectDefineProperties(LazyTransform.prototype, { @@ -60,4 +55,4 @@ ObjectDefineProperties(LazyTransform.prototype, { configurable: true, enumerable: true } -}); +}) diff --git a/lib/internal/streams/legacy.js b/lib/internal/streams/legacy.js index a618a86991..3c95468704 100644 --- a/lib/internal/streams/legacy.js +++ b/lib/internal/streams/legacy.js @@ -1,114 +1,100 @@ -'use strict'; +'use strict' -const { - ArrayIsArray, - ObjectSetPrototypeOf, -} = require('../../ours/primordials'); +const { ArrayIsArray, ObjectSetPrototypeOf } = require('../../ours/primordials') -const EE = require('events'); +const EE = require('events') function Stream(opts) { - EE.call(this, opts); + EE.call(this, opts) } -ObjectSetPrototypeOf(Stream.prototype, EE.prototype); -ObjectSetPrototypeOf(Stream, EE); -Stream.prototype.pipe = function(dest, options) { - const source = this; +ObjectSetPrototypeOf(Stream.prototype, EE.prototype) +ObjectSetPrototypeOf(Stream, EE) + +Stream.prototype.pipe = function (dest, options) { + const source = this function ondata(chunk) { if (dest.writable && dest.write(chunk) === false && source.pause) { - source.pause(); + source.pause() } } - source.on('data', ondata); + source.on('data', ondata) function ondrain() { if (source.readable && source.resume) { - source.resume(); + source.resume() } } - dest.on('drain', ondrain); - - // If the 'end' option is not supplied, dest.end() will be called when + dest.on('drain', ondrain) // If the 'end' option is not supplied, dest.end() will be called when // source gets the 'end' or 'close' events. Only dest.end() once. 
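// Usage sketch of the behaviour described above: passing { end: false } keeps the
// destination open after the source ends, so the caller can keep writing to it.
// The file names and the trailing write are illustrative only.
const fs = require('fs')

const source = fs.createReadStream('part-1.txt')
const dest = fs.createWriteStream('combined.txt')

source.pipe(dest, { end: false })
source.on('end', () => {
  // Because end: false was passed, dest must be ended explicitly.
  dest.end('appended after the piped data\n')
})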
+ if (!dest._isStdio && (!options || options.end !== false)) { - source.on('end', onend); - source.on('close', onclose); + source.on('end', onend) + source.on('close', onclose) } - let didOnEnd = false; - function onend() { - if (didOnEnd) return; - didOnEnd = true; + let didOnEnd = false - dest.end(); + function onend() { + if (didOnEnd) return + didOnEnd = true + dest.end() } - function onclose() { - if (didOnEnd) return; - didOnEnd = true; - - if (typeof dest.destroy === 'function') dest.destroy(); - } + if (didOnEnd) return + didOnEnd = true + if (typeof dest.destroy === 'function') dest.destroy() + } // Don't leave dangling pipes when there are errors. - // Don't leave dangling pipes when there are errors. function onerror(er) { - cleanup(); + cleanup() + if (EE.listenerCount(this, 'error') === 0) { - this.emit('error', er); + this.emit('error', er) } } - prependListener(source, 'error', onerror); - prependListener(dest, 'error', onerror); + prependListener(source, 'error', onerror) + prependListener(dest, 'error', onerror) // Remove all the event listeners that were added. - // Remove all the event listeners that were added. function cleanup() { - source.removeListener('data', ondata); - dest.removeListener('drain', ondrain); - - source.removeListener('end', onend); - source.removeListener('close', onclose); - - source.removeListener('error', onerror); - dest.removeListener('error', onerror); - - source.removeListener('end', cleanup); - source.removeListener('close', cleanup); - - dest.removeListener('close', cleanup); + source.removeListener('data', ondata) + dest.removeListener('drain', ondrain) + source.removeListener('end', onend) + source.removeListener('close', onclose) + source.removeListener('error', onerror) + dest.removeListener('error', onerror) + source.removeListener('end', cleanup) + source.removeListener('close', cleanup) + dest.removeListener('close', cleanup) } - source.on('end', cleanup); - source.on('close', cleanup); - - dest.on('close', cleanup); - dest.emit('pipe', source); + source.on('end', cleanup) + source.on('close', cleanup) + dest.on('close', cleanup) + dest.emit('pipe', source) // Allow for unix-like usage: A.pipe(B).pipe(C) - // Allow for unix-like usage: A.pipe(B).pipe(C) - return dest; -}; + return dest +} function prependListener(emitter, event, fn) { // Sadly this is not cacheable as some libraries bundle their own // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') - return emitter.prependListener(event, fn); - - // This is a hack to make sure that our error handler is attached before any + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn) // This is a hack to make sure that our error handler is attached before any // userland ones. NEVER DO THIS. This is here only because this code needs // to continue to work with older versions of Node.js that do not include // the prependListener() method. The goal is to eventually remove this hack. 
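// Sketch of the behaviour being emulated here: on emitters that do support it,
// prependListener() places a handler ahead of ones registered earlier with on().
// The emitter and the log messages below are illustrative.
const EventEmitter = require('events')

const emitter = new EventEmitter()
emitter.on('error', () => console.log('userland handler'))
emitter.prependListener('error', () => console.log('internal handler runs first'))
emitter.emit('error', new Error('boom'))
// => internal handler runs first
// => userland handler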
- if (!emitter._events || !emitter._events[event]) - emitter.on(event, fn); - else if (ArrayIsArray(emitter._events[event])) - emitter._events[event].unshift(fn); - else - emitter._events[event] = [fn, emitter._events[event]]; + + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn) + else if (ArrayIsArray(emitter._events[event])) emitter._events[event].unshift(fn) + else emitter._events[event] = [fn, emitter._events[event]] } -module.exports = { Stream, prependListener }; +module.exports = { + Stream, + prependListener +} diff --git a/lib/internal/streams/operators.js b/lib/internal/streams/operators.js index 3fb669f73a..cf0f7b3b9a 100644 --- a/lib/internal/streams/operators.js +++ b/lib/internal/streams/operators.js @@ -1,26 +1,19 @@ -'use strict'; +'use strict' +if (typeof AbortController === 'undefined') { + globalThis.AbortController = require('abort-controller').AbortController +} - if (typeof AbortController === 'undefined') { - globalThis.AbortController = require('abort-controller').AbortController; - } - - -const { - codes: { - ERR_INVALID_ARG_TYPE, - ERR_MISSING_ARGS, - ERR_OUT_OF_RANGE, - }, - AbortError, -} = require('../../ours/errors'); const { - validateAbortSignal, - validateInteger, - validateObject, -} = require('../validators'); -const kWeakHandler = require('../../ours/primordials').Symbol('kWeak'); -const { finished } = require('./end-of-stream'); + codes: { ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE }, + AbortError +} = require('../../ours/errors') + +const { validateAbortSignal, validateInteger, validateObject } = require('../validators') + +const kWeakHandler = require('../../ours/primordials').Symbol('kWeak') + +const { finished } = require('./end-of-stream') const { ArrayPrototypePush, @@ -30,364 +23,500 @@ const { Promise, PromiseReject, PromisePrototypeCatch, - Symbol, -} = require('../../ours/primordials'); + Symbol +} = require('../../ours/primordials') -const kEmpty = Symbol('kEmpty'); -const kEof = Symbol('kEof'); +const kEmpty = Symbol('kEmpty') +const kEof = Symbol('kEof') function map(fn, options) { if (typeof fn !== 'function') { - throw new ERR_INVALID_ARG_TYPE( - 'fn', ['Function', 'AsyncFunction'], fn); + throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) } + if (options != null) { - validateObject(options, 'options'); - } - if (options?.signal != null) { - validateAbortSignal(options.signal, 'options.signal'); + validateObject(options, 'options') } - let concurrency = 1; - if (options?.concurrency != null) { - concurrency = MathFloor(options.concurrency); + if ((options === null || options === undefined ? undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') } - validateInteger(concurrency, 'concurrency', 1); + let concurrency = 1 + + if ((options === null || options === undefined ? 
undefined : options.concurrency) != null) { + concurrency = MathFloor(options.concurrency) + } + validateInteger(concurrency, 'concurrency', 1) return async function* map() { - const ac = new AbortController(); - const stream = this; - const queue = []; - const signal = ac.signal; - const signalOpt = { signal }; - - const abort = () => ac.abort(); - if (options?.signal?.aborted) { - abort(); + var _options$signal, _options$signal2 + + const ac = new AbortController() + const stream = this + const queue = [] + const signal = ac.signal + const signalOpt = { + signal } - options?.signal?.addEventListener('abort', abort); + const abort = () => ac.abort() + + if ( + options !== null && + options !== undefined && + (_options$signal = options.signal) !== null && + _options$signal !== undefined && + _options$signal.aborted + ) { + abort() + } - let next; - let resume; - let done = false; + options === null || options === undefined + ? undefined + : (_options$signal2 = options.signal) === null || _options$signal2 === undefined + ? undefined + : _options$signal2.addEventListener('abort', abort) + let next + let resume + let done = false function onDone() { - done = true; + done = true } async function pump() { try { for await (let val of stream) { + var _val + if (done) { - return; + return } if (signal.aborted) { - throw new AbortError(); + throw new AbortError() } try { - val = fn(val, signalOpt); + val = fn(val, signalOpt) } catch (err) { - val = PromiseReject(err); + val = PromiseReject(err) } if (val === kEmpty) { - continue; + continue } - if (typeof val?.catch === 'function') { - val.catch(onDone); + if (typeof ((_val = val) === null || _val === undefined ? undefined : _val.catch) === 'function') { + val.catch(onDone) } - queue.push(val); + queue.push(val) + if (next) { - next(); - next = null; + next() + next = null } if (!done && queue.length && queue.length >= concurrency) { await new Promise((resolve) => { - resume = resolve; - }); + resume = resolve + }) } } - queue.push(kEof); + + queue.push(kEof) } catch (err) { - const val = PromiseReject(err); - PromisePrototypeCatch(val, onDone); - queue.push(val); + const val = PromiseReject(err) + PromisePrototypeCatch(val, onDone) + queue.push(val) } finally { - done = true; + var _options$signal3 + + done = true + if (next) { - next(); - next = null; + next() + next = null } - options?.signal?.removeEventListener('abort', abort); + + options === null || options === undefined + ? undefined + : (_options$signal3 = options.signal) === null || _options$signal3 === undefined + ? 
undefined + : _options$signal3.removeEventListener('abort', abort) } } - pump(); + pump() try { while (true) { while (queue.length > 0) { - const val = await queue[0]; + const val = await queue[0] if (val === kEof) { - return; + return } if (signal.aborted) { - throw new AbortError(); + throw new AbortError() } if (val !== kEmpty) { - yield val; + yield val } - queue.shift(); + queue.shift() + if (resume) { - resume(); - resume = null; + resume() + resume = null } } await new Promise((resolve) => { - next = resolve; - }); + next = resolve + }) } } finally { - ac.abort(); + ac.abort() + done = true - done = true; if (resume) { - resume(); - resume = null; + resume() + resume = null } } - }.call(this); + }.call(this) } function asIndexedPairs(options = undefined) { if (options != null) { - validateObject(options, 'options'); + validateObject(options, 'options') } - if (options?.signal != null) { - validateAbortSignal(options.signal, 'options.signal'); + + if ((options === null || options === undefined ? undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') } return async function* asIndexedPairs() { - let index = 0; + let index = 0 + for await (const val of this) { - if (options?.signal?.aborted) { - throw new AbortError({ cause: options.signal.reason }); + var _options$signal4 + + if ( + options !== null && + options !== undefined && + (_options$signal4 = options.signal) !== null && + _options$signal4 !== undefined && + _options$signal4.aborted + ) { + throw new AbortError({ + cause: options.signal.reason + }) } - yield [index++, val]; + + yield [index++, val] } - }.call(this); + }.call(this) } async function some(fn, options = undefined) { // eslint-disable-next-line no-unused-vars for await (const unused of filter.call(this, fn, options)) { - return true; + return true } - return false; + + return false } async function every(fn, options = undefined) { if (typeof fn !== 'function') { - throw new ERR_INVALID_ARG_TYPE( - 'fn', ['Function', 'AsyncFunction'], fn); - } - // https://en.wikipedia.org/wiki/De_Morgan%27s_laws - return !(await some.call(this, async (...args) => { - return !(await fn(...args)); - }, options)); + throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) + } // https://en.wikipedia.org/wiki/De_Morgan%27s_laws + + return !(await some.call( + this, + async (...args) => { + return !(await fn(...args)) + }, + options + )) } async function find(fn, options) { for await (const result of filter.call(this, fn, options)) { - return result; + return result } - return undefined; + + return undefined } async function forEach(fn, options) { if (typeof fn !== 'function') { - throw new ERR_INVALID_ARG_TYPE( - 'fn', ['Function', 'AsyncFunction'], fn); + throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) } + async function forEachFn(value, options) { - await fn(value, options); - return kEmpty; - } - // eslint-disable-next-line no-unused-vars + await fn(value, options) + return kEmpty + } // eslint-disable-next-line no-unused-vars + for await (const unused of map.call(this, forEachFn, options)); } function filter(fn, options) { if (typeof fn !== 'function') { - throw new ERR_INVALID_ARG_TYPE( - 'fn', ['Function', 'AsyncFunction'], fn); + throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) } + async function filterFn(value, options) { if (await fn(value, options)) { - return value; + return value } - return kEmpty; + + return kEmpty } - return map.call(this, filterFn, options); -} -// 
Specific to provide better error to reduce since the argument is only + return map.call(this, filterFn, options) +} // Specific to provide better error to reduce since the argument is only // missing if the stream has no items in it - but the code is still appropriate + class ReduceAwareErrMissingArgs extends ERR_MISSING_ARGS { constructor() { - super('reduce'); - this.message = 'Reduce of an empty stream requires an initial value'; + super('reduce') + this.message = 'Reduce of an empty stream requires an initial value' } } async function reduce(reducer, initialValue, options) { + var _options$signal5 + if (typeof reducer !== 'function') { - throw new ERR_INVALID_ARG_TYPE( - 'reducer', ['Function', 'AsyncFunction'], reducer); + throw new ERR_INVALID_ARG_TYPE('reducer', ['Function', 'AsyncFunction'], reducer) } + if (options != null) { - validateObject(options, 'options'); + validateObject(options, 'options') } - if (options?.signal != null) { - validateAbortSignal(options.signal, 'options.signal'); + + if ((options === null || options === undefined ? undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') } - let hasInitialValue = arguments.length > 1; - if (options?.signal?.aborted) { - const err = new AbortError(undefined, { cause: options.signal.reason }); - this.once('error', () => {}); // The error is already propagated - await finished(this.destroy(err)); - throw err; + let hasInitialValue = arguments.length > 1 + + if ( + options !== null && + options !== undefined && + (_options$signal5 = options.signal) !== null && + _options$signal5 !== undefined && + _options$signal5.aborted + ) { + const err = new AbortError(undefined, { + cause: options.signal.reason + }) + this.once('error', () => {}) // The error is already propagated + + await finished(this.destroy(err)) + throw err } - const ac = new AbortController(); - const signal = ac.signal; - if (options?.signal) { - const opts = { once: true, [kWeakHandler]: this }; - options.signal.addEventListener('abort', () => ac.abort(), opts); + + const ac = new AbortController() + const signal = ac.signal + + if (options !== null && options !== undefined && options.signal) { + const opts = { + once: true, + [kWeakHandler]: this + } + options.signal.addEventListener('abort', () => ac.abort(), opts) } - let gotAnyItemFromStream = false; + + let gotAnyItemFromStream = false + try { for await (const value of this) { - gotAnyItemFromStream = true; - if (options?.signal?.aborted) { - throw new AbortError(); + var _options$signal6 + + gotAnyItemFromStream = true + + if ( + options !== null && + options !== undefined && + (_options$signal6 = options.signal) !== null && + _options$signal6 !== undefined && + _options$signal6.aborted + ) { + throw new AbortError() } + if (!hasInitialValue) { - initialValue = value; - hasInitialValue = true; + initialValue = value + hasInitialValue = true } else { - initialValue = await reducer(initialValue, value, { signal }); + initialValue = await reducer(initialValue, value, { + signal + }) } } + if (!gotAnyItemFromStream && !hasInitialValue) { - throw new ReduceAwareErrMissingArgs(); + throw new ReduceAwareErrMissingArgs() } } finally { - ac.abort(); + ac.abort() } - return initialValue; + + return initialValue } async function toArray(options) { if (options != null) { - validateObject(options, 'options'); + validateObject(options, 'options') } - if (options?.signal != null) { - validateAbortSignal(options.signal, 'options.signal'); + + if ((options === null || options === 
undefined ? undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') } - const result = []; + const result = [] + for await (const val of this) { - if (options?.signal?.aborted) { - throw new AbortError(undefined, { cause: options.signal.reason }); + var _options$signal7 + + if ( + options !== null && + options !== undefined && + (_options$signal7 = options.signal) !== null && + _options$signal7 !== undefined && + _options$signal7.aborted + ) { + throw new AbortError(undefined, { + cause: options.signal.reason + }) } - ArrayPrototypePush(result, val); + + ArrayPrototypePush(result, val) } - return result; + + return result } function flatMap(fn, options) { - const values = map.call(this, fn, options); + const values = map.call(this, fn, options) return async function* flatMap() { for await (const val of values) { - yield* val; + yield* val } - }.call(this); + }.call(this) } function toIntegerOrInfinity(number) { // We coerce here to align with the spec // https://github.com/tc39/proposal-iterator-helpers/issues/169 - number = Number(number); + number = Number(number) + if (NumberIsNaN(number)) { - return 0; + return 0 } + if (number < 0) { - throw new ERR_OUT_OF_RANGE('number', '>= 0', number); + throw new ERR_OUT_OF_RANGE('number', '>= 0', number) } - return number; + + return number } function drop(number, options = undefined) { if (options != null) { - validateObject(options, 'options'); + validateObject(options, 'options') } - if (options?.signal != null) { - validateAbortSignal(options.signal, 'options.signal'); + + if ((options === null || options === undefined ? undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') } - number = toIntegerOrInfinity(number); + number = toIntegerOrInfinity(number) return async function* drop() { - if (options?.signal?.aborted) { - throw new AbortError(); + var _options$signal8 + + if ( + options !== null && + options !== undefined && + (_options$signal8 = options.signal) !== null && + _options$signal8 !== undefined && + _options$signal8.aborted + ) { + throw new AbortError() } + for await (const val of this) { - if (options?.signal?.aborted) { - throw new AbortError(); + var _options$signal9 + + if ( + options !== null && + options !== undefined && + (_options$signal9 = options.signal) !== null && + _options$signal9 !== undefined && + _options$signal9.aborted + ) { + throw new AbortError() } + if (number-- <= 0) { - yield val; + yield val } } - }.call(this); + }.call(this) } function take(number, options = undefined) { if (options != null) { - validateObject(options, 'options'); + validateObject(options, 'options') } - if (options?.signal != null) { - validateAbortSignal(options.signal, 'options.signal'); + + if ((options === null || options === undefined ? 
undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') } - number = toIntegerOrInfinity(number); + number = toIntegerOrInfinity(number) return async function* take() { - if (options?.signal?.aborted) { - throw new AbortError(); + var _options$signal10 + + if ( + options !== null && + options !== undefined && + (_options$signal10 = options.signal) !== null && + _options$signal10 !== undefined && + _options$signal10.aborted + ) { + throw new AbortError() } + for await (const val of this) { - if (options?.signal?.aborted) { - throw new AbortError(); + var _options$signal11 + + if ( + options !== null && + options !== undefined && + (_options$signal11 = options.signal) !== null && + _options$signal11 !== undefined && + _options$signal11.aborted + ) { + throw new AbortError() } + if (number-- > 0) { - yield val; + yield val } else { - return; + return } } - }.call(this); + }.call(this) } module.exports.streamReturningOperators = { @@ -396,14 +525,13 @@ module.exports.streamReturningOperators = { filter, flatMap, map, - take, -}; - + take +} module.exports.promiseReturningOperators = { every, forEach, reduce, toArray, some, - find, -}; + find +} diff --git a/lib/internal/streams/passthrough.js b/lib/internal/streams/passthrough.js index 8ec6a791ee..55c551723e 100644 --- a/lib/internal/streams/passthrough.js +++ b/lib/internal/streams/passthrough.js @@ -18,30 +18,25 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. - // a passthrough stream. // basically just the most minimal sort of Transform stream. // Every written chunk gets output as-is. +'use strict' -'use strict'; +const { ObjectSetPrototypeOf } = require('../../ours/primordials') -const { - ObjectSetPrototypeOf, -} = require('../../ours/primordials'); +module.exports = PassThrough -module.exports = PassThrough; +const Transform = require('./transform') -const Transform = require('./transform'); -ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype); -ObjectSetPrototypeOf(PassThrough, Transform); +ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype) +ObjectSetPrototypeOf(PassThrough, Transform) function PassThrough(options) { - if (!(this instanceof PassThrough)) - return new PassThrough(options); - - Transform.call(this, options); + if (!(this instanceof PassThrough)) return new PassThrough(options) + Transform.call(this, options) } -PassThrough.prototype._transform = function(chunk, encoding, cb) { - cb(null, chunk); -}; +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk) +} diff --git a/lib/internal/streams/pipeline.js b/lib/internal/streams/pipeline.js index 815044191f..3deb0f2d9f 100644 --- a/lib/internal/streams/pipeline.js +++ b/lib/internal/streams/pipeline.js @@ -1,387 +1,416 @@ // Ported from https://github.com/mafintosh/pump with // permission from the author, Mathias Buus (@mafintosh). 
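// A hedged usage sketch of the operator groups exported above, assuming they
// are attached to Readable.prototype the way the package index wires them up;
// the sample values and the `double` helper are illustrative only.
const { Readable } = require('stream')

async function operatorsDemo() {
  const double = (n) => n * 2

  // map/filter/take return new streams; toArray resolves to an array.
  const result = await Readable.from([1, 2, 3, 4])
    .map(double)          // streamReturningOperators.map
    .filter((n) => n > 4) // streamReturningOperators.filter
    .take(1)              // streamReturningOperators.take
    .toArray()            // promiseReturningOperators.toArray

  console.log(result) // [6]
}

operatorsDemo()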
+'use strict' -'use strict'; +const { ArrayIsArray, Promise, SymbolAsyncIterator } = require('../../ours/primordials') + +const eos = require('./end-of-stream') + +const { once } = require('../../ours/util') + +const destroyImpl = require('./destroy') + +const Duplex = require('./duplex') -const { - ArrayIsArray, - Promise, - SymbolAsyncIterator, -} = require('../../ours/primordials'); - -const eos = require('./end-of-stream'); -const { once } = require('../../ours/util'); -const destroyImpl = require('./destroy'); -const Duplex = require('./duplex'); const { aggregateTwoErrors, - codes: { - ERR_INVALID_ARG_TYPE, - ERR_INVALID_RETURN_VALUE, - ERR_MISSING_ARGS, - ERR_STREAM_DESTROYED, - }, - AbortError, -} = require('../../ours/errors'); + codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE, ERR_MISSING_ARGS, ERR_STREAM_DESTROYED }, + AbortError +} = require('../../ours/errors') -const { - validateFunction, - validateAbortSignal -} = require('../validators'); +const { validateFunction, validateAbortSignal } = require('../validators') -const { - isIterable, - isReadable, - isReadableNodeStream, - isNodeStream, -} = require('./utils'); - - if (typeof AbortController === 'undefined') { - globalThis.AbortController = require('abort-controller').AbortController; - } - +const { isIterable, isReadable, isReadableNodeStream, isNodeStream } = require('./utils') + +if (typeof AbortController === 'undefined') { + globalThis.AbortController = require('abort-controller').AbortController +} -let PassThrough; -let Readable; +let PassThrough +let Readable function destroyer(stream, reading, writing) { - let finished = false; + let finished = false stream.on('close', () => { - finished = true; - }); - - const cleanup = eos(stream, { readable: reading, writable: writing }, (err) => { - finished = !err; - }); - + finished = true + }) + const cleanup = eos( + stream, + { + readable: reading, + writable: writing + }, + (err) => { + finished = !err + } + ) return { destroy: (err) => { - if (finished) return; - finished = true; - destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe')); + if (finished) return + finished = true + destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe')) }, cleanup - }; + } } function popCallback(streams) { // Streams should never be an empty array. It should always contain at least // a single stream. Therefore optimize for the average case instead of // checking for length === 0 as well. - validateFunction(streams[streams.length - 1], 'streams[stream.length - 1]'); - return streams.pop(); + validateFunction(streams[streams.length - 1], 'streams[stream.length - 1]') + return streams.pop() } function makeAsyncIterable(val) { if (isIterable(val)) { - return val; + return val } else if (isReadableNodeStream(val)) { // Legacy streams are not Iterable. 
- return fromReadable(val); + return fromReadable(val) } - throw new ERR_INVALID_ARG_TYPE( - 'val', ['Readable', 'Iterable', 'AsyncIterable'], val); + + throw new ERR_INVALID_ARG_TYPE('val', ['Readable', 'Iterable', 'AsyncIterable'], val) } async function* fromReadable(val) { if (!Readable) { - Readable = require('./readable'); + Readable = require('./readable') } - yield* Readable.prototype[SymbolAsyncIterator].call(val); + yield* Readable.prototype[SymbolAsyncIterator].call(val) } async function pump(iterable, writable, finish, { end }) { - let error; - let onresolve = null; + let error + let onresolve = null const resume = (err) => { if (err) { - error = err; + error = err } if (onresolve) { - const callback = onresolve; - onresolve = null; - callback(); + const callback = onresolve + onresolve = null + callback() } - }; + } - const wait = () => new Promise((resolve, reject) => { - if (error) { - reject(error); - } else { - onresolve = () => { - if (error) { - reject(error); - } else { - resolve(); + const wait = () => + new Promise((resolve, reject) => { + if (error) { + reject(error) + } else { + onresolve = () => { + if (error) { + reject(error) + } else { + resolve() + } } - }; - } - }); + } + }) - writable.on('drain', resume); - const cleanup = eos(writable, { readable: false }, resume); + writable.on('drain', resume) + const cleanup = eos( + writable, + { + readable: false + }, + resume + ) try { if (writable.writableNeedDrain) { - await wait(); + await wait() } for await (const chunk of iterable) { if (!writable.write(chunk)) { - await wait(); + await wait() } } if (end) { - writable.end(); + writable.end() } - await wait(); - - finish(); + await wait() + finish() } catch (err) { - finish(error !== err ? aggregateTwoErrors(error, err) : err); + finish(error !== err ? aggregateTwoErrors(error, err) : err) } finally { - cleanup(); - writable.off('drain', resume); + cleanup() + writable.off('drain', resume) } } function pipeline(...streams) { - return pipelineImpl(streams, once(popCallback(streams))); + return pipelineImpl(streams, once(popCallback(streams))) } function pipelineImpl(streams, callback, opts) { if (streams.length === 1 && ArrayIsArray(streams[0])) { - streams = streams[0]; + streams = streams[0] } if (streams.length < 2) { - throw new ERR_MISSING_ARGS('streams'); + throw new ERR_MISSING_ARGS('streams') } - const ac = new AbortController(); - const signal = ac.signal; - const outerSignal = opts?.signal; - - // Need to cleanup event listeners if last stream is readable + const ac = new AbortController() + const signal = ac.signal + const outerSignal = opts === null || opts === undefined ? undefined : opts.signal // Need to cleanup event listeners if last stream is readable // https://github.com/nodejs/node/issues/35452 - const lastStreamCleanup = []; - validateAbortSignal(outerSignal, 'options.signal'); + const lastStreamCleanup = [] + validateAbortSignal(outerSignal, 'options.signal') function abort() { - finishImpl(new AbortError()); + finishImpl(new AbortError()) } - outerSignal?.addEventListener('abort', abort); - - let error; - let value; - const destroys = []; - - let finishCount = 0; + outerSignal === null || outerSignal === undefined ? 
undefined : outerSignal.addEventListener('abort', abort) + let error + let value + const destroys = [] + let finishCount = 0 function finish(err) { - finishImpl(err, --finishCount === 0); + finishImpl(err, --finishCount === 0) } function finishImpl(err, final) { if (err && (!error || error.code === 'ERR_STREAM_PREMATURE_CLOSE')) { - error = err; + error = err } if (!error && !final) { - return; + return } while (destroys.length) { - destroys.shift()(error); + destroys.shift()(error) } - outerSignal?.removeEventListener('abort', abort); - ac.abort(); + outerSignal === null || outerSignal === undefined ? undefined : outerSignal.removeEventListener('abort', abort) + ac.abort() if (final) { if (!error) { - lastStreamCleanup.forEach((fn) => fn()); + lastStreamCleanup.forEach((fn) => fn()) } - process.nextTick(callback, error, value); + + process.nextTick(callback, error, value) } } - let ret; + let ret + for (let i = 0; i < streams.length; i++) { - const stream = streams[i]; - const reading = i < streams.length - 1; - const writing = i > 0; - const end = reading || opts?.end !== false; - const isLastStream = i === streams.length - 1; + const stream = streams[i] + const reading = i < streams.length - 1 + const writing = i > 0 + const end = reading || (opts === null || opts === undefined ? undefined : opts.end) !== false + const isLastStream = i === streams.length - 1 if (isNodeStream(stream)) { if (end) { - const { destroy, cleanup } = destroyer(stream, reading, writing); - destroys.push(destroy); + const { destroy, cleanup } = destroyer(stream, reading, writing) + destroys.push(destroy) if (isReadable(stream) && isLastStream) { - lastStreamCleanup.push(cleanup); + lastStreamCleanup.push(cleanup) } - } + } // Catch stream errors that occur after pipe/pump has completed. - // Catch stream errors that occur after pipe/pump has completed. function onError(err) { - if ( - err && - err.name !== 'AbortError' && - err.code !== 'ERR_STREAM_PREMATURE_CLOSE' - ) { - finish(err); + if (err && err.name !== 'AbortError' && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + finish(err) } } - stream.on('error', onError); + + stream.on('error', onError) + if (isReadable(stream) && isLastStream) { lastStreamCleanup.push(() => { - stream.removeListener('error', onError); - }); + stream.removeListener('error', onError) + }) } } if (i === 0) { if (typeof stream === 'function') { - ret = stream({ signal }); + ret = stream({ + signal + }) + if (!isIterable(ret)) { - throw new ERR_INVALID_RETURN_VALUE( - 'Iterable, AsyncIterable or Stream', 'source', ret); + throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or Stream', 'source', ret) } } else if (isIterable(stream) || isReadableNodeStream(stream)) { - ret = stream; + ret = stream } else { - ret = Duplex.from(stream); + ret = Duplex.from(stream) } } else if (typeof stream === 'function') { - ret = makeAsyncIterable(ret); - ret = stream(ret, { signal }); + ret = makeAsyncIterable(ret) + ret = stream(ret, { + signal + }) if (reading) { if (!isIterable(ret, true)) { - throw new ERR_INVALID_RETURN_VALUE( - 'AsyncIterable', `transform[${i - 1}]`, ret); + throw new ERR_INVALID_RETURN_VALUE('AsyncIterable', `transform[${i - 1}]`, ret) } } else { - if (!PassThrough) { - PassThrough = require('./passthrough'); - } + var _ret - // If the last argument to pipeline is not a stream + if (!PassThrough) { + PassThrough = require('./passthrough') + } // If the last argument to pipeline is not a stream // we must create a proxy stream so that pipeline(...) 
// always returns a stream which can be further // composed through `.pipe(stream)`. const pt = new PassThrough({ objectMode: true - }); - - // Handle Promises/A+ spec, `then` could be a getter that throws on + }) // Handle Promises/A+ spec, `then` could be a getter that throws on // second use. - const then = ret?.then; + + const then = (_ret = ret) === null || _ret === undefined ? undefined : _ret.then + if (typeof then === 'function') { - finishCount++; - then.call(ret, - (val) => { - value = val; - if (val != null) { - pt.write(val); - } - if (end) { - pt.end(); - } - process.nextTick(finish); - }, (err) => { - pt.destroy(err); - process.nextTick(finish, err); - }, - ); + finishCount++ + then.call( + ret, + (val) => { + value = val + + if (val != null) { + pt.write(val) + } + + if (end) { + pt.end() + } + + process.nextTick(finish) + }, + (err) => { + pt.destroy(err) + process.nextTick(finish, err) + } + ) } else if (isIterable(ret, true)) { - finishCount++; - pump(ret, pt, finish, { end }); + finishCount++ + pump(ret, pt, finish, { + end + }) } else { - throw new ERR_INVALID_RETURN_VALUE( - 'AsyncIterable or Promise', 'destination', ret); + throw new ERR_INVALID_RETURN_VALUE('AsyncIterable or Promise', 'destination', ret) } - ret = pt; + ret = pt + const { destroy, cleanup } = destroyer(ret, false, true) + destroys.push(destroy) - const { destroy, cleanup } = destroyer(ret, false, true); - destroys.push(destroy); if (isLastStream) { - lastStreamCleanup.push(cleanup); + lastStreamCleanup.push(cleanup) } } } else if (isNodeStream(stream)) { if (isReadableNodeStream(ret)) { - finishCount += 2; - const cleanup = pipe(ret, stream, finish, { end }); + finishCount += 2 + const cleanup = pipe(ret, stream, finish, { + end + }) + if (isReadable(stream) && isLastStream) { - lastStreamCleanup.push(cleanup); + lastStreamCleanup.push(cleanup) } } else if (isIterable(ret)) { - finishCount++; - pump(ret, stream, finish, { end }); + finishCount++ + pump(ret, stream, finish, { + end + }) } else { - throw new ERR_INVALID_ARG_TYPE( - 'val', ['Readable', 'Iterable', 'AsyncIterable'], ret); + throw new ERR_INVALID_ARG_TYPE('val', ['Readable', 'Iterable', 'AsyncIterable'], ret) } - ret = stream; + + ret = stream } else { - ret = Duplex.from(stream); + ret = Duplex.from(stream) } } - if (signal?.aborted || outerSignal?.aborted) { - process.nextTick(abort); + if ( + (signal !== null && signal !== undefined && signal.aborted) || + (outerSignal !== null && outerSignal !== undefined && outerSignal.aborted) + ) { + process.nextTick(abort) } - return ret; + return ret } function pipe(src, dst, finish, { end }) { - src.pipe(dst, { end }); + src.pipe(dst, { + end + }) if (end) { // Compat. Before node v10.12.0 stdio used to throw an error so // pipe() did/does not end() stdio destinations. // Now they allow it but "secretly" don't close the underlying fd. - src.once('end', () => dst.end()); + src.once('end', () => dst.end()) } else { - finish(); + finish() } - eos(src, { readable: true, writable: false }, (err) => { - const rState = src._readableState; - if ( - err && - err.code === 'ERR_STREAM_PREMATURE_CLOSE' && - (rState && rState.ended && !rState.errored && !rState.errorEmitted) - ) { - // Some readable streams will emit 'close' before 'end'. However, since - // this is on the readable side 'end' should still be emitted if the - // stream has been ended and no error emitted. This should be allowed in - // favor of backwards compatibility. 
Since the stream is piped to a - // destination this should not result in any observable difference. - // We don't need to check if this is a writable premature close since - // eos will only fail with premature close on the reading side for - // duplex streams. - src - .once('end', finish) - .once('error', finish); - } else { - finish(err); + eos( + src, + { + readable: true, + writable: false + }, + (err) => { + const rState = src._readableState + + if ( + err && + err.code === 'ERR_STREAM_PREMATURE_CLOSE' && + rState && + rState.ended && + !rState.errored && + !rState.errorEmitted + ) { + // Some readable streams will emit 'close' before 'end'. However, since + // this is on the readable side 'end' should still be emitted if the + // stream has been ended and no error emitted. This should be allowed in + // favor of backwards compatibility. Since the stream is piped to a + // destination this should not result in any observable difference. + // We don't need to check if this is a writable premature close since + // eos will only fail with premature close on the reading side for + // duplex streams. + src.once('end', finish).once('error', finish) + } else { + finish(err) + } } - }); - return eos(dst, { readable: false, writable: true }, finish); + ) + return eos( + dst, + { + readable: false, + writable: true + }, + finish + ) } -module.exports = { pipelineImpl, pipeline }; +module.exports = { + pipelineImpl, + pipeline +} diff --git a/lib/internal/streams/readable.js b/lib/internal/streams/readable.js index 10d7ace043..f8b899a623 100644 --- a/lib/internal/streams/readable.js +++ b/lib/internal/streams/readable.js @@ -18,8 +18,7 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
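// A minimal sketch of the callback-style pipeline() exported above; the file
// names and the uppercasing transform are illustrative, not taken from this
// patch.
const fs = require('fs')
const { pipeline } = require('stream')

pipeline(
  fs.createReadStream('input.txt'),
  async function* (source, { signal }) {
    // Async-generator transforms receive the { signal } wired up by
    // pipelineImpl, so they can stop early when the pipeline is aborted.
    for await (const chunk of source) {
      yield chunk.toString().toUpperCase()
    }
  },
  fs.createWriteStream('output.txt'),
  (err) => {
    if (err) console.error('pipeline failed:', err)
    else console.log('pipeline done')
  }
)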
- -'use strict'; +'use strict' const { ArrayPrototypeIndexOf, @@ -33,29 +32,30 @@ const { SafeSet, SymbolAsyncIterator, Symbol -} = require('../../ours/primordials'); +} = require('../../ours/primordials') -module.exports = Readable; -Readable.ReadableState = ReadableState; +module.exports = Readable +Readable.ReadableState = ReadableState -const EE = require('events'); -const { Stream, prependListener } = require('./legacy'); -const { Buffer } = require('buffer'); +const EE = require('events') -const { - addAbortSignal, -} = require('./add-abort-signal'); -const eos = require('./end-of-stream'); +const { Stream, prependListener } = require('./legacy') + +const { Buffer } = require('buffer') + +const { addAbortSignal } = require('./add-abort-signal') + +const eos = require('./end-of-stream') let debug = require('util').debuglog('stream', (fn) => { - debug = fn; -}); -const BufferList = require('./buffer_list'); -const destroyImpl = require('./destroy'); -const { - getHighWaterMark, - getDefaultHighWaterMark -} = require('./state'); + debug = fn +}) + +const BufferList = require('./buffer_list') + +const destroyImpl = require('./destroy') + +const { getHighWaterMark, getDefaultHighWaterMark } = require('./state') const { aggregateTwoErrors, @@ -64,21 +64,24 @@ const { ERR_METHOD_NOT_IMPLEMENTED, ERR_OUT_OF_RANGE, ERR_STREAM_PUSH_AFTER_EOF, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT } -} = require('../../ours/errors'); -const { validateObject } = require('../validators'); +} = require('../../ours/errors') + +const { validateObject } = require('../validators') + +const kPaused = Symbol('kPaused') + +const { StringDecoder } = require('string_decoder') -const kPaused = Symbol('kPaused'); +const from = require('./from') -const { StringDecoder } = require('string_decoder'); -const from = require('./from'); +ObjectSetPrototypeOf(Readable.prototype, Stream.prototype) +ObjectSetPrototypeOf(Readable, Stream) -ObjectSetPrototypeOf(Readable.prototype, Stream.prototype); -ObjectSetPrototypeOf(Readable, Stream); -const nop = () => {}; +const nop = () => {} -const { errorOrDestroy } = destroyImpl; +const { errorOrDestroy } = destroyImpl function ReadableState(options, stream, isDuplex) { // Duplex streams are both readable and writable, but share @@ -86,365 +89,307 @@ function ReadableState(options, stream, isDuplex) { // However, some cases require setting options to different // values for the readable and the writable sides of the duplex stream. // These options can be provided separately as readableXXX and writableXXX. - if (typeof isDuplex !== 'boolean') - isDuplex = stream instanceof require('./duplex'); - - // Object stream flag. Used to make read(n) ignore n and to + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof require('./duplex') // Object stream flag. Used to make read(n) ignore n and to // make all the buffer merging and length checks go away. - this.objectMode = !!(options && options.objectMode); - - if (isDuplex) - this.objectMode = this.objectMode || - !!(options && options.readableObjectMode); - // The point at which it stops calling _read() to fill the buffer + this.objectMode = !!(options && options.objectMode) + if (isDuplex) this.objectMode = this.objectMode || !!(options && options.readableObjectMode) // The point at which it stops calling _read() to fill the buffer // Note: 0 is a valid value, means "don't call _read preemptively ever" - this.highWaterMark = options ? 
- getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex) : - getDefaultHighWaterMark(false); - // A linked list is used to store data chunks instead of an array because the + this.highWaterMark = options + ? getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex) + : getDefaultHighWaterMark(false) // A linked list is used to store data chunks instead of an array because the // linked list can remove elements from the beginning faster than // array.shift(). - this.buffer = new BufferList(); - this.length = 0; - this.pipes = []; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; - - // Stream is still being constructed and cannot be + + this.buffer = new BufferList() + this.length = 0 + this.pipes = [] + this.flowing = null + this.ended = false + this.endEmitted = false + this.reading = false // Stream is still being constructed and cannot be // destroyed until construction finished or failed. // Async construction is opt in, therefore we start as // constructed. - this.constructed = true; - // A flag to be able to tell if the event 'readable'/'data' is emitted + this.constructed = true // A flag to be able to tell if the event 'readable'/'data' is emitted // immediately, or on a later tick. We set this to true at first, because // any actions that shouldn't happen until "later" should generally also // not happen before the first read call. - this.sync = true; - // Whenever we return null, then we set a flag to say + this.sync = true // Whenever we return null, then we set a flag to say // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - this[kPaused] = null; - // True if the error was already emitted and should not be thrown again. - this.errorEmitted = false; + this.needReadable = false + this.emittedReadable = false + this.readableListening = false + this.resumeScheduled = false + this[kPaused] = null // True if the error was already emitted and should not be thrown again. - // Should close be emitted on destroy. Defaults to true. - this.emitClose = !options || options.emitClose !== false; + this.errorEmitted = false // Should close be emitted on destroy. Defaults to true. - // Should .destroy() be called after 'end' (and potentially 'finish'). - this.autoDestroy = !options || options.autoDestroy !== false; + this.emitClose = !options || options.emitClose !== false // Should .destroy() be called after 'end' (and potentially 'finish'). - // Has it been destroyed. - this.destroyed = false; + this.autoDestroy = !options || options.autoDestroy !== false // Has it been destroyed. - // Indicates whether the stream has errored. When true no further + this.destroyed = false // Indicates whether the stream has errored. When true no further // _read calls, 'data' or 'readable' events should occur. This is needed // since when autoDestroy is disabled we need a way to tell whether the // stream has failed. - this.errored = null; - // Indicates whether the stream has finished destroying. - this.closed = false; + this.errored = null // Indicates whether the stream has finished destroying. - // True if close has been emitted or would have been emitted + this.closed = false // True if close has been emitted or would have been emitted // depending on emitClose. - this.closeEmitted = false; - // Crypto is kind of old and crusty. 
Historically, its default string + this.closeEmitted = false // Crypto is kind of old and crusty. Historically, its default string // encoding is 'binary' so we have to make this configurable. // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = (options && options.defaultEncoding) || 'utf8'; - // Ref the piped dest which we need a drain event on it + this.defaultEncoding = (options && options.defaultEncoding) || 'utf8' // Ref the piped dest which we need a drain event on it // type: null | Writable | Set. - this.awaitDrainWriters = null; - this.multiAwaitDrain = false; - // If true, a maybeReadMore has been scheduled. - this.readingMore = false; + this.awaitDrainWriters = null + this.multiAwaitDrain = false // If true, a maybeReadMore has been scheduled. - this.dataEmitted = false; + this.readingMore = false + this.dataEmitted = false + this.decoder = null + this.encoding = null - this.decoder = null; - this.encoding = null; if (options && options.encoding) { - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; + this.decoder = new StringDecoder(options.encoding) + this.encoding = options.encoding } } - function Readable(options) { - if (!(this instanceof Readable)) - return new Readable(options); - - // Checking for a Stream.Duplex instance is faster here instead of inside + if (!(this instanceof Readable)) return new Readable(options) // Checking for a Stream.Duplex instance is faster here instead of inside // the ReadableState constructor, at least with V8 6.5. - const isDuplex = this instanceof require('./duplex'); - - this._readableState = new ReadableState(options, this, isDuplex); - if (options) { - if (typeof options.read === 'function') - this._read = options.read; - - if (typeof options.destroy === 'function') - this._destroy = options.destroy; + const isDuplex = this instanceof require('./duplex') - if (typeof options.construct === 'function') - this._construct = options.construct; + this._readableState = new ReadableState(options, this, isDuplex) - if (options.signal && !isDuplex) - addAbortSignal(options.signal, this); + if (options) { + if (typeof options.read === 'function') this._read = options.read + if (typeof options.destroy === 'function') this._destroy = options.destroy + if (typeof options.construct === 'function') this._construct = options.construct + if (options.signal && !isDuplex) addAbortSignal(options.signal, this) } - Stream.call(this, options); - + Stream.call(this, options) destroyImpl.construct(this, () => { if (this._readableState.needReadable) { - maybeReadMore(this, this._readableState); + maybeReadMore(this, this._readableState) } - }); + }) } -Readable.prototype.destroy = destroyImpl.destroy; -Readable.prototype._undestroy = destroyImpl.undestroy; -Readable.prototype._destroy = function(err, cb) { - cb(err); -}; +Readable.prototype.destroy = destroyImpl.destroy +Readable.prototype._undestroy = destroyImpl.undestroy -Readable.prototype[EE.captureRejectionSymbol] = function(err) { - this.destroy(err); -}; +Readable.prototype._destroy = function (err, cb) { + cb(err) +} -// Manually shove something into the read() buffer. +Readable.prototype[EE.captureRejectionSymbol] = function (err) { + this.destroy(err) +} // Manually shove something into the read() buffer. // This returns true if the highWaterMark has not been hit yet, // similar to how Writable.write() returns true if you should // write() some more. 
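// A small sketch of the push() contract described above, assuming a custom
// Readable over an illustrative in-memory array of chunks.
const { Readable } = require('stream')

const items = ['a', 'b', 'c']

const source = new Readable({
  read() {
    // Keep pushing until push() returns false (highWaterMark reached) or the
    // data runs out; push(null) signals end-of-stream.
    let more = true
    while (more && items.length > 0) {
      more = this.push(items.shift())
    }
    if (items.length === 0) {
      this.push(null)
    }
  }
})

source.on('data', (chunk) => console.log('got', String(chunk)))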
-Readable.prototype.push = function(chunk, encoding) { - return readableAddChunk(this, chunk, encoding, false); -}; -// Unshift should *always* be something directly out of read(). -Readable.prototype.unshift = function(chunk, encoding) { - return readableAddChunk(this, chunk, encoding, true); -}; +Readable.prototype.push = function (chunk, encoding) { + return readableAddChunk(this, chunk, encoding, false) +} // Unshift should *always* be something directly out of read(). + +Readable.prototype.unshift = function (chunk, encoding) { + return readableAddChunk(this, chunk, encoding, true) +} function readableAddChunk(stream, chunk, encoding, addToFront) { - debug('readableAddChunk', chunk); - const state = stream._readableState; + debug('readableAddChunk', chunk) + const state = stream._readableState + let err - let err; if (!state.objectMode) { if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; + encoding = encoding || state.defaultEncoding + if (state.encoding !== encoding) { if (addToFront && state.encoding) { // When unshifting, if state.encoding is set, we have to save // the string in the BufferList with the state encoding. - chunk = Buffer.from(chunk, encoding).toString(state.encoding); + chunk = Buffer.from(chunk, encoding).toString(state.encoding) } else { - chunk = Buffer.from(chunk, encoding); - encoding = ''; + chunk = Buffer.from(chunk, encoding) + encoding = '' } } } else if (chunk instanceof Buffer) { - encoding = ''; + encoding = '' } else if (Stream._isUint8Array(chunk)) { - chunk = Stream._uint8ArrayToBuffer(chunk); - encoding = ''; + chunk = Stream._uint8ArrayToBuffer(chunk) + encoding = '' } else if (chunk != null) { - err = new ERR_INVALID_ARG_TYPE( - 'chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + err = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk) } } if (err) { - errorOrDestroy(stream, err); + errorOrDestroy(stream, err) } else if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); + state.reading = false + onEofChunk(stream, state) } else if (state.objectMode || (chunk && chunk.length > 0)) { if (addToFront) { - if (state.endEmitted) - errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT()); - else if (state.destroyed || state.errored) - return false; - else - addChunk(stream, state, chunk, true); + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT()) + else if (state.destroyed || state.errored) return false + else addChunk(stream, state, chunk, true) } else if (state.ended) { - errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()) } else if (state.destroyed || state.errored) { - return false; + return false } else { - state.reading = false; + state.reading = false + if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) - addChunk(stream, state, chunk, false); - else - maybeReadMore(stream, state); + chunk = state.decoder.write(chunk) + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false) + else maybeReadMore(stream, state) } else { - addChunk(stream, state, chunk, false); + addChunk(stream, state, chunk, false) } } } else if (!addToFront) { - state.reading = false; - maybeReadMore(stream, state); - } - - // We can push more data if we are below the highWaterMark. + state.reading = false + maybeReadMore(stream, state) + } // We can push more data if we are below the highWaterMark. 
// Also, if we have no data yet, we can stand some more bytes. // This is to work around cases where hwm=0, such as the repl. - return !state.ended && - (state.length < state.highWaterMark || state.length === 0); + + return !state.ended && (state.length < state.highWaterMark || state.length === 0) } function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync && - stream.listenerCount('data') > 0) { + if (state.flowing && state.length === 0 && !state.sync && stream.listenerCount('data') > 0) { // Use the guard to avoid creating `Set()` repeatedly // when we have multiple pipes. if (state.multiAwaitDrain) { - state.awaitDrainWriters.clear(); + state.awaitDrainWriters.clear() } else { - state.awaitDrainWriters = null; + state.awaitDrainWriters = null } - state.dataEmitted = true; - stream.emit('data', chunk); + state.dataEmitted = true + stream.emit('data', chunk) } else { // Update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) - state.buffer.unshift(chunk); - else - state.buffer.push(chunk); - - if (state.needReadable) - emitReadable(stream); + state.length += state.objectMode ? 1 : chunk.length + if (addToFront) state.buffer.unshift(chunk) + else state.buffer.push(chunk) + if (state.needReadable) emitReadable(stream) } - maybeReadMore(stream, state); + + maybeReadMore(stream, state) } -Readable.prototype.isPaused = function() { - const state = this._readableState; - return state[kPaused] === true || state.flowing === false; -}; - -// Backwards compatibility. -Readable.prototype.setEncoding = function(enc) { - const decoder = new StringDecoder(enc); - this._readableState.decoder = decoder; - // If setEncoding(null), decoder.encoding equals utf8. - this._readableState.encoding = this._readableState.decoder.encoding; - - const buffer = this._readableState.buffer; - // Iterate over current buffer to convert already stored Buffers: - let content = ''; +Readable.prototype.isPaused = function () { + const state = this._readableState + return state[kPaused] === true || state.flowing === false +} // Backwards compatibility. + +Readable.prototype.setEncoding = function (enc) { + const decoder = new StringDecoder(enc) + this._readableState.decoder = decoder // If setEncoding(null), decoder.encoding equals utf8. + + this._readableState.encoding = this._readableState.decoder.encoding + const buffer = this._readableState.buffer // Iterate over current buffer to convert already stored Buffers: + + let content = '' + for (const data of buffer) { - content += decoder.write(data); + content += decoder.write(data) } - buffer.clear(); - if (content !== '') - buffer.push(content); - this._readableState.length = content.length; - return this; -}; - -// Don't raise the hwm > 1GB. -const MAX_HWM = 0x40000000; + + buffer.clear() + if (content !== '') buffer.push(content) + this._readableState.length = content.length + return this +} // Don't raise the hwm > 1GB. + +const MAX_HWM = 0x40000000 + function computeNewHighWaterMark(n) { if (n > MAX_HWM) { - throw new ERR_OUT_OF_RANGE('size', '<= 1GiB', n); + throw new ERR_OUT_OF_RANGE('size', '<= 1GiB', n) } else { // Get the next highest power of 2 to prevent increasing hwm excessively in // tiny amounts. 
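// Worked example of the rounding below (illustrative numbers): for n = 1000,
// n-- gives 999, the chained ORs with the shifted value set every bit below
// the highest set bit (999 -> 1023), and n++ then yields 1024, the next power
// of two. A value that is already a power of two is preserved: 1024 -> 1024.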
- n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; + n-- + n |= n >>> 1 + n |= n >>> 2 + n |= n >>> 4 + n |= n >>> 8 + n |= n >>> 16 + n++ } - return n; -} -// This function is designed to be inlinable, so please take care when making + return n +} // This function is designed to be inlinable, so please take care when making // changes to the function body. + function howMuchToRead(n, state) { - if (n <= 0 || (state.length === 0 && state.ended)) - return 0; - if (state.objectMode) - return 1; + if (n <= 0 || (state.length === 0 && state.ended)) return 0 + if (state.objectMode) return 1 + if (NumberIsNaN(n)) { // Only flow one buffer at a time. - if (state.flowing && state.length) - return state.buffer.first().length; - return state.length; + if (state.flowing && state.length) return state.buffer.first().length + return state.length } - if (n <= state.length) - return n; - return state.ended ? state.length : 0; -} -// You can override either this method, or the async _read(n) below. -Readable.prototype.read = function(n) { - debug('read', n); - // Same as parseInt(undefined, 10), however V8 7.3 performance regressed + if (n <= state.length) return n + return state.ended ? state.length : 0 +} // You can override either this method, or the async _read(n) below. + +Readable.prototype.read = function (n) { + debug('read', n) // Same as parseInt(undefined, 10), however V8 7.3 performance regressed // in this scenario, so we are doing it manually. + if (n === undefined) { - n = NaN; + n = NaN } else if (!NumberIsInteger(n)) { - n = NumberParseInt(n, 10); + n = NumberParseInt(n, 10) } - const state = this._readableState; - const nOrig = n; - // If we're asking for more than the current hwm, then raise the hwm. - if (n > state.highWaterMark) - state.highWaterMark = computeNewHighWaterMark(n); + const state = this._readableState + const nOrig = n // If we're asking for more than the current hwm, then raise the hwm. - if (n !== 0) - state.emittedReadable = false; - - // If we're doing read(0) to trigger a readable event, but we + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n) + if (n !== 0) state.emittedReadable = false // If we're doing read(0) to trigger a readable event, but we // already have a bunch of data in the buffer, then just trigger // the 'readable' event and move on. - if (n === 0 && - state.needReadable && - ((state.highWaterMark !== 0 ? - state.length >= state.highWaterMark : - state.length > 0) || - state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) - endReadable(this); - else - emitReadable(this); - return null; + + if ( + n === 0 && + state.needReadable && + ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended) + ) { + debug('read: emitReadable', state.length, state.ended) + if (state.length === 0 && state.ended) endReadable(this) + else emitReadable(this) + return null } - n = howMuchToRead(n, state); + n = howMuchToRead(n, state) // If we've ended, and we're now clear, then finish it up. - // If we've ended, and we're now clear, then finish it up. if (n === 0 && state.ended) { - if (state.length === 0) - endReadable(this); - return null; - } - - // All the actual chunk generation logic needs to be + if (state.length === 0) endReadable(this) + return null + } // All the actual chunk generation logic needs to be // *below* the call to _read. 
The reason is that in certain // synthetic stream cases, such as passthrough streams, _read // may be a completely synchronous operation which may change @@ -465,156 +410,144 @@ Readable.prototype.read = function(n) { // 'readable' etc. // // 3. Actually pull the requested chunks out of the buffer and return. - // if we need a readable event, then we need to do some reading. - let doRead = state.needReadable; - debug('need readable', doRead); - // If we currently have less than the highWaterMark, then also read some. - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } + let doRead = state.needReadable + debug('need readable', doRead) // If we currently have less than the highWaterMark, then also read some. - // However, if we've ended, then there's no point, if we're already + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true + debug('length less than watermark', doRead) + } // However, if we've ended, then there's no point, if we're already // reading, then it's unnecessary, if we're constructing we have to wait, // and if we're destroyed or errored, then it's not allowed, - if (state.ended || state.reading || state.destroyed || state.errored || - !state.constructed) { - doRead = false; - debug('reading, ended or constructing', doRead); + + if (state.ended || state.reading || state.destroyed || state.errored || !state.constructed) { + doRead = false + debug('reading, ended or constructing', doRead) } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // If the length is currently zero, then we *need* a readable event. - if (state.length === 0) - state.needReadable = true; - - // Call internal read method + debug('do read') + state.reading = true + state.sync = true // If the length is currently zero, then we *need* a readable event. + + if (state.length === 0) state.needReadable = true // Call internal read method + try { - this._read(state.highWaterMark); + this._read(state.highWaterMark) } catch (err) { - errorOrDestroy(this, err); + errorOrDestroy(this, err) } - state.sync = false; - // If _read pushed data synchronously, then `reading` will be false, + state.sync = false // If _read pushed data synchronously, then `reading` will be false, // and we need to re-evaluate how much data we can return to the user. - if (!state.reading) - n = howMuchToRead(nOrig, state); + + if (!state.reading) n = howMuchToRead(nOrig, state) } - let ret; - if (n > 0) - ret = fromList(n, state); - else - ret = null; + let ret + if (n > 0) ret = fromList(n, state) + else ret = null if (ret === null) { - state.needReadable = state.length <= state.highWaterMark; - n = 0; + state.needReadable = state.length <= state.highWaterMark + n = 0 } else { - state.length -= n; + state.length -= n + if (state.multiAwaitDrain) { - state.awaitDrainWriters.clear(); + state.awaitDrainWriters.clear() } else { - state.awaitDrainWriters = null; + state.awaitDrainWriters = null } } if (state.length === 0) { // If we have nothing in the buffer, then we want to know // as soon as we *do* get something into the buffer. - if (!state.ended) - state.needReadable = true; + if (!state.ended) state.needReadable = true // If we tried to read() past the EOF, then emit end on the next tick. - // If we tried to read() past the EOF, then emit end on the next tick. 
- if (nOrig !== n && state.ended) - endReadable(this); + if (nOrig !== n && state.ended) endReadable(this) } if (ret !== null && !state.errorEmitted && !state.closeEmitted) { - state.dataEmitted = true; - this.emit('data', ret); + state.dataEmitted = true + this.emit('data', ret) } - return ret; -}; + return ret +} function onEofChunk(stream, state) { - debug('onEofChunk'); - if (state.ended) return; + debug('onEofChunk') + if (state.ended) return + if (state.decoder) { - const chunk = state.decoder.end(); + const chunk = state.decoder.end() + if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; + state.buffer.push(chunk) + state.length += state.objectMode ? 1 : chunk.length } } - state.ended = true; + + state.ended = true if (state.sync) { // If we are sync, wait until next tick to emit the data. // Otherwise we risk emitting data in the flow() // the readable code triggers during a read() call. - emitReadable(stream); + emitReadable(stream) } else { // Emit 'readable' now to make sure it gets picked up. - state.needReadable = false; - state.emittedReadable = true; - // We have to emit readable now that we are EOF. Modules + state.needReadable = false + state.emittedReadable = true // We have to emit readable now that we are EOF. Modules // in the ecosystem (e.g. dicer) rely on this event being sync. - emitReadable_(stream); - } -} -// Don't emit readable right away in sync mode, because this can trigger + emitReadable_(stream) + } +} // Don't emit readable right away in sync mode, because this can trigger // another read() call => stack overflow. This way, it might trigger // a nextTick recursion warning, but that's not so bad. + function emitReadable(stream) { - const state = stream._readableState; - debug('emitReadable', state.needReadable, state.emittedReadable); - state.needReadable = false; + const state = stream._readableState + debug('emitReadable', state.needReadable, state.emittedReadable) + state.needReadable = false + if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - process.nextTick(emitReadable_, stream); + debug('emitReadable', state.flowing) + state.emittedReadable = true + process.nextTick(emitReadable_, stream) } } function emitReadable_(stream) { - const state = stream._readableState; - debug('emitReadable_', state.destroyed, state.length, state.ended); - if (!state.destroyed && !state.errored && (state.length || state.ended)) { - stream.emit('readable'); - state.emittedReadable = false; - } + const state = stream._readableState + debug('emitReadable_', state.destroyed, state.length, state.ended) - // The stream needs another readable event if: + if (!state.destroyed && !state.errored && (state.length || state.ended)) { + stream.emit('readable') + state.emittedReadable = false + } // The stream needs another readable event if: // 1. It is not flowing, as the flow mechanism will take // care of it. // 2. It is not ended. // 3. It is below the highWaterMark, so we can schedule // another readable later. - state.needReadable = - !state.flowing && - !state.ended && - state.length <= state.highWaterMark; - flow(stream); -} - -// At this point, the user has presumably seen the 'readable' event, + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark + flow(stream) +} // At this point, the user has presumably seen the 'readable' event, // and called read() to consume some data. 
that may have triggered // in turn another _read(n) call, in which case reading = true if // it's in progress. // However, if we're not ended, or reading, and the length < hwm, // then go ahead and try to read some more preemptively. + function maybeReadMore(stream, state) { if (!state.readingMore && state.constructed) { - state.readingMore = true; - process.nextTick(maybeReadMore_, stream, state); + state.readingMore = true + process.nextTick(maybeReadMore_, stream, state) } } @@ -642,96 +575,89 @@ function maybeReadMore_(stream, state) { // called push() with new data. In this case we skip performing more // read()s. The execution ends in this method again after the _read() ends // up calling push() with more data. - while (!state.reading && !state.ended && - (state.length < state.highWaterMark || - (state.flowing && state.length === 0))) { - const len = state.length; - debug('maybeReadMore read 0'); - stream.read(0); + while ( + !state.reading && + !state.ended && + (state.length < state.highWaterMark || (state.flowing && state.length === 0)) + ) { + const len = state.length + debug('maybeReadMore read 0') + stream.read(0) if (len === state.length) // Didn't get any data, stop spinning. - break; + break } - state.readingMore = false; -} -// Abstract method. to be overridden in specific implementation classes. + state.readingMore = false +} // Abstract method. to be overridden in specific implementation classes. // call cb(er, data) where data is <= n in length. // for virtual (non-string, non-buffer) streams, "length" is somewhat // arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function(n) { - throw new ERR_METHOD_NOT_IMPLEMENTED('_read()'); -}; -Readable.prototype.pipe = function(dest, pipeOpts) { - const src = this; - const state = this._readableState; +Readable.prototype._read = function (n) { + throw new ERR_METHOD_NOT_IMPLEMENTED('_read()') +} + +Readable.prototype.pipe = function (dest, pipeOpts) { + const src = this + const state = this._readableState if (state.pipes.length === 1) { if (!state.multiAwaitDrain) { - state.multiAwaitDrain = true; - state.awaitDrainWriters = new SafeSet( - state.awaitDrainWriters ? [state.awaitDrainWriters] : [] - ); + state.multiAwaitDrain = true + state.awaitDrainWriters = new SafeSet(state.awaitDrainWriters ? [state.awaitDrainWriters] : []) } } - state.pipes.push(dest); - debug('pipe count=%d opts=%j', state.pipes.length, pipeOpts); + state.pipes.push(dest) + debug('pipe count=%d opts=%j', state.pipes.length, pipeOpts) + const doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr + const endFn = doEnd ? onend : unpipe + if (state.endEmitted) process.nextTick(endFn) + else src.once('end', endFn) + dest.on('unpipe', onunpipe) - const doEnd = (!pipeOpts || pipeOpts.end !== false) && - dest !== process.stdout && - dest !== process.stderr; - - const endFn = doEnd ? 
onend : unpipe; - if (state.endEmitted) - process.nextTick(endFn); - else - src.once('end', endFn); - - dest.on('unpipe', onunpipe); function onunpipe(readable, unpipeInfo) { - debug('onunpipe'); + debug('onunpipe') + if (readable === src) { if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); + unpipeInfo.hasUnpiped = true + cleanup() } } } function onend() { - debug('onend'); - dest.end(); + debug('onend') + dest.end() } - let ondrain; + let ondrain + let cleanedUp = false - let cleanedUp = false; function cleanup() { - debug('cleanup'); - // Cleanup event handlers once the pipe is broken. - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); + debug('cleanup') // Cleanup event handlers once the pipe is broken. + + dest.removeListener('close', onclose) + dest.removeListener('finish', onfinish) + if (ondrain) { - dest.removeListener('drain', ondrain); + dest.removeListener('drain', ondrain) } - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', unpipe); - src.removeListener('data', ondata); - - cleanedUp = true; - // If the reader is waiting for a drain event from this + dest.removeListener('error', onerror) + dest.removeListener('unpipe', onunpipe) + src.removeListener('end', onend) + src.removeListener('end', unpipe) + src.removeListener('data', ondata) + cleanedUp = true // If the reader is waiting for a drain event from this // specific writer, then it would cause it to never start // flowing again. // So, if this is awaiting a drain, then we just call it now. // If we don't know, then assume that we are waiting for one. - if (ondrain && state.awaitDrainWriters && - (!dest._writableState || dest._writableState.needDrain)) - ondrain(); + + if (ondrain && state.awaitDrainWriters && (!dest._writableState || dest._writableState.needDrain)) ondrain() } function pause() { @@ -741,181 +667,176 @@ Readable.prototype.pipe = function(dest, pipeOpts) { // => Check whether `dest` is still a piping destination. if (!cleanedUp) { if (state.pipes.length === 1 && state.pipes[0] === dest) { - debug('false write response, pause', 0); - state.awaitDrainWriters = dest; - state.multiAwaitDrain = false; + debug('false write response, pause', 0) + state.awaitDrainWriters = dest + state.multiAwaitDrain = false } else if (state.pipes.length > 1 && state.pipes.includes(dest)) { - debug('false write response, pause', state.awaitDrainWriters.size); - state.awaitDrainWriters.add(dest); + debug('false write response, pause', state.awaitDrainWriters.size) + state.awaitDrainWriters.add(dest) } - src.pause(); + + src.pause() } + if (!ondrain) { // When the dest drains, it reduces the awaitDrain counter // on the source. This would be more elegant with a .once() // handler in flow(), but adding and removing repeatedly is // too slow. - ondrain = pipeOnDrain(src, dest); - dest.on('drain', ondrain); + ondrain = pipeOnDrain(src, dest) + dest.on('drain', ondrain) } } - src.on('data', ondata); + src.on('data', ondata) + function ondata(chunk) { - debug('ondata'); - const ret = dest.write(chunk); - debug('dest.write', ret); + debug('ondata') + const ret = dest.write(chunk) + debug('dest.write', ret) + if (ret === false) { - pause(); + pause() } - } - - // If the dest has an error, then stop piping into it. + } // If the dest has an error, then stop piping into it. // However, don't suppress the throwing behavior for this. 
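
// Illustrative sketch, public API only (not taken from the diff above): the
// pause-on-false-write / resume-on-'drain' cycle that pipe() wires up, written
// out by hand. The slow destination is a made-up example; pipe() performs the
// same bookkeeping internally via pause() and pipeOnDrain().
const { Readable, Writable } = require('stream')

const slow = new Writable({
  highWaterMark: 1,
  write(chunk, encoding, callback) {
    setTimeout(callback, 10) // pretend the destination drains slowly
  }
})

const fast = Readable.from(['a', 'b', 'c'])

fast.on('data', (chunk) => {
  if (!slow.write(chunk)) {
    fast.pause()                            // mirrors pause() above
    slow.once('drain', () => fast.resume()) // mirrors pipeOnDrain() above
  }
})
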
+ function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); + debug('onerror', er) + unpipe() + dest.removeListener('error', onerror) + if (dest.listenerCount('error') === 0) { - const s = dest._writableState || dest._readableState; + const s = dest._writableState || dest._readableState + if (s && !s.errorEmitted) { // User incorrectly emitted 'error' directly on the stream. - errorOrDestroy(dest, er); + errorOrDestroy(dest, er) } else { - dest.emit('error', er); + dest.emit('error', er) } } - } + } // Make sure our error handler is attached before userland ones. - // Make sure our error handler is attached before userland ones. - prependListener(dest, 'error', onerror); + prependListener(dest, 'error', onerror) // Both close and finish should trigger unpipe, but only once. - // Both close and finish should trigger unpipe, but only once. function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); + dest.removeListener('finish', onfinish) + unpipe() } - dest.once('close', onclose); + + dest.once('close', onclose) + function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); + debug('onfinish') + dest.removeListener('close', onclose) + unpipe() } - dest.once('finish', onfinish); - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } + dest.once('finish', onfinish) - // Tell the dest that it's being piped to. - dest.emit('pipe', src); + function unpipe() { + debug('unpipe') + src.unpipe(dest) + } // Tell the dest that it's being piped to. - // Start the flow if it hasn't been started already. + dest.emit('pipe', src) // Start the flow if it hasn't been started already. if (dest.writableNeedDrain === true) { if (state.flowing) { - pause(); + pause() } } else if (!state.flowing) { - debug('pipe resume'); - src.resume(); + debug('pipe resume') + src.resume() } - return dest; -}; + return dest +} function pipeOnDrain(src, dest) { return function pipeOnDrainFunctionResult() { - const state = src._readableState; - - // `ondrain` will call directly, + const state = src._readableState // `ondrain` will call directly, // `this` maybe not a reference to dest, // so we use the real dest here. + if (state.awaitDrainWriters === dest) { - debug('pipeOnDrain', 1); - state.awaitDrainWriters = null; + debug('pipeOnDrain', 1) + state.awaitDrainWriters = null } else if (state.multiAwaitDrain) { - debug('pipeOnDrain', state.awaitDrainWriters.size); - state.awaitDrainWriters.delete(dest); + debug('pipeOnDrain', state.awaitDrainWriters.size) + state.awaitDrainWriters.delete(dest) } - if ((!state.awaitDrainWriters || state.awaitDrainWriters.size === 0) && - src.listenerCount('data')) { - src.resume(); + if ((!state.awaitDrainWriters || state.awaitDrainWriters.size === 0) && src.listenerCount('data')) { + src.resume() } - }; + } } +Readable.prototype.unpipe = function (dest) { + const state = this._readableState + const unpipeInfo = { + hasUnpiped: false + } // If we're not piping anywhere, then do nothing. -Readable.prototype.unpipe = function(dest) { - const state = this._readableState; - const unpipeInfo = { hasUnpiped: false }; - - // If we're not piping anywhere, then do nothing. - if (state.pipes.length === 0) - return this; + if (state.pipes.length === 0) return this if (!dest) { // remove all. 
- const dests = state.pipes; - state.pipes = []; - this.pause(); + const dests = state.pipes + state.pipes = [] + this.pause() for (let i = 0; i < dests.length; i++) - dests[i].emit('unpipe', this, { hasUnpiped: false }); - return this; - } - - // Try to find the right one. - const index = ArrayPrototypeIndexOf(state.pipes, dest); - if (index === -1) - return this; - - state.pipes.splice(index, 1); - if (state.pipes.length === 0) - this.pause(); - - dest.emit('unpipe', this, unpipeInfo); - - return this; -}; - -// Set up data events if they are asked for + dests[i].emit('unpipe', this, { + hasUnpiped: false + }) + + return this + } // Try to find the right one. + + const index = ArrayPrototypeIndexOf(state.pipes, dest) + if (index === -1) return this + state.pipes.splice(index, 1) + if (state.pipes.length === 0) this.pause() + dest.emit('unpipe', this, unpipeInfo) + return this +} // Set up data events if they are asked for // Ensure readable listeners eventually get something. -Readable.prototype.on = function(ev, fn) { - const res = Stream.prototype.on.call(this, ev, fn); - const state = this._readableState; + +Readable.prototype.on = function (ev, fn) { + const res = Stream.prototype.on.call(this, ev, fn) + const state = this._readableState if (ev === 'data') { // Update readableListening so that resume() may be a no-op // a few lines down. This is needed to support once('readable'). - state.readableListening = this.listenerCount('readable') > 0; + state.readableListening = this.listenerCount('readable') > 0 // Try start flowing on next tick if stream isn't explicitly paused. - // Try start flowing on next tick if stream isn't explicitly paused. - if (state.flowing !== false) - this.resume(); + if (state.flowing !== false) this.resume() } else if (ev === 'readable') { if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.flowing = false; - state.emittedReadable = false; - debug('on readable', state.length, state.reading); + state.readableListening = state.needReadable = true + state.flowing = false + state.emittedReadable = false + debug('on readable', state.length, state.reading) + if (state.length) { - emitReadable(this); + emitReadable(this) } else if (!state.reading) { - process.nextTick(nReadingNextTick, this); + process.nextTick(nReadingNextTick, this) } } } - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; + return res +} + +Readable.prototype.addListener = Readable.prototype.on -Readable.prototype.removeListener = function(ev, fn) { - const res = Stream.prototype.removeListener.call(this, - ev, fn); +Readable.prototype.removeListener = function (ev, fn) { + const res = Stream.prototype.removeListener.call(this, ev, fn) if (ev === 'readable') { // We need to check if there is someone still listening to @@ -924,16 +845,16 @@ Readable.prototype.removeListener = function(ev, fn) { // support once('readable', fn) cycles. This means that calling // resume within the same tick will have no // effect. 
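
// Illustrative sketch, public API only (not taken from the diff above): what the
// on()/removeListener() handling means in practice. Attaching a 'readable'
// listener switches the stream out of flowing mode, and removing it lets a
// remaining 'data' listener resume the flow on the next tick. Demo data is made up.
const { Readable } = require('stream')

const r = Readable.from(['x', 'y'])
const onReadable = () => {}

r.on('readable', onReadable)                // readableListening = true, flowing forced off
r.on('data', (c) => console.log('data', c)) // no flow yet: 'readable' takes precedence

r.removeListener('readable', onReadable)
// updateReadableListening() runs on the next tick; with a 'data' listener still
// attached the stream should resume and deliver the chunks above.
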
- process.nextTick(updateReadableListening, this); + process.nextTick(updateReadableListening, this) } - return res; -}; -Readable.prototype.off = Readable.prototype.removeListener; + return res +} + +Readable.prototype.off = Readable.prototype.removeListener -Readable.prototype.removeAllListeners = function(ev) { - const res = Stream.prototype.removeAllListeners.apply(this, - arguments); +Readable.prototype.removeAllListeners = function (ev) { + const res = Stream.prototype.removeAllListeners.apply(this, arguments) if (ev === 'readable' || ev === undefined) { // We need to check if there is someone still listening to @@ -942,465 +863,454 @@ Readable.prototype.removeAllListeners = function(ev) { // support once('readable', fn) cycles. This means that calling // resume within the same tick will have no // effect. - process.nextTick(updateReadableListening, this); + process.nextTick(updateReadableListening, this) } - return res; -}; + return res +} function updateReadableListening(self) { - const state = self._readableState; - state.readableListening = self.listenerCount('readable') > 0; + const state = self._readableState + state.readableListening = self.listenerCount('readable') > 0 if (state.resumeScheduled && state[kPaused] === false) { // Flowing needs to be set to true now, otherwise // the upcoming resume will not flow. - state.flowing = true; - - // Crude way to check if we should resume. + state.flowing = true // Crude way to check if we should resume. } else if (self.listenerCount('data') > 0) { - self.resume(); + self.resume() } else if (!state.readableListening) { - state.flowing = null; + state.flowing = null } } function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} - -// pause() and resume() are remnants of the legacy readable stream API + debug('readable nexttick read 0') + self.read(0) +} // pause() and resume() are remnants of the legacy readable stream API // If the user uses them, then switch into old mode. -Readable.prototype.resume = function() { - const state = this._readableState; + +Readable.prototype.resume = function () { + const state = this._readableState + if (!state.flowing) { - debug('resume'); - // We flow only if there is no one listening + debug('resume') // We flow only if there is no one listening // for readable, but we still have to call // resume(). 
- state.flowing = !state.readableListening; - resume(this, state); + + state.flowing = !state.readableListening + resume(this, state) } - state[kPaused] = false; - return this; -}; + + state[kPaused] = false + return this +} function resume(stream, state) { if (!state.resumeScheduled) { - state.resumeScheduled = true; - process.nextTick(resume_, stream, state); + state.resumeScheduled = true + process.nextTick(resume_, stream, state) } } function resume_(stream, state) { - debug('resume', state.reading); + debug('resume', state.reading) + if (!state.reading) { - stream.read(0); + stream.read(0) } - state.resumeScheduled = false; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) - stream.read(0); + state.resumeScheduled = false + stream.emit('resume') + flow(stream) + if (state.flowing && !state.reading) stream.read(0) } -Readable.prototype.pause = function() { - debug('call pause flowing=%j', this._readableState.flowing); +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing) + if (this._readableState.flowing !== false) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); + debug('pause') + this._readableState.flowing = false + this.emit('pause') } - this._readableState[kPaused] = true; - return this; -}; -function flow(stream) { - const state = stream._readableState; - debug('flow', state.flowing); - while (state.flowing && stream.read() !== null); + this._readableState[kPaused] = true + return this } -// Wrap an old-style stream as the async data source. +function flow(stream) { + const state = stream._readableState + debug('flow', state.flowing) + + while (state.flowing && stream.read() !== null); +} // Wrap an old-style stream as the async data source. // This is *not* part of the readable stream interface. // It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function(stream) { - let paused = false; - // TODO (ronag): Should this.destroy(err) emit +Readable.prototype.wrap = function (stream) { + let paused = false // TODO (ronag): Should this.destroy(err) emit // 'error' on the wrapped stream? Would require // a static factory method, e.g. Readable.wrap(stream). stream.on('data', (chunk) => { if (!this.push(chunk) && stream.pause) { - paused = true; - stream.pause(); + paused = true + stream.pause() } - }); - + }) stream.on('end', () => { - this.push(null); - }); - + this.push(null) + }) stream.on('error', (err) => { - errorOrDestroy(this, err); - }); - + errorOrDestroy(this, err) + }) stream.on('close', () => { - this.destroy(); - }); - + this.destroy() + }) stream.on('destroy', () => { - this.destroy(); - }); + this.destroy() + }) this._read = () => { if (paused && stream.resume) { - paused = false; - stream.resume(); + paused = false + stream.resume() } - }; + } // Proxy all the other methods. Important when wrapping filters and duplexes. + + const streamKeys = ObjectKeys(stream) - // Proxy all the other methods. Important when wrapping filters and duplexes. 
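
// Illustrative sketch, public API only (not taken from the diff above): wrap()
// as used from the outside, adapting an old-style 'data'/'end' emitter (a plain
// EventEmitter stand-in here) to the Readable interface described above.
const { Readable } = require('stream')
const { EventEmitter } = require('events')

const legacy = new EventEmitter() // stand-in for an old-style source stream

const wrapped = new Readable({ objectMode: true }).wrap(legacy)
wrapped.on('data', (chunk) => console.log('got', chunk))

legacy.emit('data', 'old-style chunk')
legacy.emit('end')
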
- const streamKeys = ObjectKeys(stream); for (let j = 1; j < streamKeys.length; j++) { - const i = streamKeys[j]; + const i = streamKeys[j] + if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = stream[i].bind(stream); + this[i] = stream[i].bind(stream) } } - return this; -}; + return this +} -Readable.prototype[SymbolAsyncIterator] = function() { - return streamToAsyncIterator(this); -}; +Readable.prototype[SymbolAsyncIterator] = function () { + return streamToAsyncIterator(this) +} -Readable.prototype.iterator = function(options) { +Readable.prototype.iterator = function (options) { if (options !== undefined) { - validateObject(options, 'options'); + validateObject(options, 'options') } - return streamToAsyncIterator(this, options); -}; + + return streamToAsyncIterator(this, options) +} function streamToAsyncIterator(stream, options) { if (typeof stream.read !== 'function') { - stream = Readable.wrap(stream, { objectMode: true }); + stream = Readable.wrap(stream, { + objectMode: true + }) } - const iter = createAsyncIterator(stream, options); - iter.stream = stream; - return iter; + const iter = createAsyncIterator(stream, options) + iter.stream = stream + return iter } async function* createAsyncIterator(stream, options) { - let callback = nop; + let callback = nop function next(resolve) { if (this === stream) { - callback(); - callback = nop; + callback() + callback = nop } else { - callback = resolve; + callback = resolve } } - stream.on('readable', next); - - let error; - const cleanup = eos(stream, { writable: false }, (err) => { - error = err ? aggregateTwoErrors(error, err) : null; - callback(); - callback = nop; - }); + stream.on('readable', next) + let error + const cleanup = eos( + stream, + { + writable: false + }, + (err) => { + error = err ? aggregateTwoErrors(error, err) : null + callback() + callback = nop + } + ) try { while (true) { - const chunk = stream.destroyed ? null : stream.read(); + const chunk = stream.destroyed ? null : stream.read() + if (chunk !== null) { - yield chunk; + yield chunk } else if (error) { - throw error; + throw error } else if (error === null) { - return; + return } else { - await new Promise(next); + await new Promise(next) } } } catch (err) { - error = aggregateTwoErrors(error, err); - throw error; + error = aggregateTwoErrors(error, err) + throw error } finally { if ( - (error || options?.destroyOnReturn !== false) && + (error || (options === null || options === undefined ? undefined : options.destroyOnReturn) !== false) && (error === undefined || stream._readableState.autoDestroy) ) { - destroyImpl.destroyer(stream, null); + destroyImpl.destroyer(stream, null) } else { - stream.off('readable', next); - cleanup(); + stream.off('readable', next) + cleanup() } } -} - -// Making it explicit these properties are not enumerable +} // Making it explicit these properties are not enumerable // because otherwise some prototype manipulation in // userland will fail. + ObjectDefineProperties(Readable.prototype, { readable: { get() { - const r = this._readableState; - // r.readable === false means that this is part of a Duplex stream + const r = this._readableState // r.readable === false means that this is part of a Duplex stream // where the readable side was disabled upon construction. // Compat. The user might manually disable readable side through // deprecated setter. 
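
// Illustrative sketch, public API only (not taken from the diff above): consuming
// a Readable through the Symbol.asyncIterator / iterator() support implemented
// above. The sample data is made up.
const { Readable } = require('stream')

async function main() {
  const r = Readable.from(['a', 'b', 'c'])

  for await (const chunk of r) { // drives createAsyncIterator() above
    console.log(chunk)
  }

  // With the default destroyOnReturn/autoDestroy behaviour the stream should be
  // destroyed once iteration completes.
  console.log(r.destroyed)
}

main()
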
- return !!r && r.readable !== false && !r.destroyed && !r.errorEmitted && - !r.endEmitted; + + return !!r && r.readable !== false && !r.destroyed && !r.errorEmitted && !r.endEmitted }, + set(val) { // Backwards compat. if (this._readableState) { - this._readableState.readable = !!val; + this._readableState.readable = !!val } } }, - readableDidRead: { enumerable: false, - get: function() { - return this._readableState.dataEmitted; + get: function () { + return this._readableState.dataEmitted } }, - readableAborted: { enumerable: false, - get: function() { + get: function () { return !!( this._readableState.readable !== false && (this._readableState.destroyed || this._readableState.errored) && !this._readableState.endEmitted - ); + ) } }, - readableHighWaterMark: { enumerable: false, - get: function() { - return this._readableState.highWaterMark; + get: function () { + return this._readableState.highWaterMark } }, - readableBuffer: { enumerable: false, - get: function() { - return this._readableState && this._readableState.buffer; + get: function () { + return this._readableState && this._readableState.buffer } }, - readableFlowing: { enumerable: false, - get: function() { - return this._readableState.flowing; + get: function () { + return this._readableState.flowing }, - set: function(state) { + set: function (state) { if (this._readableState) { - this._readableState.flowing = state; + this._readableState.flowing = state } } }, - readableLength: { enumerable: false, + get() { - return this._readableState.length; + return this._readableState.length } }, - readableObjectMode: { enumerable: false, + get() { - return this._readableState ? this._readableState.objectMode : false; + return this._readableState ? this._readableState.objectMode : false } }, - readableEncoding: { enumerable: false, + get() { - return this._readableState ? this._readableState.encoding : null; + return this._readableState ? this._readableState.encoding : null } }, - errored: { enumerable: false, + get() { - return this._readableState ? this._readableState.errored : null; + return this._readableState ? this._readableState.errored : null } }, - closed: { get() { - return this._readableState ? this._readableState.closed : false; + return this._readableState ? this._readableState.closed : false } }, - destroyed: { enumerable: false, + get() { - return this._readableState ? this._readableState.destroyed : false; + return this._readableState ? this._readableState.destroyed : false }, + set(value) { // We ignore the value if the stream // has not been initialized yet. if (!this._readableState) { - return; - } - - // Backward compatibility, the user is explicitly + return + } // Backward compatibility, the user is explicitly // managing destroyed. - this._readableState.destroyed = value; + + this._readableState.destroyed = value } }, - readableEnded: { enumerable: false, + get() { - return this._readableState ? this._readableState.endEmitted : false; + return this._readableState ? this._readableState.endEmitted : false } - }, - -}); - + } +}) ObjectDefineProperties(ReadableState.prototype, { // Legacy getter for `pipesCount`. pipesCount: { get() { - return this.pipes.length; + return this.pipes.length } }, - // Legacy property for `paused`. paused: { get() { - return this[kPaused] !== false; + return this[kPaused] !== false }, + set(value) { - this[kPaused] = !!value; + this[kPaused] = !!value } } -}); +}) // Exposed for testing purposes only. -// Exposed for testing purposes only. 
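
// Illustrative sketch, public API only (not taken from the diff above): the
// non-enumerable getters defined above are observable on any Readable. The
// values in the comments are what one would typically see, not guaranteed by
// this patch.
const { Readable } = require('stream')

const r = Readable.from(['hello'])
console.log(r.readableObjectMode)    // true for Readable.from()
console.log(r.readableHighWaterMark) // 16 in object mode by default
console.log(r.readableFlowing)       // null before any consumer attaches

r.on('data', () => {})
console.log(r.readableFlowing)       // true once a 'data' listener resumes it
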
-Readable._fromList = fromList; - -// Pluck off n bytes from an array of buffers. +Readable._fromList = fromList // Pluck off n bytes from an array of buffers. // Length is the combined lengths of all the buffers in the list. // This function is designed to be inlinable, so please take care when making // changes to the function body. + function fromList(n, state) { // nothing buffered. - if (state.length === 0) - return null; - - let ret; - if (state.objectMode) - ret = state.buffer.shift(); + if (state.length === 0) return null + let ret + if (state.objectMode) ret = state.buffer.shift() else if (!n || n >= state.length) { // Read it all, truncate the list. - if (state.decoder) - ret = state.buffer.join(''); - else if (state.buffer.length === 1) - ret = state.buffer.first(); - else - ret = state.buffer.concat(state.length); - state.buffer.clear(); + if (state.decoder) ret = state.buffer.join('') + else if (state.buffer.length === 1) ret = state.buffer.first() + else ret = state.buffer.concat(state.length) + state.buffer.clear() } else { // read part of list. - ret = state.buffer.consume(n, state.decoder); + ret = state.buffer.consume(n, state.decoder) } - - return ret; + return ret } function endReadable(stream) { - const state = stream._readableState; + const state = stream._readableState + debug('endReadable', state.endEmitted) - debug('endReadable', state.endEmitted); if (!state.endEmitted) { - state.ended = true; - process.nextTick(endReadableNT, state, stream); + state.ended = true + process.nextTick(endReadableNT, state, stream) } } function endReadableNT(state, stream) { - debug('endReadableNT', state.endEmitted, state.length); + debug('endReadableNT', state.endEmitted, state.length) // Check that we didn't get one last unshift. - // Check that we didn't get one last unshift. - if (!state.errored && !state.closeEmitted && - !state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.emit('end'); + if (!state.errored && !state.closeEmitted && !state.endEmitted && state.length === 0) { + state.endEmitted = true + stream.emit('end') if (stream.writable && stream.allowHalfOpen === false) { - process.nextTick(endWritableNT, stream); + process.nextTick(endWritableNT, stream) } else if (state.autoDestroy) { // In case of duplex streams we need a way to detect // if the writable side is ready for autoDestroy as well. - const wState = stream._writableState; - const autoDestroy = !wState || ( - wState.autoDestroy && - // We don't expect the writable to ever 'finish' - // if writable is explicitly set to false. - (wState.finished || wState.writable === false) - ); + const wState = stream._writableState + const autoDestroy = + !wState || + (wState.autoDestroy && // We don't expect the writable to ever 'finish' + // if writable is explicitly set to false. 
+ (wState.finished || wState.writable === false)) if (autoDestroy) { - stream.destroy(); + stream.destroy() } } } } function endWritableNT(stream) { - const writable = stream.writable && !stream.writableEnded && - !stream.destroyed; + const writable = stream.writable && !stream.writableEnded && !stream.destroyed + if (writable) { - stream.end(); + stream.end() } } -Readable.from = function(iterable, opts) { - return from(Readable, iterable, opts); -}; +Readable.from = function (iterable, opts) { + return from(Readable, iterable, opts) +} -let webStreamsAdapters; +let webStreamsAdapters // Lazy to avoid circular references -// Lazy to avoid circular references function lazyWebStreams() { - if (webStreamsAdapters === undefined) - webStreamsAdapters = {}; - return webStreamsAdapters; + if (webStreamsAdapters === undefined) webStreamsAdapters = {} + return webStreamsAdapters } -Readable.fromWeb = function(readableStream, options) { - return lazyWebStreams().newStreamReadableFromReadableStream( - readableStream, - options); -}; +Readable.fromWeb = function (readableStream, options) { + return lazyWebStreams().newStreamReadableFromReadableStream(readableStream, options) +} + +Readable.toWeb = function (streamReadable) { + return lazyWebStreams().newReadableStreamFromStreamReadable(streamReadable) +} -Readable.toWeb = function(streamReadable) { - return lazyWebStreams().newReadableStreamFromStreamReadable(streamReadable); -}; +Readable.wrap = function (src, options) { + var _ref, _src$readableObjectMo -Readable.wrap = function(src, options) { return new Readable({ - objectMode: src.readableObjectMode ?? src.objectMode ?? true, + objectMode: + (_ref = + (_src$readableObjectMo = src.readableObjectMode) !== null && _src$readableObjectMo !== undefined + ? _src$readableObjectMo + : src.objectMode) !== null && _ref !== undefined + ? _ref + : true, ...options, + destroy(err, callback) { - destroyImpl.destroyer(src, err); - callback(err); + destroyImpl.destroyer(src, err) + callback(err) } - }).wrap(src); -}; + }).wrap(src) +} diff --git a/lib/internal/streams/state.js b/lib/internal/streams/state.js index 60af7be88b..e7fcebdde9 100644 --- a/lib/internal/streams/state.js +++ b/lib/internal/streams/state.js @@ -1,36 +1,33 @@ -'use strict'; +'use strict' -const { - MathFloor, - NumberIsInteger, -} = require('../../ours/primordials'); +const { MathFloor, NumberIsInteger } = require('../../ours/primordials') -const { ERR_INVALID_ARG_VALUE } = require('../../ours/errors').codes; +const { ERR_INVALID_ARG_VALUE } = require('../../ours/errors').codes function highWaterMarkFrom(options, isDuplex, duplexKey) { - return options.highWaterMark != null ? options.highWaterMark : - isDuplex ? options[duplexKey] : null; + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null } function getDefaultHighWaterMark(objectMode) { - return objectMode ? 16 : 16 * 1024; + return objectMode ? 16 : 16 * 1024 } function getHighWaterMark(state, options, duplexKey, isDuplex) { - const hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + const hwm = highWaterMarkFrom(options, isDuplex, duplexKey) + if (hwm != null) { if (!NumberIsInteger(hwm) || hwm < 0) { - const name = isDuplex ? `options.${duplexKey}` : 'options.highWaterMark'; - throw new ERR_INVALID_ARG_VALUE(name, hwm); + const name = isDuplex ? 
`options.${duplexKey}` : 'options.highWaterMark' + throw new ERR_INVALID_ARG_VALUE(name, hwm) } - return MathFloor(hwm); - } - // Default value - return getDefaultHighWaterMark(state.objectMode); + return MathFloor(hwm) + } // Default value + + return getDefaultHighWaterMark(state.objectMode) } module.exports = { getHighWaterMark, getDefaultHighWaterMark -}; +} diff --git a/lib/internal/streams/transform.js b/lib/internal/streams/transform.js index fe6f5cffa4..5a34c250be 100644 --- a/lib/internal/streams/transform.js +++ b/lib/internal/streams/transform.js @@ -18,7 +18,6 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. - // a transform stream is a readable/writable stream where you do // something with the data. Sometimes it's called a "filter", // but that's not a great name for it, since that implies a thing where @@ -60,50 +59,38 @@ // However, even in such a pathological case, only a single written chunk // would be consumed, and then the rest would wait (un-transformed) until // the results of the previous transformed chunk were consumed. +'use strict' -'use strict'; - -const { - ObjectSetPrototypeOf, - Symbol -} = require('../../ours/primordials'); +const { ObjectSetPrototypeOf, Symbol } = require('../../ours/primordials') -module.exports = Transform; -const { - ERR_METHOD_NOT_IMPLEMENTED -} = require('../../ours/errors').codes; -const Duplex = require('./duplex'); -ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype); -ObjectSetPrototypeOf(Transform, Duplex); +module.exports = Transform -const kCallback = Symbol('kCallback'); +const { ERR_METHOD_NOT_IMPLEMENTED } = require('../../ours/errors').codes -function Transform(options) { - if (!(this instanceof Transform)) - return new Transform(options); +const Duplex = require('./duplex') - Duplex.call(this, options); +ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype) +ObjectSetPrototypeOf(Transform, Duplex) +const kCallback = Symbol('kCallback') - // We have implemented the _read method, and done the other things +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options) + Duplex.call(this, options) // We have implemented the _read method, and done the other things // that Readable wants before the first _read call, so unset the // sync guard flag. - this._readableState.sync = false; - this[kCallback] = null; + this._readableState.sync = false + this[kCallback] = null if (options) { - if (typeof options.transform === 'function') - this._transform = options.transform; - - if (typeof options.flush === 'function') - this._flush = options.flush; - } - - // When the writable side finishes, then flush out anything remaining. + if (typeof options.transform === 'function') this._transform = options.transform + if (typeof options.flush === 'function') this._flush = options.flush + } // When the writable side finishes, then flush out anything remaining. // Backwards compat. Some Transform streams incorrectly implement _final // instead of or in addition to _flush. By using 'prefinish' instead of // implementing _final we continue supporting this unfortunate use case. 
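
// Illustrative sketch, public API only (not taken from the diff above): a minimal
// Transform of the kind the comment block above describes, an uppercasing filter
// with a flush() hook. Names and data are made-up examples.
const { Transform } = require('stream')

const upper = new Transform({
  transform(chunk, encoding, callback) {
    callback(null, chunk.toString().toUpperCase())
  },
  flush(callback) {
    // Runs when the writable side ends, before 'finish'/'end' are emitted.
    this.push('\n')
    callback()
  }
})

process.stdin.pipe(upper).pipe(process.stdout)
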
- this.on('prefinish', prefinish); + + this.on('prefinish', prefinish) } function final(cb) { @@ -111,54 +98,58 @@ function final(cb) { this._flush((er, data) => { if (er) { if (cb) { - cb(er); + cb(er) } else { - this.destroy(er); + this.destroy(er) } - return; + + return } if (data != null) { - this.push(data); + this.push(data) } - this.push(null); + + this.push(null) + if (cb) { - cb(); + cb() } - }); + }) } else { - this.push(null); + this.push(null) + if (cb) { - cb(); + cb() } } } function prefinish() { if (this._final !== final) { - final.call(this); + final.call(this) } } -Transform.prototype._final = final; +Transform.prototype._final = final -Transform.prototype._transform = function(chunk, encoding, callback) { - throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()'); -}; +Transform.prototype._transform = function (chunk, encoding, callback) { + throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()') +} -Transform.prototype._write = function(chunk, encoding, callback) { - const rState = this._readableState; - const wState = this._writableState; - const length = rState.length; +Transform.prototype._write = function (chunk, encoding, callback) { + const rState = this._readableState + const wState = this._writableState + const length = rState.length this._transform(chunk, encoding, (err, val) => { if (err) { - callback(err); - return; + callback(err) + return } if (val != null) { - this.push(val); + this.push(val) } if ( @@ -168,17 +159,17 @@ Transform.prototype._write = function(chunk, encoding, callback) { rState.highWaterMark === 0 || rState.length === 0 ) { - callback(); + callback() } else { - this[kCallback] = callback; + this[kCallback] = callback } - }); -}; + }) +} -Transform.prototype._read = function() { +Transform.prototype._read = function () { if (this[kCallback]) { - const callback = this[kCallback]; - this[kCallback] = null; - callback(); + const callback = this[kCallback] + this[kCallback] = null + callback() } -}; +} diff --git a/lib/internal/streams/utils.js b/lib/internal/streams/utils.js index 0ac9821710..b1aa7d8170 100644 --- a/lib/internal/streams/utils.js +++ b/lib/internal/streams/utils.js @@ -1,206 +1,214 @@ -'use strict'; +'use strict' -const { - Symbol, - SymbolAsyncIterator, - SymbolIterator, -} = require('../../ours/primordials'); +const { Symbol, SymbolAsyncIterator, SymbolIterator } = require('../../ours/primordials') -const kDestroyed = Symbol('kDestroyed'); -const kIsErrored = Symbol('kIsErrored'); -const kIsReadable = Symbol('kIsReadable'); -const kIsDisturbed = Symbol('kIsDisturbed'); +const kDestroyed = Symbol('kDestroyed') +const kIsErrored = Symbol('kIsErrored') +const kIsReadable = Symbol('kIsReadable') +const kIsDisturbed = Symbol('kIsDisturbed') function isReadableNodeStream(obj, strict = false) { + var _obj$_readableState + return !!( - obj && - typeof obj.pipe === 'function' && - typeof obj.on === 'function' && ( - !strict || - (typeof obj.pause === 'function' && typeof obj.resume === 'function') - ) && - (!obj._writableState || obj._readableState?.readable !== false) && // Duplex - (!obj._writableState || obj._readableState) // Writable has .pipe. - ); + obj && + typeof obj.pipe === 'function' && + typeof obj.on === 'function' && + (!strict || (typeof obj.pause === 'function' && typeof obj.resume === 'function')) && + (!obj._writableState || + ((_obj$_readableState = obj._readableState) === null || _obj$_readableState === undefined + ? 
undefined + : _obj$_readableState.readable) !== false) && // Duplex + (!obj._writableState || obj._readableState) + ) // Writable has .pipe. + ) } function isWritableNodeStream(obj) { + var _obj$_writableState + return !!( - obj && - typeof obj.write === 'function' && - typeof obj.on === 'function' && - (!obj._readableState || obj._writableState?.writable !== false) // Duplex - ); + ( + obj && + typeof obj.write === 'function' && + typeof obj.on === 'function' && + (!obj._readableState || + ((_obj$_writableState = obj._writableState) === null || _obj$_writableState === undefined + ? undefined + : _obj$_writableState.writable) !== false) + ) // Duplex + ) } function isDuplexNodeStream(obj) { return !!( obj && - (typeof obj.pipe === 'function' && obj._readableState) && + typeof obj.pipe === 'function' && + obj._readableState && typeof obj.on === 'function' && typeof obj.write === 'function' - ); + ) } function isNodeStream(obj) { return ( obj && - ( - obj._readableState || + (obj._readableState || obj._writableState || (typeof obj.write === 'function' && typeof obj.on === 'function') || - (typeof obj.pipe === 'function' && typeof obj.on === 'function') - ) - ); + (typeof obj.pipe === 'function' && typeof obj.on === 'function')) + ) } function isIterable(obj, isAsync) { - if (obj == null) return false; - if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function'; - if (isAsync === false) return typeof obj[SymbolIterator] === 'function'; - return typeof obj[SymbolAsyncIterator] === 'function' || - typeof obj[SymbolIterator] === 'function'; + if (obj == null) return false + if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function' + if (isAsync === false) return typeof obj[SymbolIterator] === 'function' + return typeof obj[SymbolAsyncIterator] === 'function' || typeof obj[SymbolIterator] === 'function' } function isDestroyed(stream) { - if (!isNodeStream(stream)) return null; - const wState = stream._writableState; - const rState = stream._readableState; - const state = wState || rState; - return !!(stream.destroyed || stream[kDestroyed] || state?.destroyed); -} + if (!isNodeStream(stream)) return null + const wState = stream._writableState + const rState = stream._readableState + const state = wState || rState + return !!(stream.destroyed || stream[kDestroyed] || (state !== null && state !== undefined && state.destroyed)) +} // Have been end():d. -// Have been end():d. function isWritableEnded(stream) { - if (!isWritableNodeStream(stream)) return null; - if (stream.writableEnded === true) return true; - const wState = stream._writableState; - if (wState?.errored) return false; - if (typeof wState?.ended !== 'boolean') return null; - return wState.ended; -} + if (!isWritableNodeStream(stream)) return null + if (stream.writableEnded === true) return true + const wState = stream._writableState + if (wState !== null && wState !== undefined && wState.errored) return false + if (typeof (wState === null || wState === undefined ? undefined : wState.ended) !== 'boolean') return null + return wState.ended +} // Have emitted 'finish'. -// Have emitted 'finish'. 
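
// Illustrative sketch, public API only (not taken from the diff above): the
// end():d vs 'finish' distinction that isWritableEnded()/isWritableFinished()
// track is visible on the public writableEnded/writableFinished properties.
// The slow write is a made-up example.
const { Writable } = require('stream')

const w = new Writable({
  write(chunk, encoding, callback) {
    setImmediate(callback)
  }
})

w.end('last chunk')
console.log(w.writableEnded)    // true: end() has been called
console.log(w.writableFinished) // false: the final write has not completed yet

w.on('finish', () => console.log(w.writableFinished)) // true once 'finish' fires
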
function isWritableFinished(stream, strict) { - if (!isWritableNodeStream(stream)) return null; - if (stream.writableFinished === true) return true; - const wState = stream._writableState; - if (wState?.errored) return false; - if (typeof wState?.finished !== 'boolean') return null; - return !!( - wState.finished || - (strict === false && wState.ended === true && wState.length === 0) - ); -} + if (!isWritableNodeStream(stream)) return null + if (stream.writableFinished === true) return true + const wState = stream._writableState + if (wState !== null && wState !== undefined && wState.errored) return false + if (typeof (wState === null || wState === undefined ? undefined : wState.finished) !== 'boolean') return null + return !!(wState.finished || (strict === false && wState.ended === true && wState.length === 0)) +} // Have been push(null):d. -// Have been push(null):d. function isReadableEnded(stream) { - if (!isReadableNodeStream(stream)) return null; - if (stream.readableEnded === true) return true; - const rState = stream._readableState; - if (!rState || rState.errored) return false; - if (typeof rState?.ended !== 'boolean') return null; - return rState.ended; -} + if (!isReadableNodeStream(stream)) return null + if (stream.readableEnded === true) return true + const rState = stream._readableState + if (!rState || rState.errored) return false + if (typeof (rState === null || rState === undefined ? undefined : rState.ended) !== 'boolean') return null + return rState.ended +} // Have emitted 'end'. -// Have emitted 'end'. function isReadableFinished(stream, strict) { - if (!isReadableNodeStream(stream)) return null; - const rState = stream._readableState; - if (rState?.errored) return false; - if (typeof rState?.endEmitted !== 'boolean') return null; - return !!( - rState.endEmitted || - (strict === false && rState.ended === true && rState.length === 0) - ); + if (!isReadableNodeStream(stream)) return null + const rState = stream._readableState + if (rState !== null && rState !== undefined && rState.errored) return false + if (typeof (rState === null || rState === undefined ? undefined : rState.endEmitted) !== 'boolean') return null + return !!(rState.endEmitted || (strict === false && rState.ended === true && rState.length === 0)) } function isReadable(stream) { - if (stream && stream[kIsReadable] != null) return stream[kIsReadable]; - if (typeof stream?.readable !== 'boolean') return null; - if (isDestroyed(stream)) return false; - return isReadableNodeStream(stream) && - stream.readable && - !isReadableFinished(stream); + if (stream && stream[kIsReadable] != null) return stream[kIsReadable] + if (typeof (stream === null || stream === undefined ? undefined : stream.readable) !== 'boolean') return null + if (isDestroyed(stream)) return false + return isReadableNodeStream(stream) && stream.readable && !isReadableFinished(stream) } function isWritable(stream) { - if (typeof stream?.writable !== 'boolean') return null; - if (isDestroyed(stream)) return false; - return isWritableNodeStream(stream) && - stream.writable && - !isWritableEnded(stream); + if (typeof (stream === null || stream === undefined ? 
undefined : stream.writable) !== 'boolean') return null + if (isDestroyed(stream)) return false + return isWritableNodeStream(stream) && stream.writable && !isWritableEnded(stream) } function isFinished(stream, opts) { if (!isNodeStream(stream)) { - return null; + return null } if (isDestroyed(stream)) { - return true; + return true } - if (opts?.readable !== false && isReadable(stream)) { - return false; + if ((opts === null || opts === undefined ? undefined : opts.readable) !== false && isReadable(stream)) { + return false } - if (opts?.writable !== false && isWritable(stream)) { - return false; + if ((opts === null || opts === undefined ? undefined : opts.writable) !== false && isWritable(stream)) { + return false } - return true; + return true } function isWritableErrored(stream) { + var _stream$_writableStat, _stream$_writableStat2 + if (!isNodeStream(stream)) { - return null; + return null } if (stream.writableErrored) { - return stream.writableErrored; + return stream.writableErrored } - return stream._writableState?.errored ?? null; + return (_stream$_writableStat = + (_stream$_writableStat2 = stream._writableState) === null || _stream$_writableStat2 === undefined + ? undefined + : _stream$_writableStat2.errored) !== null && _stream$_writableStat !== undefined + ? _stream$_writableStat + : null } function isReadableErrored(stream) { + var _stream$_readableStat, _stream$_readableStat2 + if (!isNodeStream(stream)) { - return null; + return null } if (stream.readableErrored) { - return stream.readableErrored; + return stream.readableErrored } - return stream._readableState?.errored ?? null; + return (_stream$_readableStat = + (_stream$_readableStat2 = stream._readableState) === null || _stream$_readableStat2 === undefined + ? undefined + : _stream$_readableStat2.errored) !== null && _stream$_readableStat !== undefined + ? _stream$_readableStat + : null } function isClosed(stream) { if (!isNodeStream(stream)) { - return null; + return null } if (typeof stream.closed === 'boolean') { - return stream.closed; + return stream.closed } - const wState = stream._writableState; - const rState = stream._readableState; + const wState = stream._writableState + const rState = stream._readableState if ( - typeof wState?.closed === 'boolean' || - typeof rState?.closed === 'boolean' + typeof (wState === null || wState === undefined ? undefined : wState.closed) === 'boolean' || + typeof (rState === null || rState === undefined ? undefined : rState.closed) === 'boolean' ) { - return wState?.closed || rState?.closed; + return ( + (wState === null || wState === undefined ? undefined : wState.closed) || + (rState === null || rState === undefined ? 
undefined : rState.closed) + ) } if (typeof stream._closed === 'boolean' && isOutgoingMessage(stream)) { - return stream._closed; + return stream._closed } - return null; + return null } function isOutgoingMessage(stream) { @@ -209,56 +217,86 @@ function isOutgoingMessage(stream) { typeof stream._defaultKeepAlive === 'boolean' && typeof stream._removedConnection === 'boolean' && typeof stream._removedContLen === 'boolean' - ); + ) } function isServerResponse(stream) { - return ( - typeof stream._sent100 === 'boolean' && - isOutgoingMessage(stream) - ); + return typeof stream._sent100 === 'boolean' && isOutgoingMessage(stream) } function isServerRequest(stream) { + var _stream$req + return ( typeof stream._consuming === 'boolean' && typeof stream._dumped === 'boolean' && - stream.req?.upgradeOrConnect === undefined - ); + ((_stream$req = stream.req) === null || _stream$req === undefined ? undefined : _stream$req.upgradeOrConnect) === + undefined + ) } function willEmitClose(stream) { - if (!isNodeStream(stream)) return null; - - const wState = stream._writableState; - const rState = stream._readableState; - const state = wState || rState; - - return (!state && isServerResponse(stream)) || !!( - state && - state.autoDestroy && - state.emitClose && - state.closed === false - ); + if (!isNodeStream(stream)) return null + const wState = stream._writableState + const rState = stream._readableState + const state = wState || rState + return ( + (!state && isServerResponse(stream)) || !!(state && state.autoDestroy && state.emitClose && state.closed === false) + ) } function isDisturbed(stream) { - return !!(stream && ( - stream[kIsDisturbed] ?? - (stream.readableDidRead || stream.readableAborted) - )); + var _stream$kIsDisturbed + + return !!( + stream && + ((_stream$kIsDisturbed = stream[kIsDisturbed]) !== null && _stream$kIsDisturbed !== undefined + ? _stream$kIsDisturbed + : stream.readableDidRead || stream.readableAborted) + ) } function isErrored(stream) { - return !!(stream && ( - stream[kIsErrored] ?? - stream.readableErrored ?? - stream.writableErrored ?? - stream._readableState?.errorEmitted ?? - stream._writableState?.errorEmitted ?? - stream._readableState?.errored ?? - stream._writableState?.errored - )); + var _ref, + _ref2, + _ref3, + _ref4, + _ref5, + _stream$kIsErrored, + _stream$_readableStat3, + _stream$_writableStat3, + _stream$_readableStat4, + _stream$_writableStat4 + + return !!( + stream && + ((_ref = + (_ref2 = + (_ref3 = + (_ref4 = + (_ref5 = + (_stream$kIsErrored = stream[kIsErrored]) !== null && _stream$kIsErrored !== undefined + ? _stream$kIsErrored + : stream.readableErrored) !== null && _ref5 !== undefined + ? _ref5 + : stream.writableErrored) !== null && _ref4 !== undefined + ? _ref4 + : (_stream$_readableStat3 = stream._readableState) === null || _stream$_readableStat3 === undefined + ? undefined + : _stream$_readableStat3.errorEmitted) !== null && _ref3 !== undefined + ? _ref3 + : (_stream$_writableStat3 = stream._writableState) === null || _stream$_writableStat3 === undefined + ? undefined + : _stream$_writableStat3.errorEmitted) !== null && _ref2 !== undefined + ? _ref2 + : (_stream$_readableStat4 = stream._readableState) === null || _stream$_readableStat4 === undefined + ? undefined + : _stream$_readableStat4.errored) !== null && _ref !== undefined + ? _ref + : (_stream$_writableStat4 = stream._writableState) === null || _stream$_writableStat4 === undefined + ? 
undefined + : _stream$_writableStat4.errored) + ) } module.exports = { @@ -286,5 +324,5 @@ module.exports = { isWritableErrored, isServerRequest, isServerResponse, - willEmitClose, -}; + willEmitClose +} diff --git a/lib/internal/streams/writable.js b/lib/internal/streams/writable.js index a0516e8876..6cb99f3a0a 100644 --- a/lib/internal/streams/writable.js +++ b/lib/internal/streams/writable.js @@ -18,12 +18,10 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. - // A bit simpler than readable streams. // Implement an async ._write(chunk, encoding, cb), and it'll handle all // the drain event emission and buffering. - -'use strict'; +'use strict' const { ArrayPrototypeSlice, @@ -34,25 +32,24 @@ const { ObjectSetPrototypeOf, StringPrototypeToLowerCase, Symbol, - SymbolHasInstance, -} = require('../../ours/primordials'); + SymbolHasInstance +} = require('../../ours/primordials') -module.exports = Writable; -Writable.WritableState = WritableState; +module.exports = Writable +Writable.WritableState = WritableState -const EE = require('events'); -const Stream = require('./legacy').Stream; -const { Buffer } = require('buffer'); -const destroyImpl = require('./destroy'); +const EE = require('events') -const { - addAbortSignal, -} = require('./add-abort-signal'); +const Stream = require('./legacy').Stream + +const { Buffer } = require('buffer') + +const destroyImpl = require('./destroy') + +const { addAbortSignal } = require('./add-abort-signal') + +const { getHighWaterMark, getDefaultHighWaterMark } = require('./state') -const { - getHighWaterMark, - getDefaultHighWaterMark -} = require('./state'); const { ERR_INVALID_ARG_TYPE, ERR_METHOD_NOT_IMPLEMENTED, @@ -63,16 +60,15 @@ const { ERR_STREAM_NULL_VALUES, ERR_STREAM_WRITE_AFTER_END, ERR_UNKNOWN_ENCODING -} = require('../../ours/errors').codes; - -const { errorOrDestroy } = destroyImpl; +} = require('../../ours/errors').codes -ObjectSetPrototypeOf(Writable.prototype, Stream.prototype); -ObjectSetPrototypeOf(Writable, Stream); +const { errorOrDestroy } = destroyImpl +ObjectSetPrototypeOf(Writable.prototype, Stream.prototype) +ObjectSetPrototypeOf(Writable, Stream) function nop() {} -const kOnFinished = Symbol('kOnFinished'); +const kOnFinished = Symbol('kOnFinished') function WritableState(options, stream, isDuplex) { // Duplex streams are both readable and writable, but share @@ -80,386 +76,329 @@ function WritableState(options, stream, isDuplex) { // However, some cases require setting options to different // values for the readable and the writable sides of the duplex stream, // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. - if (typeof isDuplex !== 'boolean') - isDuplex = stream instanceof require('./duplex'); - - // Object stream flag to indicate whether or not this stream + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof require('./duplex') // Object stream flag to indicate whether or not this stream // contains buffers or objects. 
- this.objectMode = !!(options && options.objectMode); - if (isDuplex) - this.objectMode = this.objectMode || - !!(options && options.writableObjectMode); - - // The point at which write() starts returning false + this.objectMode = !!(options && options.objectMode) + if (isDuplex) this.objectMode = this.objectMode || !!(options && options.writableObjectMode) // The point at which write() starts returning false // Note: 0 is a valid value, means that we always return false if // the entire buffer is not flushed immediately on write(). - this.highWaterMark = options ? - getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex) : - getDefaultHighWaterMark(false); - - // if _final has been called. - this.finalCalled = false; - - // drain event flag. - this.needDrain = false; - // At the start of calling end() - this.ending = false; - // When end() has been called, and returned. - this.ended = false; - // When 'finish' is emitted. - this.finished = false; - - // Has it been destroyed - this.destroyed = false; - - // Should we decode strings into buffers before passing to _write? + + this.highWaterMark = options + ? getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex) + : getDefaultHighWaterMark(false) // if _final has been called. + + this.finalCalled = false // drain event flag. + + this.needDrain = false // At the start of calling end() + + this.ending = false // When end() has been called, and returned. + + this.ended = false // When 'finish' is emitted. + + this.finished = false // Has it been destroyed + + this.destroyed = false // Should we decode strings into buffers before passing to _write? // this is here so that some node-core streams can optimize string // handling at a lower level. - const noDecode = !!(options && options.decodeStrings === false); - this.decodeStrings = !noDecode; - // Crypto is kind of old and crusty. Historically, its default string + const noDecode = !!(options && options.decodeStrings === false) + this.decodeStrings = !noDecode // Crypto is kind of old and crusty. Historically, its default string // encoding is 'binary' so we have to make this configurable. // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = (options && options.defaultEncoding) || 'utf8'; - // Not an actual buffer we keep track of, but a measurement + this.defaultEncoding = (options && options.defaultEncoding) || 'utf8' // Not an actual buffer we keep track of, but a measurement // of how much we're waiting to get pushed to some underlying // socket or file. - this.length = 0; - // A flag to see when we're in the middle of a write. - this.writing = false; + this.length = 0 // A flag to see when we're in the middle of a write. - // When true all writes will be buffered until .uncork() call. - this.corked = 0; + this.writing = false // When true all writes will be buffered until .uncork() call. - // A flag to be able to tell if the onwrite cb is called immediately, + this.corked = 0 // A flag to be able to tell if the onwrite cb is called immediately, // or on a later tick. We set this to true at first, because any // actions that shouldn't happen until "later" should generally also // not happen before the first write call. - this.sync = true; - // A flag to know if we're processing previously buffered items, which + this.sync = true // A flag to know if we're processing previously buffered items, which // may call the _write() callback in the same tick, so that we don't // end up in an overlapped onwrite situation. 
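
// Illustrative sketch, public API only (not taken from the diff above): how a few
// of the options read into WritableState above surface through the constructor.
// The concrete values are arbitrary examples.
const { Writable } = require('stream')

const w = new Writable({
  objectMode: true,  // count queued items instead of bytes
  highWaterMark: 2,  // write() starts returning false once 2 items are queued
  defaultEncoding: 'utf8',
  write(item, encoding, callback) {
    callback()
  }
})

console.log(w.writableObjectMode)    // true
console.log(w.writableHighWaterMark) // 2
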
- this.bufferProcessing = false; - // The callback that's passed to _write(chunk, cb). - this.onwrite = onwrite.bind(undefined, stream); + this.bufferProcessing = false // The callback that's passed to _write(chunk, cb). - // The callback that the user supplies to write(chunk, encoding, cb). - this.writecb = null; + this.onwrite = onwrite.bind(undefined, stream) // The callback that the user supplies to write(chunk, encoding, cb). - // The amount that is being written when _write is called. - this.writelen = 0; + this.writecb = null // The amount that is being written when _write is called. - // Storage for data passed to the afterWrite() callback in case of + this.writelen = 0 // Storage for data passed to the afterWrite() callback in case of // synchronous _write() completion. - this.afterWriteTickInfo = null; - - resetBuffer(this); - // Number of pending user-supplied write callbacks + this.afterWriteTickInfo = null + resetBuffer(this) // Number of pending user-supplied write callbacks // this must be 0 before 'finish' can be emitted. - this.pendingcb = 0; - // Stream is still being constructed and cannot be + this.pendingcb = 0 // Stream is still being constructed and cannot be // destroyed until construction finished or failed. // Async construction is opt in, therefore we start as // constructed. - this.constructed = true; - // Emit prefinish if the only thing we're waiting for is _write cbs + this.constructed = true // Emit prefinish if the only thing we're waiting for is _write cbs // This is relevant for synchronous Transform streams. - this.prefinished = false; - // True if the error was already emitted and should not be thrown again. - this.errorEmitted = false; + this.prefinished = false // True if the error was already emitted and should not be thrown again. - // Should close be emitted on destroy. Defaults to true. - this.emitClose = !options || options.emitClose !== false; + this.errorEmitted = false // Should close be emitted on destroy. Defaults to true. - // Should .destroy() be called after 'finish' (and potentially 'end'). - this.autoDestroy = !options || options.autoDestroy !== false; + this.emitClose = !options || options.emitClose !== false // Should .destroy() be called after 'finish' (and potentially 'end'). - // Indicates whether the stream has errored. When true all write() calls + this.autoDestroy = !options || options.autoDestroy !== false // Indicates whether the stream has errored. When true all write() calls // should return false. This is needed since when autoDestroy // is disabled we need a way to tell whether the stream has failed. - this.errored = null; - // Indicates whether the stream has finished destroying. - this.closed = false; + this.errored = null // Indicates whether the stream has finished destroying. - // True if close has been emitted or would have been emitted + this.closed = false // True if close has been emitted or would have been emitted // depending on emitClose. 
- this.closeEmitted = false; - this[kOnFinished] = []; + this.closeEmitted = false + this[kOnFinished] = [] } function resetBuffer(state) { - state.buffered = []; - state.bufferedIndex = 0; - state.allBuffers = true; - state.allNoop = true; + state.buffered = [] + state.bufferedIndex = 0 + state.allBuffers = true + state.allNoop = true } WritableState.prototype.getBuffer = function getBuffer() { - return ArrayPrototypeSlice(this.buffered, this.bufferedIndex); -}; + return ArrayPrototypeSlice(this.buffered, this.bufferedIndex) +} ObjectDefineProperty(WritableState.prototype, 'bufferedRequestCount', { get() { - return this.buffered.length - this.bufferedIndex; + return this.buffered.length - this.bufferedIndex } -}); +}) function Writable(options) { // Writable ctor is applied to Duplexes, too. // `realHasInstance` is necessary because using plain `instanceof` // would return false, as no `_writableState` property is attached. - // Trying to use the custom `instanceof` for Writable here will also break the // Node.js LazyTransform implementation, which has a non-trivial getter for // `_writableState` that would lead to infinite recursion. - // Checking for a Stream.Duplex instance is faster here instead of inside // the WritableState constructor, at least with V8 6.5. - const isDuplex = (this instanceof require('./duplex')); + const isDuplex = this instanceof require('./duplex') - if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) - return new Writable(options); - - this._writableState = new WritableState(options, this, isDuplex); + if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) return new Writable(options) + this._writableState = new WritableState(options, this, isDuplex) if (options) { - if (typeof options.write === 'function') - this._write = options.write; - - if (typeof options.writev === 'function') - this._writev = options.writev; - - if (typeof options.destroy === 'function') - this._destroy = options.destroy; - - if (typeof options.final === 'function') - this._final = options.final; - - if (typeof options.construct === 'function') - this._construct = options.construct; - - if (options.signal) - addAbortSignal(options.signal, this); + if (typeof options.write === 'function') this._write = options.write + if (typeof options.writev === 'function') this._writev = options.writev + if (typeof options.destroy === 'function') this._destroy = options.destroy + if (typeof options.final === 'function') this._final = options.final + if (typeof options.construct === 'function') this._construct = options.construct + if (options.signal) addAbortSignal(options.signal, this) } - Stream.call(this, options); - + Stream.call(this, options) destroyImpl.construct(this, () => { - const state = this._writableState; + const state = this._writableState if (!state.writing) { - clearBuffer(this, state); + clearBuffer(this, state) } - finishMaybe(this, state); - }); + finishMaybe(this, state) + }) } ObjectDefineProperty(Writable, SymbolHasInstance, { - value: function(object) { - if (FunctionPrototypeSymbolHasInstance(this, object)) return true; - if (this !== Writable) return false; - - return object && object._writableState instanceof WritableState; - }, -}); + value: function (object) { + if (FunctionPrototypeSymbolHasInstance(this, object)) return true + if (this !== Writable) return false + return object && object._writableState instanceof WritableState + } +}) // Otherwise people can pipe Writable streams, which is just wrong. 
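// ---------------------------------------------------------------------------
// Illustrative sketch (editorial aside, not part of this patch): how the
// constructor options handled above are typically supplied. The option names
// (objectMode, highWaterMark, write, writev, final, destroy, construct, signal)
// all appear in the code above; the sink itself and the package entry point
// ('readable-stream') are assumptions made for the example.
//
//   const { Writable } = require('readable-stream')
//
//   const sink = new Writable({
//     highWaterMark: 4,                     // write() returns false once buffered length >= 4
//     construct (cb) { cb() },              // async construction hook (state.constructed)
//     write (chunk, encoding, cb) { cb() }  // installed as _write by the options branch above
//   })
//
//   // Because of the SymbolHasInstance override above, any object carrying a
//   // WritableState in `_writableState` also satisfies `instanceof Writable`.
//   console.log(sink instanceof Writable) // true
// ---------------------------------------------------------------------------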
-// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function() { - errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); -}; +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()) +} function _write(stream, chunk, encoding, cb) { - const state = stream._writableState; + const state = stream._writableState if (typeof encoding === 'function') { - cb = encoding; - encoding = state.defaultEncoding; + cb = encoding + encoding = state.defaultEncoding } else { - if (!encoding) - encoding = state.defaultEncoding; - else if (encoding !== 'buffer' && !Buffer.isEncoding(encoding)) - throw new ERR_UNKNOWN_ENCODING(encoding); - if (typeof cb !== 'function') - cb = nop; + if (!encoding) encoding = state.defaultEncoding + else if (encoding !== 'buffer' && !Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding) + if (typeof cb !== 'function') cb = nop } if (chunk === null) { - throw new ERR_STREAM_NULL_VALUES(); + throw new ERR_STREAM_NULL_VALUES() } else if (!state.objectMode) { if (typeof chunk === 'string') { if (state.decodeStrings !== false) { - chunk = Buffer.from(chunk, encoding); - encoding = 'buffer'; + chunk = Buffer.from(chunk, encoding) + encoding = 'buffer' } } else if (chunk instanceof Buffer) { - encoding = 'buffer'; + encoding = 'buffer' } else if (Stream._isUint8Array(chunk)) { - chunk = Stream._uint8ArrayToBuffer(chunk); - encoding = 'buffer'; + chunk = Stream._uint8ArrayToBuffer(chunk) + encoding = 'buffer' } else { - throw new ERR_INVALID_ARG_TYPE( - 'chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk) } } - let err; + let err + if (state.ending) { - err = new ERR_STREAM_WRITE_AFTER_END(); + err = new ERR_STREAM_WRITE_AFTER_END() } else if (state.destroyed) { - err = new ERR_STREAM_DESTROYED('write'); + err = new ERR_STREAM_DESTROYED('write') } if (err) { - process.nextTick(cb, err); - errorOrDestroy(stream, err, true); - return err; + process.nextTick(cb, err) + errorOrDestroy(stream, err, true) + return err } - state.pendingcb++; - return writeOrBuffer(stream, state, chunk, encoding, cb); + + state.pendingcb++ + return writeOrBuffer(stream, state, chunk, encoding, cb) } -Writable.prototype.write = function(chunk, encoding, cb) { - return _write(this, chunk, encoding, cb) === true; -}; +Writable.prototype.write = function (chunk, encoding, cb) { + return _write(this, chunk, encoding, cb) === true +} -Writable.prototype.cork = function() { - this._writableState.corked++; -}; +Writable.prototype.cork = function () { + this._writableState.corked++ +} -Writable.prototype.uncork = function() { - const state = this._writableState; +Writable.prototype.uncork = function () { + const state = this._writableState if (state.corked) { - state.corked--; - - if (!state.writing) - clearBuffer(this, state); + state.corked-- + if (!state.writing) clearBuffer(this, state) } -}; +} Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { // node::ParseEncoding() requires lower case. 
- if (typeof encoding === 'string') - encoding = StringPrototypeToLowerCase(encoding); - if (!Buffer.isEncoding(encoding)) - throw new ERR_UNKNOWN_ENCODING(encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; - -// If we're already writing something, then just put this + if (typeof encoding === 'string') encoding = StringPrototypeToLowerCase(encoding) + if (!Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding) + this._writableState.defaultEncoding = encoding + return this +} // If we're already writing something, then just put this // in the queue, and wait our turn. Otherwise, call _write // If we return false, then we need a drain event, so set that flag. + function writeOrBuffer(stream, state, chunk, encoding, callback) { - const len = state.objectMode ? 1 : chunk.length; + const len = state.objectMode ? 1 : chunk.length + state.length += len // stream._write resets state.length - state.length += len; + const ret = state.length < state.highWaterMark // We must ensure that previous needDrain will not be reset to false. - // stream._write resets state.length - const ret = state.length < state.highWaterMark; - // We must ensure that previous needDrain will not be reset to false. - if (!ret) - state.needDrain = true; + if (!ret) state.needDrain = true if (state.writing || state.corked || state.errored || !state.constructed) { - state.buffered.push({ chunk, encoding, callback }); + state.buffered.push({ + chunk, + encoding, + callback + }) + if (state.allBuffers && encoding !== 'buffer') { - state.allBuffers = false; + state.allBuffers = false } + if (state.allNoop && callback !== nop) { - state.allNoop = false; + state.allNoop = false } } else { - state.writelen = len; - state.writecb = callback; - state.writing = true; - state.sync = true; - stream._write(chunk, encoding, state.onwrite); - state.sync = false; - } + state.writelen = len + state.writecb = callback + state.writing = true + state.sync = true + + stream._write(chunk, encoding, state.onwrite) - // Return false if errored or destroyed in order to break + state.sync = false + } // Return false if errored or destroyed in order to break // any synchronous while(stream.write(data)) loops. - return ret && !state.errored && !state.destroyed; + + return ret && !state.errored && !state.destroyed } function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (state.destroyed) - state.onwrite(new ERR_STREAM_DESTROYED('write')); - else if (writev) - stream._writev(chunk, state.onwrite); - else - stream._write(chunk, encoding, state.onwrite); - state.sync = false; + state.writelen = len + state.writecb = cb + state.writing = true + state.sync = true + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write')) + else if (writev) stream._writev(chunk, state.onwrite) + else stream._write(chunk, encoding, state.onwrite) + state.sync = false } function onwriteError(stream, state, er, cb) { - --state.pendingcb; - - cb(er); - // Ensure callbacks are invoked even when autoDestroy is + --state.pendingcb + cb(er) // Ensure callbacks are invoked even when autoDestroy is // not enabled. Passing `er` here doesn't make sense since // it's related to one specific write, not to the buffered // writes. - errorBuffer(state); - // This can emit error, but error must always follow cb. - errorOrDestroy(stream, er); + + errorBuffer(state) // This can emit error, but error must always follow cb. 
+ + errorOrDestroy(stream, er) } function onwrite(stream, er) { - const state = stream._writableState; - const sync = state.sync; - const cb = state.writecb; + const state = stream._writableState + const sync = state.sync + const cb = state.writecb if (typeof cb !== 'function') { - errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK()); - return; + errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK()) + return } - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; + state.writing = false + state.writecb = null + state.length -= state.writelen + state.writelen = 0 if (er) { // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 - er.stack; // eslint-disable-line no-unused-expressions + er.stack // eslint-disable-line no-unused-expressions if (!state.errored) { - state.errored = er; - } - - // In case of duplex streams we need to notify the readable side of the + state.errored = er + } // In case of duplex streams we need to notify the readable side of the // error. + if (stream._readableState && !stream._readableState.errored) { - stream._readableState.errored = er; + stream._readableState.errored = er } if (sync) { - process.nextTick(onwriteError, stream, state, er, cb); + process.nextTick(onwriteError, stream, state, er, cb) } else { - onwriteError(stream, state, er, cb); + onwriteError(stream, state, er, cb) } } else { if (state.buffered.length > state.bufferedIndex) { - clearBuffer(stream, state); + clearBuffer(stream, state) } if (sync) { @@ -467,156 +406,175 @@ function onwrite(stream, er) { // the same. In that case, we do not schedule a new nextTick(), but // rather just increase a counter, to improve performance and avoid // memory allocations. - if (state.afterWriteTickInfo !== null && - state.afterWriteTickInfo.cb === cb) { - state.afterWriteTickInfo.count++; + if (state.afterWriteTickInfo !== null && state.afterWriteTickInfo.cb === cb) { + state.afterWriteTickInfo.count++ } else { - state.afterWriteTickInfo = { count: 1, cb, stream, state }; - process.nextTick(afterWriteTick, state.afterWriteTickInfo); + state.afterWriteTickInfo = { + count: 1, + cb, + stream, + state + } + process.nextTick(afterWriteTick, state.afterWriteTickInfo) } } else { - afterWrite(stream, state, 1, cb); + afterWrite(stream, state, 1, cb) } } } function afterWriteTick({ stream, state, count, cb }) { - state.afterWriteTickInfo = null; - return afterWrite(stream, state, count, cb); + state.afterWriteTickInfo = null + return afterWrite(stream, state, count, cb) } function afterWrite(stream, state, count, cb) { - const needDrain = !state.ending && !stream.destroyed && state.length === 0 && - state.needDrain; + const needDrain = !state.ending && !stream.destroyed && state.length === 0 && state.needDrain + if (needDrain) { - state.needDrain = false; - stream.emit('drain'); + state.needDrain = false + stream.emit('drain') } while (count-- > 0) { - state.pendingcb--; - cb(); + state.pendingcb-- + cb() } if (state.destroyed) { - errorBuffer(state); + errorBuffer(state) } - finishMaybe(stream, state); -} + finishMaybe(stream, state) +} // If there's something in the buffer waiting, then invoke callbacks. -// If there's something in the buffer waiting, then invoke callbacks. function errorBuffer(state) { if (state.writing) { - return; + return } for (let n = state.bufferedIndex; n < state.buffered.length; ++n) { - const { chunk, callback } = state.buffered[n]; - const len = state.objectMode ? 
1 : chunk.length; - state.length -= len; - callback(state.errored ?? new ERR_STREAM_DESTROYED('write')); + var _state$errored + + const { chunk, callback } = state.buffered[n] + const len = state.objectMode ? 1 : chunk.length + state.length -= len + callback( + (_state$errored = state.errored) !== null && _state$errored !== undefined + ? _state$errored + : new ERR_STREAM_DESTROYED('write') + ) } - const onfinishCallbacks = state[kOnFinished].splice(0); + const onfinishCallbacks = state[kOnFinished].splice(0) + for (let i = 0; i < onfinishCallbacks.length; i++) { - onfinishCallbacks[i](state.errored ?? new ERR_STREAM_DESTROYED('end')); + var _state$errored2 + + onfinishCallbacks[i]( + (_state$errored2 = state.errored) !== null && _state$errored2 !== undefined + ? _state$errored2 + : new ERR_STREAM_DESTROYED('end') + ) } - resetBuffer(state); -} + resetBuffer(state) +} // If there's something in the buffer waiting, then process it. -// If there's something in the buffer waiting, then process it. function clearBuffer(stream, state) { - if (state.corked || - state.bufferProcessing || - state.destroyed || - !state.constructed) { - return; + if (state.corked || state.bufferProcessing || state.destroyed || !state.constructed) { + return } - const { buffered, bufferedIndex, objectMode } = state; - const bufferedLength = buffered.length - bufferedIndex; + const { buffered, bufferedIndex, objectMode } = state + const bufferedLength = buffered.length - bufferedIndex if (!bufferedLength) { - return; + return } - let i = bufferedIndex; + let i = bufferedIndex + state.bufferProcessing = true - state.bufferProcessing = true; if (bufferedLength > 1 && stream._writev) { - state.pendingcb -= bufferedLength - 1; - - const callback = state.allNoop ? nop : (err) => { - for (let n = i; n < buffered.length; ++n) { - buffered[n].callback(err); - } - }; - // Make a copy of `buffered` if it's going to be used by `callback` above, + state.pendingcb -= bufferedLength - 1 + const callback = state.allNoop + ? nop + : (err) => { + for (let n = i; n < buffered.length; ++n) { + buffered[n].callback(err) + } + } // Make a copy of `buffered` if it's going to be used by `callback` above, // since `doWrite` will mutate the array. - const chunks = state.allNoop && i === 0 ? - buffered : ArrayPrototypeSlice(buffered, i); - chunks.allBuffers = state.allBuffers; - doWrite(stream, state, true, state.length, chunks, '', callback); - - resetBuffer(state); + const chunks = state.allNoop && i === 0 ? buffered : ArrayPrototypeSlice(buffered, i) + chunks.allBuffers = state.allBuffers + doWrite(stream, state, true, state.length, chunks, '', callback) + resetBuffer(state) } else { do { - const { chunk, encoding, callback } = buffered[i]; - buffered[i++] = null; - const len = objectMode ? 1 : chunk.length; - doWrite(stream, state, false, len, chunk, encoding, callback); - } while (i < buffered.length && !state.writing); + const { chunk, encoding, callback } = buffered[i] + buffered[i++] = null + const len = objectMode ? 
1 : chunk.length + doWrite(stream, state, false, len, chunk, encoding, callback) + } while (i < buffered.length && !state.writing) if (i === buffered.length) { - resetBuffer(state); + resetBuffer(state) } else if (i > 256) { - buffered.splice(0, i); - state.bufferedIndex = 0; + buffered.splice(0, i) + state.bufferedIndex = 0 } else { - state.bufferedIndex = i; + state.bufferedIndex = i } } - state.bufferProcessing = false; + + state.bufferProcessing = false } -Writable.prototype._write = function(chunk, encoding, cb) { +Writable.prototype._write = function (chunk, encoding, cb) { if (this._writev) { - this._writev([{ chunk, encoding }], cb); + this._writev( + [ + { + chunk, + encoding + } + ], + cb + ) } else { - throw new ERR_METHOD_NOT_IMPLEMENTED('_write()'); + throw new ERR_METHOD_NOT_IMPLEMENTED('_write()') } -}; +} -Writable.prototype._writev = null; +Writable.prototype._writev = null -Writable.prototype.end = function(chunk, encoding, cb) { - const state = this._writableState; +Writable.prototype.end = function (chunk, encoding, cb) { + const state = this._writableState if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; + cb = chunk + chunk = null + encoding = null } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; + cb = encoding + encoding = null } - let err; + let err if (chunk !== null && chunk !== undefined) { - const ret = _write(this, chunk, encoding); + const ret = _write(this, chunk, encoding) + if (ret instanceof Error) { - err = ret; + err = ret } - } + } // .end() fully uncorks. - // .end() fully uncorks. if (state.corked) { - state.corked = 1; - this.uncork(); + state.corked = 1 + this.uncork() } if (err) { @@ -627,286 +585,278 @@ Writable.prototype.end = function(chunk, encoding, cb) { // hard error can be disproportionately destructive. It is not always // trivial for the user to determine whether end() needs to be called // or not. - - state.ending = true; - finishMaybe(this, state, true); - state.ended = true; + state.ending = true + finishMaybe(this, state, true) + state.ended = true } else if (state.finished) { - err = new ERR_STREAM_ALREADY_FINISHED('end'); + err = new ERR_STREAM_ALREADY_FINISHED('end') } else if (state.destroyed) { - err = new ERR_STREAM_DESTROYED('end'); + err = new ERR_STREAM_DESTROYED('end') } if (typeof cb === 'function') { if (err || state.finished) { - process.nextTick(cb, err); + process.nextTick(cb, err) } else { - state[kOnFinished].push(cb); + state[kOnFinished].push(cb) } } - return this; -}; + return this +} function needFinish(state) { - return (state.ending && - !state.destroyed && - state.constructed && - state.length === 0 && - !state.errored && - state.buffered.length === 0 && - !state.finished && - !state.writing && - !state.errorEmitted && - !state.closeEmitted); + return ( + state.ending && + !state.destroyed && + state.constructed && + state.length === 0 && + !state.errored && + state.buffered.length === 0 && + !state.finished && + !state.writing && + !state.errorEmitted && + !state.closeEmitted + ) } function callFinal(stream, state) { - let called = false; + let called = false function onFinish(err) { if (called) { - errorOrDestroy(stream, err ?? ERR_MULTIPLE_CALLBACK()); - return; + errorOrDestroy(stream, err !== null && err !== undefined ? 
err : ERR_MULTIPLE_CALLBACK()) + return } - called = true; - state.pendingcb--; + called = true + state.pendingcb-- + if (err) { - const onfinishCallbacks = state[kOnFinished].splice(0); + const onfinishCallbacks = state[kOnFinished].splice(0) + for (let i = 0; i < onfinishCallbacks.length; i++) { - onfinishCallbacks[i](err); + onfinishCallbacks[i](err) } - errorOrDestroy(stream, err, state.sync); + + errorOrDestroy(stream, err, state.sync) } else if (needFinish(state)) { - state.prefinished = true; - stream.emit('prefinish'); - // Backwards compat. Don't check state.sync here. + state.prefinished = true + stream.emit('prefinish') // Backwards compat. Don't check state.sync here. // Some streams assume 'finish' will be emitted // asynchronously relative to _final callback. - state.pendingcb++; - process.nextTick(finish, stream, state); + + state.pendingcb++ + process.nextTick(finish, stream, state) } } - state.sync = true; - state.pendingcb++; + state.sync = true + state.pendingcb++ try { - stream._final(onFinish); + stream._final(onFinish) } catch (err) { - onFinish(err); + onFinish(err) } - state.sync = false; + state.sync = false } function prefinish(stream, state) { if (!state.prefinished && !state.finalCalled) { if (typeof stream._final === 'function' && !state.destroyed) { - state.finalCalled = true; - callFinal(stream, state); + state.finalCalled = true + callFinal(stream, state) } else { - state.prefinished = true; - stream.emit('prefinish'); + state.prefinished = true + stream.emit('prefinish') } } } function finishMaybe(stream, state, sync) { if (needFinish(state)) { - prefinish(stream, state); + prefinish(stream, state) + if (state.pendingcb === 0) { if (sync) { - state.pendingcb++; - process.nextTick((stream, state) => { - if (needFinish(state)) { - finish(stream, state); - } else { - state.pendingcb--; - } - }, stream, state); + state.pendingcb++ + process.nextTick( + (stream, state) => { + if (needFinish(state)) { + finish(stream, state) + } else { + state.pendingcb-- + } + }, + stream, + state + ) } else if (needFinish(state)) { - state.pendingcb++; - finish(stream, state); + state.pendingcb++ + finish(stream, state) } } } } function finish(stream, state) { - state.pendingcb--; - state.finished = true; + state.pendingcb-- + state.finished = true + const onfinishCallbacks = state[kOnFinished].splice(0) - const onfinishCallbacks = state[kOnFinished].splice(0); for (let i = 0; i < onfinishCallbacks.length; i++) { - onfinishCallbacks[i](); + onfinishCallbacks[i]() } - stream.emit('finish'); + stream.emit('finish') if (state.autoDestroy) { // In case of duplex streams we need a way to detect // if the readable side is ready for autoDestroy as well. - const rState = stream._readableState; - const autoDestroy = !rState || ( - rState.autoDestroy && - // We don't expect the readable to ever 'end' - // if readable is explicitly set to false. - (rState.endEmitted || rState.readable === false) - ); + const rState = stream._readableState + const autoDestroy = + !rState || + (rState.autoDestroy && // We don't expect the readable to ever 'end' + // if readable is explicitly set to false. + (rState.endEmitted || rState.readable === false)) + if (autoDestroy) { - stream.destroy(); + stream.destroy() } } } ObjectDefineProperties(Writable.prototype, { - closed: { get() { - return this._writableState ? this._writableState.closed : false; + return this._writableState ? this._writableState.closed : false } }, - destroyed: { get() { - return this._writableState ? 
this._writableState.destroyed : false; + return this._writableState ? this._writableState.destroyed : false }, + set(value) { // Backward compatibility, the user is explicitly managing destroyed. if (this._writableState) { - this._writableState.destroyed = value; + this._writableState.destroyed = value } } }, - writable: { get() { - const w = this._writableState; - // w.writable === false means that this is part of a Duplex stream + const w = this._writableState // w.writable === false means that this is part of a Duplex stream // where the writable side was disabled upon construction. // Compat. The user might manually disable writable side through // deprecated setter. - return !!w && w.writable !== false && !w.destroyed && !w.errored && - !w.ending && !w.ended; + + return !!w && w.writable !== false && !w.destroyed && !w.errored && !w.ending && !w.ended }, + set(val) { // Backwards compatible. if (this._writableState) { - this._writableState.writable = !!val; + this._writableState.writable = !!val } } }, - writableFinished: { get() { - return this._writableState ? this._writableState.finished : false; + return this._writableState ? this._writableState.finished : false } }, - writableObjectMode: { get() { - return this._writableState ? this._writableState.objectMode : false; + return this._writableState ? this._writableState.objectMode : false } }, - writableBuffer: { get() { - return this._writableState && this._writableState.getBuffer(); + return this._writableState && this._writableState.getBuffer() } }, - writableEnded: { get() { - return this._writableState ? this._writableState.ending : false; + return this._writableState ? this._writableState.ending : false } }, - writableNeedDrain: { get() { - const wState = this._writableState; - if (!wState) return false; - return !wState.destroyed && !wState.ending && wState.needDrain; + const wState = this._writableState + if (!wState) return false + return !wState.destroyed && !wState.ending && wState.needDrain } }, - writableHighWaterMark: { get() { - return this._writableState && this._writableState.highWaterMark; + return this._writableState && this._writableState.highWaterMark } }, - writableCorked: { get() { - return this._writableState ? this._writableState.corked : 0; + return this._writableState ? this._writableState.corked : 0 } }, - writableLength: { get() { - return this._writableState && this._writableState.length; + return this._writableState && this._writableState.length } }, - errored: { enumerable: false, + get() { - return this._writableState ? this._writableState.errored : null; + return this._writableState ? this._writableState.errored : null } }, - writableAborted: { enumerable: false, - get: function() { + get: function () { return !!( this._writableState.writable !== false && (this._writableState.destroyed || this._writableState.errored) && !this._writableState.finished - ); + ) } - }, -}); + } +}) +const destroy = destroyImpl.destroy -const destroy = destroyImpl.destroy; -Writable.prototype.destroy = function(err, cb) { - const state = this._writableState; +Writable.prototype.destroy = function (err, cb) { + const state = this._writableState // Invoke pending callbacks. - // Invoke pending callbacks. 
- if (!state.destroyed && - (state.bufferedIndex < state.buffered.length || - state[kOnFinished].length)) { - process.nextTick(errorBuffer, state); + if (!state.destroyed && (state.bufferedIndex < state.buffered.length || state[kOnFinished].length)) { + process.nextTick(errorBuffer, state) } - destroy.call(this, err, cb); - return this; -}; + destroy.call(this, err, cb) + return this +} + +Writable.prototype._undestroy = destroyImpl.undestroy -Writable.prototype._undestroy = destroyImpl.undestroy; -Writable.prototype._destroy = function(err, cb) { - cb(err); -}; +Writable.prototype._destroy = function (err, cb) { + cb(err) +} -Writable.prototype[EE.captureRejectionSymbol] = function(err) { - this.destroy(err); -}; +Writable.prototype[EE.captureRejectionSymbol] = function (err) { + this.destroy(err) +} -let webStreamsAdapters; +let webStreamsAdapters // Lazy to avoid circular references -// Lazy to avoid circular references function lazyWebStreams() { - if (webStreamsAdapters === undefined) - webStreamsAdapters = {}; - return webStreamsAdapters; + if (webStreamsAdapters === undefined) webStreamsAdapters = {} + return webStreamsAdapters } -Writable.fromWeb = function(writableStream, options) { - return lazyWebStreams().newStreamWritableFromWritableStream( - writableStream, - options); -}; +Writable.fromWeb = function (writableStream, options) { + return lazyWebStreams().newStreamWritableFromWritableStream(writableStream, options) +} -Writable.toWeb = function(streamWritable) { - return lazyWebStreams().newWritableStreamFromStreamWritable(streamWritable); -}; +Writable.toWeb = function (streamWritable) { + return lazyWebStreams().newWritableStreamFromStreamWritable(streamWritable) +} diff --git a/lib/internal/validators.js b/lib/internal/validators.js index 89337dd068..c84abf40a7 100644 --- a/lib/internal/validators.js +++ b/lib/internal/validators.js @@ -1,4 +1,4 @@ -'use strict'; +'use strict' const { ArrayIsArray, @@ -12,37 +12,30 @@ const { RegExpPrototypeTest, String, StringPrototypeToUpperCase, - StringPrototypeTrim, -} = require('../ours/primordials'); + StringPrototypeTrim +} = require('../ours/primordials') const { hideStackFrames, - codes: { - ERR_SOCKET_BAD_PORT, - ERR_INVALID_ARG_TYPE, - ERR_INVALID_ARG_VALUE, - ERR_OUT_OF_RANGE, - ERR_UNKNOWN_SIGNAL, - } -} = require('../ours/errors'); -const { normalizeEncoding } = require('../ours/util'); -const { - isAsyncFunction, - isArrayBufferView -} = require('util').types; -const signals = {}; + codes: { ERR_SOCKET_BAD_PORT, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE, ERR_UNKNOWN_SIGNAL } +} = require('../ours/errors') + +const { normalizeEncoding } = require('../ours/util') + +const { isAsyncFunction, isArrayBufferView } = require('util').types + +const signals = {} function isInt32(value) { - return value === (value | 0); + return value === (value | 0) } function isUint32(value) { - return value === (value >>> 0); + return value === value >>> 0 } -const octalReg = /^[0-7]+$/; -const modeDesc = 'must be a 32-bit unsigned integer or an octal string'; - +const octalReg = /^[0-7]+$/ +const modeDesc = 'must be a 32-bit unsigned integer or an octal string' /** * Parse and validate values that will be converted into mode_t (the S_* * constants). Only valid numbers and octal strings are allowed. 
They could be @@ -55,94 +48,89 @@ const modeDesc = 'must be a 32-bit unsigned integer or an octal string'; * @param {number} [def] If specified, will be returned for invalid values * @returns {number} */ + function parseFileMode(value, name, def) { - if (typeof value === 'undefined') { - value = def; - } - + if (typeof value === 'undefined') { + value = def + } + if (typeof value === 'string') { if (!RegExpPrototypeTest(octalReg, value)) { - throw new ERR_INVALID_ARG_VALUE(name, value, modeDesc); + throw new ERR_INVALID_ARG_VALUE(name, value, modeDesc) } - value = NumberParseInt(value, 8); + + value = NumberParseInt(value, 8) } - validateInt32(value, name, 0, 2 ** 32 - 1); - return value; + validateInt32(value, name, 0, 2 ** 32 - 1) + return value } -const validateInteger = hideStackFrames( - (value, name, min = NumberMIN_SAFE_INTEGER, max = NumberMAX_SAFE_INTEGER) => { - if (typeof value !== 'number') - throw new ERR_INVALID_ARG_TYPE(name, 'number', value); - if (!NumberIsInteger(value)) - throw new ERR_OUT_OF_RANGE(name, 'an integer', value); - if (value < min || value > max) - throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value); +const validateInteger = hideStackFrames((value, name, min = NumberMIN_SAFE_INTEGER, max = NumberMAX_SAFE_INTEGER) => { + if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value) + if (!NumberIsInteger(value)) throw new ERR_OUT_OF_RANGE(name, 'an integer', value) + if (value < min || value > max) throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value) +}) +const validateInt32 = hideStackFrames((value, name, min = -2147483648, max = 2147483647) => { + // The defaults for min and max correspond to the limits of 32-bit integers. + if (typeof value !== 'number') { + throw new ERR_INVALID_ARG_TYPE(name, 'number', value) } -); -const validateInt32 = hideStackFrames( - (value, name, min = -2147483648, max = 2147483647) => { - // The defaults for min and max correspond to the limits of 32-bit integers. - if (typeof value !== 'number') { - throw new ERR_INVALID_ARG_TYPE(name, 'number', value); - } - if (!isInt32(value)) { - if (!NumberIsInteger(value)) { - throw new ERR_OUT_OF_RANGE(name, 'an integer', value); - } - throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value); - } - if (value < min || value > max) { - throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value); + if (!isInt32(value)) { + if (!NumberIsInteger(value)) { + throw new ERR_OUT_OF_RANGE(name, 'an integer', value) } + + throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value) } -); + if (value < min || value > max) { + throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value) + } +}) const validateUint32 = hideStackFrames((value, name, positive) => { if (typeof value !== 'number') { - throw new ERR_INVALID_ARG_TYPE(name, 'number', value); + throw new ERR_INVALID_ARG_TYPE(name, 'number', value) } + if (!isUint32(value)) { if (!NumberIsInteger(value)) { - throw new ERR_OUT_OF_RANGE(name, 'an integer', value); + throw new ERR_OUT_OF_RANGE(name, 'an integer', value) } - const min = positive ? 1 : 0; - // 2 ** 32 === 4294967296 - throw new ERR_OUT_OF_RANGE(name, `>= ${min} && < 4294967296`, value); + + const min = positive ? 
1 : 0 // 2 ** 32 === 4294967296 + + throw new ERR_OUT_OF_RANGE(name, `>= ${min} && < 4294967296`, value) } + if (positive && value === 0) { - throw new ERR_OUT_OF_RANGE(name, '>= 1 && < 4294967296', value); + throw new ERR_OUT_OF_RANGE(name, '>= 1 && < 4294967296', value) } -}); +}) function validateString(value, name) { - if (typeof value !== 'string') - throw new ERR_INVALID_ARG_TYPE(name, 'string', value); + if (typeof value !== 'string') throw new ERR_INVALID_ARG_TYPE(name, 'string', value) } function validateNumber(value, name) { - if (typeof value !== 'number') - throw new ERR_INVALID_ARG_TYPE(name, 'number', value); + if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value) } const validateOneOf = hideStackFrames((value, name, oneOf) => { if (!ArrayPrototypeIncludes(oneOf, value)) { const allowed = ArrayPrototypeJoin( - ArrayPrototypeMap(oneOf, (v) => - (typeof v === 'string' ? `'${v}'` : String(v))), - ', '); - const reason = 'must be one of: ' + allowed; - throw new ERR_INVALID_ARG_VALUE(name, value, reason); + ArrayPrototypeMap(oneOf, (v) => (typeof v === 'string' ? `'${v}'` : String(v))), + ', ' + ) + const reason = 'must be one of: ' + allowed + throw new ERR_INVALID_ARG_VALUE(name, value, reason) } -}); +}) function validateBoolean(value, name) { - if (typeof value !== 'boolean') - throw new ERR_INVALID_ARG_TYPE(name, 'boolean', value); + if (typeof value !== 'boolean') throw new ERR_INVALID_ARG_TYPE(name, 'boolean', value) } - /** * @param {unknown} value * @param {string} name @@ -152,99 +140,88 @@ function validateBoolean(value, name) { * nullable?: boolean * }} [options] */ -const validateObject = hideStackFrames( - (value, name, options) => { - const useDefaultOptions = options == null; - const allowArray = useDefaultOptions ? false : options.allowArray; - const allowFunction = useDefaultOptions ? false : options.allowFunction; - const nullable = useDefaultOptions ? false : options.nullable; - if ((!nullable && value === null) || - (!allowArray && ArrayIsArray(value)) || - (typeof value !== 'object' && ( - !allowFunction || typeof value !== 'function' - ))) { - throw new ERR_INVALID_ARG_TYPE(name, 'Object', value); - } - }); +const validateObject = hideStackFrames((value, name, options) => { + const useDefaultOptions = options == null + const allowArray = useDefaultOptions ? false : options.allowArray + const allowFunction = useDefaultOptions ? false : options.allowFunction + const nullable = useDefaultOptions ? 
false : options.nullable + + if ( + (!nullable && value === null) || + (!allowArray && ArrayIsArray(value)) || + (typeof value !== 'object' && (!allowFunction || typeof value !== 'function')) + ) { + throw new ERR_INVALID_ARG_TYPE(name, 'Object', value) + } +}) const validateArray = hideStackFrames((value, name, minLength = 0) => { if (!ArrayIsArray(value)) { - throw new ERR_INVALID_ARG_TYPE(name, 'Array', value); + throw new ERR_INVALID_ARG_TYPE(name, 'Array', value) } + if (value.length < minLength) { - const reason = `must be longer than ${minLength}`; - throw new ERR_INVALID_ARG_VALUE(name, value, reason); + const reason = `must be longer than ${minLength}` + throw new ERR_INVALID_ARG_VALUE(name, value, reason) } -}); +}) function validateSignalName(signal, name = 'signal') { - validateString(signal, name); + validateString(signal, name) if (signals[signal] === undefined) { if (signals[StringPrototypeToUpperCase(signal)] !== undefined) { - throw new ERR_UNKNOWN_SIGNAL(signal + - ' (signals must use all capital letters)'); + throw new ERR_UNKNOWN_SIGNAL(signal + ' (signals must use all capital letters)') } - throw new ERR_UNKNOWN_SIGNAL(signal); + throw new ERR_UNKNOWN_SIGNAL(signal) } } const validateBuffer = hideStackFrames((buffer, name = 'buffer') => { if (!isArrayBufferView(buffer)) { - throw new ERR_INVALID_ARG_TYPE(name, - ['Buffer', 'TypedArray', 'DataView'], - buffer); + throw new ERR_INVALID_ARG_TYPE(name, ['Buffer', 'TypedArray', 'DataView'], buffer) } -}); +}) function validateEncoding(data, encoding) { - const normalizedEncoding = normalizeEncoding(encoding); - const length = data.length; + const normalizedEncoding = normalizeEncoding(encoding) + const length = data.length if (normalizedEncoding === 'hex' && length % 2 !== 0) { - throw new ERR_INVALID_ARG_VALUE('encoding', encoding, - `is invalid for data of length ${length}`); + throw new ERR_INVALID_ARG_VALUE('encoding', encoding, `is invalid for data of length ${length}`) } -} - -// Check that the port number is not NaN when coerced to a number, +} // Check that the port number is not NaN when coerced to a number, // is an integer and that it falls within the legal range of port numbers. 
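// Illustrative expectations for the function below (editorial aside, inferred from the
// checks it performs rather than taken from the original file): validatePort(8080) and
// validatePort('8080') both return 8080, while validatePort(65536), validatePort(-1),
// validatePort('') and validatePort(0, 'Port', false) are expected to throw
// ERR_SOCKET_BAD_PORT.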
+ function validatePort(port, name = 'Port', allowZero = true) { - if ((typeof port !== 'number' && typeof port !== 'string') || - (typeof port === 'string' && StringPrototypeTrim(port).length === 0) || - +port !== (+port >>> 0) || - port > 0xFFFF || - (port === 0 && !allowZero)) { - throw new ERR_SOCKET_BAD_PORT(name, port, allowZero); + if ( + (typeof port !== 'number' && typeof port !== 'string') || + (typeof port === 'string' && StringPrototypeTrim(port).length === 0) || + +port !== +port >>> 0 || + port > 0xffff || + (port === 0 && !allowZero) + ) { + throw new ERR_SOCKET_BAD_PORT(name, port, allowZero) } - return port | 0; + + return port | 0 } const validateAbortSignal = hideStackFrames((signal, name) => { - if (signal !== undefined && - (signal === null || - typeof signal !== 'object' || - !('aborted' in signal))) { - throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal); + if (signal !== undefined && (signal === null || typeof signal !== 'object' || !('aborted' in signal))) { + throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal) } -}); - +}) const validateFunction = hideStackFrames((value, name) => { - if (typeof value !== 'function') - throw new ERR_INVALID_ARG_TYPE(name, 'Function', value); -}); - + if (typeof value !== 'function') throw new ERR_INVALID_ARG_TYPE(name, 'Function', value) +}) const validatePlainFunction = hideStackFrames((value, name) => { - if (typeof value !== 'function' || isAsyncFunction(value)) - throw new ERR_INVALID_ARG_TYPE(name, 'Function', value); -}); - + if (typeof value !== 'function' || isAsyncFunction(value)) throw new ERR_INVALID_ARG_TYPE(name, 'Function', value) +}) const validateUndefined = hideStackFrames((value, name) => { - if (value !== undefined) - throw new ERR_INVALID_ARG_TYPE(name, 'undefined', value); -}); - + if (value !== undefined) throw new ERR_INVALID_ARG_TYPE(name, 'undefined', value) +}) module.exports = { isInt32, isUint32, @@ -265,5 +242,5 @@ module.exports = { validateString, validateUint32, validateUndefined, - validateAbortSignal, -}; + validateAbortSignal +} diff --git a/lib/ours/browser.js b/lib/ours/browser.js index ac901c17e7..7083fb31e5 100644 --- a/lib/ours/browser.js +++ b/lib/ours/browser.js @@ -1,12 +1,12 @@ 'use strict' const CustomStream = require('../stream') + const promises = require('../stream/promises') -const originalDestroy = CustomStream.Readable.destroy -module.exports = CustomStream.Readable +const originalDestroy = CustomStream.Readable.destroy +module.exports = CustomStream.Readable // Explicit export naming is needed for ESM -// Explicit export naming is needed for ESM module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer module.exports._isUint8Array = CustomStream._isUint8Array module.exports.isDisturbed = CustomStream.isDisturbed @@ -23,16 +23,14 @@ module.exports.destroy = CustomStream.destroy module.exports.destroy = originalDestroy module.exports.pipeline = CustomStream.pipeline module.exports.compose = CustomStream.compose - Object.defineProperty(CustomStream, 'promises', { configurable: true, enumerable: true, + get() { return promises } }) +module.exports.Stream = CustomStream.Stream // Allow default importing -module.exports.Stream = CustomStream.Stream - -// Allow default importing module.exports.default = module.exports diff --git a/lib/ours/errors.js b/lib/ours/errors.js index 07948aa1d0..355cfd019c 100644 --- a/lib/ours/errors.js +++ b/lib/ours/errors.js @@ -1,5 +1,4 @@ 'use strict' - /* This file is a reduced and adapted version of the main 
lib/internal/errors.js file defined at @@ -14,6 +13,7 @@ if (typeof AggregateError === 'undefined') { } const assert = require('assert') + const { inspect, format } = require('util') const kIsNodeError = Symbol('kIsNodeError') @@ -21,8 +21,7 @@ const kTypes = [ 'string', 'function', 'number', - 'object', - // Accept 'Function' and 'Object' as alternative to the lower cased version. + 'object', // Accept 'Function' and 'Object' as alternative to the lower cased version. 'Function', 'Object', 'boolean', @@ -31,16 +30,17 @@ const kTypes = [ ] const classRegExp = /^([A-Z][a-z0-9]*)+$/ const nodeInternalPrefix = '__node_internal_' -const codes = {} +const codes = {} // Only use this for integers! Decimal numbers do not work with this function. -// Only use this for integers! Decimal numbers do not work with this function. function addNumericalSeparator(val) { let res = '' let i = val.length const start = val[0] === '-' ? 1 : 0 + for (; i >= start + 4; i -= 3) { res = `_${val.slice(i - 3, i)}${res}` } + return `${val.slice(0, i)}${res}` } @@ -50,12 +50,10 @@ function getMessage(key, msg, args) { msg.length <= args.length, // Default options do not count. `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).` ) - return msg(...args) } const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length - assert( expectedLength === args.length, `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).` @@ -86,6 +84,7 @@ function E(code, message, Base) { NodeError.prototype.name = Base.name NodeError.prototype.code = code NodeError.prototype[kIsNodeError] = true + NodeError.prototype.toString = function () { return `${this.name} [${code}]: ${this.message}` } @@ -97,7 +96,9 @@ function hideStackFrames(fn) { // We rename the functions that will be hidden to cut off the stacktrace // at the outermost one const hidden = nodeInternalPrefix + fn.name - Object.defineProperty(fn, 'name', { value: hidden }) + Object.defineProperty(fn, 'name', { + value: hidden + }) return fn } @@ -139,6 +140,7 @@ E( } let msg = 'The ' + if (name.endsWith(' argument')) { // For cases like 'first argument' msg += `${name} ` @@ -147,7 +149,6 @@ E( } msg += 'must be ' - const types = [] const instances = [] const other = [] @@ -163,10 +164,9 @@ E( assert(value !== 'object', 'The value "object" should be written as "Object"') other.push(value) } - } - - // Special handle `object` in case other instances are allowed to outline + } // Special handle `object` in case other instances are allowed to outline // the differences between each other. 
+ if (instances.length > 0) { const pos = types.indexOf('object') @@ -181,9 +181,11 @@ E( case 1: msg += `of type ${types[0]}` break + case 2: msg += `one of type ${types[0]} or ${types[1]}` break + default: { const last = types.pop() msg += `one of type ${types.join(', ')}, or ${last}` @@ -200,9 +202,11 @@ E( case 1: msg += `an instance of ${instances[0]}` break + case 2: msg += `an instance of ${instances[0]} or ${instances[1]}` break + default: { const last = instances.pop() msg += `an instance of ${instances.join(', ')}, or ${last}` @@ -217,6 +221,7 @@ E( switch (other.length) { case 0: break + case 1: if (other[0].toLowerCase() !== other[0]) { msg += 'an ' @@ -224,9 +229,11 @@ E( msg += `${other[0]}` break + case 2: msg += `one of ${other[0]} or ${other[1]}` break + default: { const last = other.pop() msg += `one of ${other.join(', ')}, or ${last}` @@ -238,51 +245,71 @@ E( } else if (typeof actual === 'function' && actual.name) { msg += `. Received function ${actual.name}` } else if (typeof actual === 'object') { - if (actual.constructor?.name) { + var _actual$constructor + + if ( + (_actual$constructor = actual.constructor) !== null && + _actual$constructor !== undefined && + _actual$constructor.name + ) { msg += `. Received an instance of ${actual.constructor.name}` } else { - const inspected = inspect(actual, { depth: -1 }) + const inspected = inspect(actual, { + depth: -1 + }) msg += `. Received ${inspected}` } } else { - let inspected = inspect(actual, { colors: false }) + let inspected = inspect(actual, { + colors: false + }) + if (inspected.length > 25) { inspected = `${inspected.slice(0, 25)}...` } + msg += `. Received type ${typeof actual} (${inspected})` } + return msg }, TypeError ) - E( 'ERR_INVALID_ARG_VALUE', (name, value, reason = 'is invalid') => { let inspected = inspect(value) + if (inspected.length > 128) { inspected = inspected.slice(0, 128) + '...' } + const type = name.includes('.') ? 'property' : 'argument' return `The ${type} '${name}' ${reason}. Received ${inspected}` }, TypeError ) - E( 'ERR_INVALID_RETURN_VALUE', (input, name, value) => { - const type = value?.constructor?.name ? `instance of ${value.constructor.name}` : `type ${typeof value}` + var _value$constructor + + const type = + value !== null && + value !== undefined && + (_value$constructor = value.constructor) !== null && + _value$constructor !== undefined && + _value$constructor.name + ? `instance of ${value.constructor.name}` + : `type ${typeof value}` return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.` }, TypeError ) - E( 'ERR_MISSING_ARGS', (...args) => { assert(args.length > 0, 'At least one arg needs to be specified') - let msg const len = args.length args = (Array.isArray(args) ? 
args : [args]).map((a) => `"${a}"`).join(' or ') @@ -291,9 +318,11 @@ E( case 1: msg += `The ${args[0]} argument` break + case 2: msg += `The ${args[0]} and ${args[1]} arguments` break + default: { const last = args.pop() @@ -306,12 +335,10 @@ E( }, TypeError ) - E( 'ERR_OUT_OF_RANGE', (str, range, input) => { assert(range, 'Missing "range" argument') - let received if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) { @@ -332,7 +359,6 @@ E( }, RangeError ) - E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error) E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error) E('ERR_STREAM_ALREADY_FINISHED', 'Cannot call %s after a stream was finished', Error) @@ -344,7 +370,6 @@ E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error) E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event', Error) E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error) E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError) - module.exports = { AbortError, aggregateTwoErrors: hideStackFrames(aggregateTwoErrors), diff --git a/lib/ours/index.js b/lib/ours/index.js index 6cd12dcfb3..e365dce318 100644 --- a/lib/ours/index.js +++ b/lib/ours/index.js @@ -3,9 +3,8 @@ const Stream = require('stream') if (Stream && process.env.READABLE_STREAM === 'disable') { - const promises = require('stream/promises') + const promises = require('stream/promises') // Explicit export naming is needed for ESM - // Explicit export naming is needed for ESM module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer module.exports._isUint8Array = Stream._isUint8Array module.exports.isDisturbed = Stream.isDisturbed @@ -21,24 +20,23 @@ if (Stream && process.env.READABLE_STREAM === 'disable') { module.exports.destroy = Stream.destroy module.exports.pipeline = Stream.pipeline module.exports.compose = Stream.compose - Object.defineProperty(Stream, 'promises', { configurable: true, enumerable: true, + get() { return promises } }) - module.exports.Stream = Stream.Stream } else { const CustomStream = require('../stream') + const promises = require('../stream/promises') - const originalDestroy = CustomStream.Readable.destroy - module.exports = CustomStream.Readable + const originalDestroy = CustomStream.Readable.destroy + module.exports = CustomStream.Readable // Explicit export naming is needed for ESM - // Explicit export naming is needed for ESM module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer module.exports._isUint8Array = CustomStream._isUint8Array module.exports.isDisturbed = CustomStream.isDisturbed @@ -55,17 +53,15 @@ if (Stream && process.env.READABLE_STREAM === 'disable') { module.exports.destroy = originalDestroy module.exports.pipeline = CustomStream.pipeline module.exports.compose = CustomStream.compose - Object.defineProperty(CustomStream, 'promises', { configurable: true, enumerable: true, + get() { return promises } }) - module.exports.Stream = CustomStream.Stream -} +} // Allow default importing -// Allow default importing module.exports.default = module.exports diff --git a/lib/ours/primordials.js b/lib/ours/primordials.js index 14e2680bbc..fab7a28e44 100644 --- a/lib/ours/primordials.js +++ b/lib/ours/primordials.js @@ -1,5 +1,4 @@ 'use strict' - /* This file is a reduced and adapted version of the main lib/internal/per_context/primordials.js file defined at @@ -12,34 +11,45 @@ module.exports = { ArrayIsArray(self) { return Array.isArray(self) }, + ArrayPrototypeIncludes(self, el) { return self.includes(el) }, + 
ArrayPrototypeIndexOf(self, el) { return self.indexOf(el) }, + ArrayPrototypeJoin(self, sep) { return self.join(sep) }, + ArrayPrototypeMap(self, fn) { return self.map(fn) }, + ArrayPrototypePop(self, el) { return self.pop(el) }, + ArrayPrototypePush(self, el) { return self.push(el) }, + ArrayPrototypeSlice(self, start, end) { return self.slice(start, end) }, + Error, + FunctionPrototypeCall(fn, thisArgs, ...args) { return fn.call(thisArgs, ...args) }, + FunctionPrototypeSymbolHasInstance(self, instance) { return Function.prototype[Symbol.hasInstance].call(self, instance) }, + MathFloor: Math.floor, Number, NumberIsInteger: Number.isInteger, @@ -47,55 +57,74 @@ module.exports = { NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER, NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER, NumberParseInt: Number.parseInt, + ObjectDefineProperties(self, props) { return Object.defineProperties(self, props) }, + ObjectDefineProperty(self, name, prop) { return Object.defineProperty(self, name, prop) }, + ObjectGetOwnPropertyDescriptor(self, name) { return Object.getOwnPropertyDescriptor(self, name) }, + ObjectKeys(obj) { return Object.keys(obj) }, + ObjectSetPrototypeOf(target, proto) { return Object.setPrototypeOf(target, proto) }, + Promise, + PromisePrototypeCatch(self, fn) { return self.catch(fn) }, + PromisePrototypeThen(self, thenFn, catchFn) { return self.then(thenFn, catchFn) }, + PromiseReject(err) { return Promise.reject(err) }, + ReflectApply: Reflect.apply, + RegExpPrototypeTest(self, value) { return self.test(value) }, + SafeSet: Set, String, + StringPrototypeSlice(self, start, end) { return self.slice(start, end) }, + StringPrototypeToLowerCase(self) { return self.toLowerCase() }, + StringPrototypeToUpperCase(self) { return self.toUpperCase() }, + StringPrototypeTrim(self) { return self.trim() }, + Symbol, SymbolAsyncIterator: Symbol.asyncIterator, SymbolHasInstance: Symbol.hasInstance, SymbolIterator: Symbol.iterator, + TypedArrayPrototypeSet(self, buf, len) { return self.set(buf, len) }, + Uint8Array } diff --git a/lib/ours/util.js b/lib/ours/util.js index 210fa18fb5..5d39c25869 100644 --- a/lib/ours/util.js +++ b/lib/ours/util.js @@ -2,6 +2,16 @@ const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor +if (typeof Blob === 'undefined') { + let { Blob } = require('buffer') + + if (typeof Blob === 'undefined') { + Blob = require('blob-polyfill').Blob + } + + globalThis.Blob = Blob +} + module.exports = { once(callback) { let called = false @@ -9,24 +19,32 @@ module.exports = { if (called) { return } + called = true callback.apply(this, args) } }, + createDeferredPromise: function () { let resolve - let reject - // eslint-disable-next-line promise/param-names + let reject // eslint-disable-next-line promise/param-names + const promise = new Promise((res, rej) => { resolve = res reject = rej }) - return { promise, resolve, reject } + return { + promise, + resolve, + reject + } }, + // All following functions are just used in browser debuglog() { return function () {} }, + format(format, ...args) { // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args return format.replace(/%([sdifj])/g, function (...[_unused, type]) { @@ -41,25 +59,33 @@ module.exports = { } }) }, + promisify(fn) { return new Promise((resolve, reject) => { fn((err, ...args) => { if (err) { return reject(err) } + return resolve(...args) }) }) }, + inspect: require('object-inspect'), types: { isAsyncFunction(fn) { return fn instanceof AsyncFunction }, + isArrayBufferView(arr) { 
return ArrayBuffer.isView(arr) } + }, + + isBlob(blob) { + // eslint-disable-next-line no-undef + return blob instanceof Blob } } - module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom') diff --git a/lib/stream.js b/lib/stream.js index 299713c3d4..8f7ed9d70d 100644 --- a/lib/stream.js +++ b/lib/stream.js @@ -18,120 +18,132 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' -'use strict'; +const { ObjectDefineProperty, ObjectKeys, ReflectApply } = require('./ours/primordials') const { - ObjectDefineProperty, - ObjectKeys, - ReflectApply, -} = require('./ours/primordials'); + promisify: { custom: customPromisify } +} = require('util') -const { - promisify: { custom: customPromisify }, -} = require('util'); +const { streamReturningOperators, promiseReturningOperators } = require('./internal/streams/operators') const { - streamReturningOperators, - promiseReturningOperators, -} = require('./internal/streams/operators'); + codes: { ERR_ILLEGAL_CONSTRUCTOR } +} = require('./ours/errors') + +const compose = require('./internal/streams/compose') + +const { pipeline } = require('./internal/streams/pipeline') + +const { destroyer } = require('./internal/streams/destroy') + +const eos = require('./internal/streams/end-of-stream') + +const internalBuffer = {} + +const promises = require('./stream/promises') + +const utils = require('./internal/streams/utils') + +const Stream = (module.exports = require('./internal/streams/legacy').Stream) + +Stream.isDisturbed = utils.isDisturbed +Stream.isErrored = utils.isErrored +Stream.isReadable = utils.isReadable +Stream.Readable = require('./internal/streams/readable') -const { - codes: { - ERR_ILLEGAL_CONSTRUCTOR, - }, -} = require('./ours/errors'); -const compose = require('./internal/streams/compose'); -const { pipeline } = require('./internal/streams/pipeline'); -const { destroyer } = require('./internal/streams/destroy'); -const eos = require('./internal/streams/end-of-stream'); -const internalBuffer = {}; - -const promises = require('./stream/promises'); -const utils = require('./internal/streams/utils'); - -const Stream = module.exports = require('./internal/streams/legacy').Stream; -Stream.isDisturbed = utils.isDisturbed; -Stream.isErrored = utils.isErrored; -Stream.isReadable = utils.isReadable; -Stream.Readable = require('./internal/streams/readable'); for (const key of ObjectKeys(streamReturningOperators)) { - const op = streamReturningOperators[key]; + const op = streamReturningOperators[key] + function fn(...args) { if (new.target) { - throw ERR_ILLEGAL_CONSTRUCTOR(); + throw ERR_ILLEGAL_CONSTRUCTOR() } - return Stream.Readable.from(ReflectApply(op, this, args)); + + return Stream.Readable.from(ReflectApply(op, this, args)) } - ObjectDefineProperty(fn, 'name', { value: op.name }); - ObjectDefineProperty(fn, 'length', { value: op.length }); + + ObjectDefineProperty(fn, 'name', { + value: op.name + }) + ObjectDefineProperty(fn, 'length', { + value: op.length + }) ObjectDefineProperty(Stream.Readable.prototype, key, { value: fn, enumerable: false, configurable: true, - writable: true, - }); + writable: true + }) } + for (const key of ObjectKeys(promiseReturningOperators)) { - const op = promiseReturningOperators[key]; + const op = promiseReturningOperators[key] + function fn(...args) { if (new.target) { - throw ERR_ILLEGAL_CONSTRUCTOR(); + throw ERR_ILLEGAL_CONSTRUCTOR() } - 
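
Editor's note (not part of the patch): the lib/ours/util.js hunks above provide browser-side stand-ins for Node's util module. A rough usage sketch of two of the shims shown there (the exact formatted output is an assumption based on the simplified %s/%d/%i/%f/%j handling):

    const util = require('./lib/ours/util')

    // format() only understands the placeholders matched by the regexp above.
    util.format('pushed %d chunks to %s', 3, 'dest')

    // createDeferredPromise() exposes the resolvers alongside the promise.
    const { promise, resolve } = util.createDeferredPromise()
    promise.then((v) => console.log(v))
    resolve('done')
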
return ReflectApply(op, this, args); + + return ReflectApply(op, this, args) } - ObjectDefineProperty(fn, 'name', { value: op.name }); - ObjectDefineProperty(fn, 'length', { value: op.length }); + + ObjectDefineProperty(fn, 'name', { + value: op.name + }) + ObjectDefineProperty(fn, 'length', { + value: op.length + }) ObjectDefineProperty(Stream.Readable.prototype, key, { value: fn, enumerable: false, configurable: true, - writable: true, - }); + writable: true + }) } -Stream.Writable = require('./internal/streams/writable'); -Stream.Duplex = require('./internal/streams/duplex'); -Stream.Transform = require('./internal/streams/transform'); -Stream.PassThrough = require('./internal/streams/passthrough'); -Stream.pipeline = pipeline; -const { addAbortSignal } = require('./internal/streams/add-abort-signal'); -Stream.addAbortSignal = addAbortSignal; -Stream.finished = eos; -Stream.destroy = destroyer; -Stream.compose = compose; +Stream.Writable = require('./internal/streams/writable') +Stream.Duplex = require('./internal/streams/duplex') +Stream.Transform = require('./internal/streams/transform') +Stream.PassThrough = require('./internal/streams/passthrough') +Stream.pipeline = pipeline + +const { addAbortSignal } = require('./internal/streams/add-abort-signal') + +Stream.addAbortSignal = addAbortSignal +Stream.finished = eos +Stream.destroy = destroyer +Stream.compose = compose ObjectDefineProperty(Stream, 'promises', { configurable: true, enumerable: true, + get() { - return promises; + return promises } -}); - +}) ObjectDefineProperty(pipeline, customPromisify, { enumerable: true, + get() { - return promises.pipeline; + return promises.pipeline } -}); - +}) ObjectDefineProperty(eos, customPromisify, { enumerable: true, + get() { - return promises.finished; + return promises.finished } -}); +}) // Backwards-compat with node 0.4.x -// Backwards-compat with node 0.4.x -Stream.Stream = Stream; +Stream.Stream = Stream +Stream._isUint8Array = function isUint8Array(value) { + return value instanceof Uint8Array +} - Stream._isUint8Array = function isUint8Array(value) { - return value instanceof Uint8Array - }; - Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk.buffer, - chunk.byteOffset, - chunk.byteLength); -}; + return Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) +} diff --git a/lib/stream/promises.js b/lib/stream/promises.js index a9b726e9ee..5e7972ee8a 100644 --- a/lib/stream/promises.js +++ b/lib/stream/promises.js @@ -1,41 +1,43 @@ -'use strict'; +'use strict' -const { - ArrayPrototypePop, - Promise, -} = require('../ours/primordials'); +const { ArrayPrototypePop, Promise } = require('../ours/primordials') -const { - isIterable, - isNodeStream, -} = require('../internal/streams/utils'); +const { isIterable, isNodeStream } = require('../internal/streams/utils') -const { pipelineImpl: pl } = require('../internal/streams/pipeline'); -const { finished } = require('../internal/streams/end-of-stream'); +const { pipelineImpl: pl } = require('../internal/streams/pipeline') + +const { finished } = require('../internal/streams/end-of-stream') function pipeline(...streams) { return new Promise((resolve, reject) => { - let signal; - let end; - const lastArg = streams[streams.length - 1]; - if (lastArg && typeof lastArg === 'object' && - !isNodeStream(lastArg) && !isIterable(lastArg)) { - const options = ArrayPrototypePop(streams); - signal = options.signal; - end = options.end; + let signal + let end + const lastArg = streams[streams.length - 
1] + + if (lastArg && typeof lastArg === 'object' && !isNodeStream(lastArg) && !isIterable(lastArg)) { + const options = ArrayPrototypePop(streams) + signal = options.signal + end = options.end } - pl(streams, (err, value) => { - if (err) { - reject(err); - } else { - resolve(value); + pl( + streams, + (err, value) => { + if (err) { + reject(err) + } else { + resolve(value) + } + }, + { + signal, + end } - }, { signal, end }); - }); + ) + }) } module.exports = { finished, - pipeline, -}; + pipeline +} diff --git a/package.json b/package.json index f4df05ddec..02635d37cc 100644 --- a/package.json +++ b/package.json @@ -38,34 +38,35 @@ "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", "test:browsers": "airtap -p all test/browser/test-*.js", "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", - "format": "prettier -w src", + "format": "prettier -w src lib test", "lint": "eslint src" }, "dependencies": { "abort-controller": "^3.0.0", "aggregate-error": "^3.1.0", + "blob-polyfill": "^7.0.20220408", "object-inspect": "^1.12.0" }, "devDependencies": { + "@babel/core": "^7.17.9", + "@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7", + "@babel/plugin-proposal-optional-chaining": "^7.16.7", "@sinonjs/fake-timers": "^9.1.1", "airtap": "^4.0.4", "airtap-playwright": "^1.0.1", "c8": "^7.11.0", "eslint": "^7.32.0", "eslint-config-standard": "^16.0.3", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^6.0.0", "prettier": "^2.6.2", "tap": "^16.0.1", "tape": "^5.5.2", "tar": "^6.1.11", - "undici": "^5.0.0", - "util-promisify": "^3.0.0" + "undici": "^5.0.0" }, "engines": { - "node": ">= 14.15.0" - }, - "nyc": { - "include": [ - "lib/**/*.js" - ] + "node": ">= 12.22.0" } } diff --git a/src/test/browser/test-stream2-large-read-stall.js b/src/test/browser/test-stream2-large-read-stall.js index 25e64b503d..cee0f9ca06 100644 --- a/src/test/browser/test-stream2-large-read-stall.js +++ b/src/test/browser/test-stream2-large-read-stall.js @@ -23,9 +23,10 @@ test('large object read stall', function (t) { r.on('readable', function () { false && console.error('>> readable') + let ret do { false && console.error(' > read(%d)', READSIZE) - var ret = r.read(READSIZE) + ret = r.read(READSIZE) false && console.error(' < %j (%d remain)', ret && ret.length, rs.length) } while (ret && ret.length === READSIZE) diff --git a/src/util.js b/src/util.js index 210fa18fb5..745a7acf9e 100644 --- a/src/util.js +++ b/src/util.js @@ -2,6 +2,16 @@ const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor +if (typeof Blob === 'undefined') { + let { Blob } = require('buffer') + + if (typeof Blob === 'undefined') { + Blob = require('blob-polyfill').Blob + } + + globalThis.Blob = Blob +} + module.exports = { once(callback) { let called = false @@ -59,6 +69,10 @@ module.exports = { isArrayBufferView(arr) { return ArrayBuffer.isView(arr) } + }, + isBlob(blob) { + // eslint-disable-next-line no-undef + return blob instanceof Blob } } diff --git a/test/browser/test-stream-big-packet.js b/test/browser/test-stream-big-packet.js index 5c096e5115..8099aa3bfd 100644 --- a/test/browser/test-stream-big-packet.js +++ b/test/browser/test-stream-big-packet.js @@ -1,17 +1,19 @@ 'use strict' const test = require('tape') + const inherits = require('inherits') + const { Transform } = require('../../lib/ours/index') test('big packet', function (t) { t.plan(3) - let passed = false function PassThrough() { 
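
Editor's note (not part of the patch): the lib/stream/promises.js hunk above shows how the promise-returning pipeline pops a trailing plain object (neither a stream nor an iterable) off the argument list and reads it as `{ signal, end }` options. A hedged usage sketch; the require path mirrors the file's location in this repository and the file names are hypothetical:

    const { pipeline } = require('./lib/stream/promises')
    const fs = require('fs')

    async function copy(signal) {
      // The options object is detected and removed exactly as in the
      // lastArg check above; the remaining arguments form the pipeline.
      await pipeline(
        fs.createReadStream('in.txt'),
        fs.createWriteStream('out.txt'),
        { signal, end: true }
      )
    }
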
Transform.call(this) } + inherits(PassThrough, Transform) PassThrough.prototype._transform = function (chunk, encoding, done) { @@ -22,6 +24,7 @@ test('big packet', function (t) { function TestStream() { Transform.call(this) } + inherits(TestStream, Transform) TestStream.prototype._transform = function (chunk, encoding, done) { @@ -29,32 +32,31 @@ test('big packet', function (t) { // Char 'a' only exists in the last write passed = indexOf(chunk.toString(), 'a') >= 0 } + if (passed) { t.ok(passed) } + done() } const s1 = new PassThrough() const s2 = new PassThrough() const s3 = new TestStream() + s1.pipe(s3) // Don't let s2 auto close which may close s3 - s1.pipe(s3) - // Don't let s2 auto close which may close s3 - s2.pipe(s3, { end: false }) + s2.pipe(s3, { + end: false + }) // We must write a buffer larger than highWaterMark - // We must write a buffer larger than highWaterMark const big = Buffer.alloc(s1._writableState.highWaterMark + 1) - big.fill('x') - - // Since big is larger than highWaterMark, it will be buffered internally. - t.notOk(s1.write(big)) + big.fill('x') // Since big is larger than highWaterMark, it will be buffered internally. - // 'tiny' is small enough to pass through internal buffer. - t.ok(s2.write('tiny')) + t.notOk(s1.write(big)) // 'tiny' is small enough to pass through internal buffer. - // Write some small data in next IO loop, which will never be written to s3 + t.ok(s2.write('tiny')) // Write some small data in next IO loop, which will never be written to s3 // Because 'drain' event is not emitted from s1 and s1 is still paused + setImmediate(s1.write.bind(s1), 'later') function indexOf(xs, x) { @@ -63,6 +65,7 @@ test('big packet', function (t) { return i } } + return -1 } }) diff --git a/test/browser/test-stream-big-push.js b/test/browser/test-stream-big-push.js index 97a5cff3a6..b0fba6e365 100644 --- a/test/browser/test-stream-big-push.js +++ b/test/browser/test-stream-big-push.js @@ -1,18 +1,16 @@ 'use strict' const test = require('tape') + const { Readable } = require('../../lib/ours/index') test('big push', function (t) { t.plan(10) - const str = 'asdfasdfasdfasdfasdf' - const r = new Readable({ highWaterMark: 5, encoding: 'utf8' }) - let reads = 0 let eofed = false let ended = false @@ -36,20 +34,16 @@ test('big push', function (t) { r.on('end', function () { ended = true - }) - - // push some data in to start. + }) // push some data in to start. // we've never gotten any read event at this point. - const ret = r.push(str) - // should be false. > hwm + const ret = r.push(str) // should be false. > hwm + t.notOk(ret) let chunk = r.read() t.equal(chunk, str) - chunk = r.read() t.equal(chunk, null) - r.once('readable', function () { // this time, we'll get *all* the remaining data, because // it's been added synchronously, as the read WOULD take @@ -57,11 +51,9 @@ test('big push', function (t) { // which synchronously added more, which we then return. 
chunk = r.read() t.equal(chunk, str + str) - chunk = r.read() t.equal(chunk, null) }) - r.on('end', function () { t.ok(eofed) t.ok(ended) diff --git a/test/browser/test-stream-duplex.js b/test/browser/test-stream-duplex.js index d35e641c3d..1f56ee3f31 100644 --- a/test/browser/test-stream-duplex.js +++ b/test/browser/test-stream-duplex.js @@ -1,16 +1,16 @@ 'use strict' const test = require('tape') + const { Duplex } = require('../../lib/ours/index') test('duplex', function (t) { t.plan(4) - - const stream = new Duplex({ objectMode: true }) - + const stream = new Duplex({ + objectMode: true + }) t.ok(stream._readableState.objectMode) t.ok(stream._writableState.objectMode) - let written let read @@ -24,13 +24,15 @@ test('duplex', function (t) { stream.on('data', function (obj) { read = obj }) - stream.on('end', function () { t.equal(read.val, 1) t.equal(written.val, 2) }) - - stream.push({ val: 1 }) - stream.end({ val: 2 }) + stream.push({ + val: 1 + }) + stream.end({ + val: 2 + }) stream.push(null) }) diff --git a/test/browser/test-stream-end-paused.js b/test/browser/test-stream-end-paused.js index 7a8909674f..4130ceaf9d 100644 --- a/test/browser/test-stream-end-paused.js +++ b/test/browser/test-stream-end-paused.js @@ -1,11 +1,11 @@ 'use strict' const test = require('tape') + const { Readable } = require('../../lib/ours/index') test('end pause', function (t) { t.plan(2) - const stream = new Readable() let calledRead = false @@ -18,9 +18,7 @@ test('end pause', function (t) { stream.on('data', function () { throw new Error('should not ever get data') }) - stream.pause() - setTimeout(function () { stream.on('end', function () { t.ok(calledRead) diff --git a/test/browser/test-stream-finished.js b/test/browser/test-stream-finished.js index 6c688c0a22..73bc4b63ba 100644 --- a/test/browser/test-stream-finished.js +++ b/test/browser/test-stream-finished.js @@ -1,65 +1,52 @@ 'use strict' const test = require('tape') + const { Writable, Readable, Transform, finished } = require('../../lib/ours/index') test('readable finished', function (t) { t.plan(1) - const rs = new Readable({ read: function read() {} }) - finished(rs, (err) => { t.ifErr(err) }) - rs.push(null) rs.resume() }) - test('writable finished', function (t) { t.plan(1) - const ws = new Writable({ write: function write(data, enc, cb) { cb() } }) - finished(ws, (err) => { t.ifErr(err) }) - ws.end() }) - test('transform finished', function (t) { t.plan(3) - const tr = new Transform({ transform: function transform(data, enc, cb) { cb() } }) - let finish = false let ended = false - tr.on('end', function () { ended = true }) - tr.on('finish', function () { finish = true }) - finished(tr, (err) => { t.ifErr(err) t.ok(finish) t.ok(ended) }) - tr.end() tr.resume() }) diff --git a/test/browser/test-stream-ispaused.js b/test/browser/test-stream-ispaused.js index f9cf113f1c..f30337cad2 100644 --- a/test/browser/test-stream-ispaused.js +++ b/test/browser/test-stream-ispaused.js @@ -1,25 +1,20 @@ 'use strict' const test = require('tape') + const stream = require('../../lib/ours/index') test('is paused', function (t) { t.plan(4) + const readable = new stream.Readable() // _read is a noop, here. - const readable = new stream.Readable() - - // _read is a noop, here. - readable._read = () => {} + readable._read = () => {} // default state of a stream is not "paused" - // default state of a stream is not "paused" - t.notOk(readable.isPaused()) + t.notOk(readable.isPaused()) // make the stream start flowing... - // make the stream start flowing... 
- readable.on('data', () => {}) + readable.on('data', () => {}) // still not paused. - // still not paused. t.notOk(readable.isPaused()) - readable.pause() t.ok(readable.isPaused()) readable.resume() diff --git a/test/browser/test-stream-pipe-after-end.js b/test/browser/test-stream-pipe-after-end.js index dc33a4e645..10270b44f3 100644 --- a/test/browser/test-stream-pipe-after-end.js +++ b/test/browser/test-stream-pipe-after-end.js @@ -1,7 +1,9 @@ 'use strict' const test = require('tape') + const inherits = require('inherits') + const { Readable, Writable } = require('../../lib/ours/index') test('pipe after end', function (t) { @@ -11,15 +13,18 @@ test('pipe after end', function (t) { if (!(this instanceof TestReadable)) { return new TestReadable(opt) } + Readable.call(this, opt) this._ended = false } + inherits(TestReadable, Readable) TestReadable.prototype._read = function (n) { if (this._ended) { this.emit('error', new Error('_read called twice')) } + this._ended = true this.push(null) } @@ -28,40 +33,37 @@ test('pipe after end', function (t) { if (!(this instanceof TestWritable)) { return new TestWritable(opt) } + Writable.call(this, opt) this._written = [] } + inherits(TestWritable, Writable) TestWritable.prototype._write = function (chunk, encoding, cb) { this._written.push(chunk) + cb() - } + } // this one should not emit 'end' until we read() from it later. - // this one should not emit 'end' until we read() from it later. const ender = new TestReadable() - let enderEnded = false + let enderEnded = false // what happens when you pipe() a Readable that's already ended? - // what happens when you pipe() a Readable that's already ended? - const piper = new TestReadable() - // pushes EOF null, and length=0, so this will trigger 'end' - piper.read() + const piper = new TestReadable() // pushes EOF null, and length=0, so this will trigger 'end' + piper.read() setTimeout(function () { ender.on('end', function () { enderEnded = true t.ok(true, 'enderEnded') }) t.notOk(enderEnded) - const c = ender.read() t.equal(c, null) - const w = new TestWritable() w.on('finish', function () { t.ok(true, 'writableFinished') }) - piper.pipe(w) }) }) diff --git a/test/browser/test-stream-pipe-cleanup-pause.js b/test/browser/test-stream-pipe-cleanup-pause.js index 2ca267511b..36ad573dfa 100644 --- a/test/browser/test-stream-pipe-cleanup-pause.js +++ b/test/browser/test-stream-pipe-cleanup-pause.js @@ -1,18 +1,17 @@ 'use strict' const test = require('tape') + const stream = require('../../lib/ours/index') test('pipe cleanup pause', function (t) { t.plan(3) - const reader = new stream.Readable() const writer1 = new stream.Writable() - const writer2 = new stream.Writable() - - // 560000 is chosen here because it is larger than the (default) highWaterMark + const writer2 = new stream.Writable() // 560000 is chosen here because it is larger than the (default) highWaterMark // and will cause `.write()` to return false // See: https://github.com/nodejs/node/issues/2323 + const buffer = Buffer.alloc(560000) reader._read = function () {} @@ -26,10 +25,8 @@ test('pipe cleanup pause', function (t) { reader.unpipe(writer1) reader.pipe(writer2) reader.push(buffer) - setImmediate(function () { reader.push(buffer) - setImmediate(function () { reader.push(buffer) }) diff --git a/test/browser/test-stream-pipe-cleanup.js b/test/browser/test-stream-pipe-cleanup.js index ee07304d6b..34c2cd3496 100644 --- a/test/browser/test-stream-pipe-cleanup.js +++ b/test/browser/test-stream-pipe-cleanup.js @@ -1,9 +1,10 @@ -'use strict' -// 
This test asserts that Stream.prototype.pipe does not leave listeners +'use strict' // This test asserts that Stream.prototype.pipe does not leave listeners // hanging on the source or dest. const test = require('tape') + const inherits = require('inherits') + const { Stream } = require('../../lib/ours/index') test('pipe cleanup', function (t) { @@ -18,6 +19,7 @@ test('pipe cleanup', function (t) { this.endCalls = 0 Stream.call(this) } + inherits(Writable, Stream) Writable.prototype.end = function () { @@ -56,9 +58,9 @@ test('pipe cleanup', function (t) { r.pipe(w) r.emit('end') } + t.equal(0, r.listeners('end').length) t.equal(limit, w.endCalls) - w.endCalls = 0 for (i = 0; i < limit; i++) { @@ -66,11 +68,10 @@ test('pipe cleanup', function (t) { r.pipe(w) r.emit('close') } + t.equal(0, r.listeners('close').length) t.equal(limit, w.endCalls) - w.endCalls = 0 - r = new Readable() for (i = 0; i < limit; i++) { @@ -78,17 +79,23 @@ test('pipe cleanup', function (t) { r.pipe(w) w.emit('close') } - t.equal(0, w.listeners('close').length) + t.equal(0, w.listeners('close').length) r = new Readable() w = new Writable() const d = new Duplex() r.pipe(d) // pipeline A + d.pipe(w) // pipeline B + t.equal(r.listeners('end').length, 2) // A.onend, A.cleanup + t.equal(r.listeners('close').length, 2) // A.onclose, A.cleanup + t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup + t.equal(d.listeners('close').length, 3) // A.cleanup, B.onclose, B.cleanup + t.equal(w.listeners('end').length, 0) t.equal(w.listeners('close').length, 1) // B.cleanup @@ -98,7 +105,9 @@ test('pipe cleanup', function (t) { t.equal(r.listeners('end').length, 0) t.equal(r.listeners('close').length, 0) t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup + t.equal(d.listeners('close').length, 2) // B.onclose, B.cleanup + t.equal(w.listeners('end').length, 0) t.equal(w.listeners('close').length, 1) // B.cleanup diff --git a/test/browser/test-stream-pipe-error-handling.js b/test/browser/test-stream-pipe-error-handling.js index 55455805a9..0e4bdb182e 100644 --- a/test/browser/test-stream-pipe-error-handling.js +++ b/test/browser/test-stream-pipe-error-handling.js @@ -1,39 +1,36 @@ 'use strict' const test = require('tape') + const { Readable, Writable, Stream } = require('../../lib/ours/index') test('Error Listener Catches', function (t) { t.plan(1) - const source = new Stream() const dest = new Stream() source._read = function () {} - source.pipe(dest) + source.pipe(dest) let gotErr = null source.on('error', function (err) { gotErr = err }) - const err = new Error('This stream turned into bacon.') source.emit('error', err) t.strictEqual(gotErr, err) }) - test('Error WithoutListener Throws', function (t) { t.plan(1) - const source = new Stream() const dest = new Stream() source._read = function () {} - source.pipe(dest) + source.pipe(dest) const err = new Error('This stream turned into bacon.') - let gotErr = null + try { source.emit('error', err) } catch (e) { @@ -42,12 +39,9 @@ test('Error WithoutListener Throws', function (t) { t.strictEqual(gotErr, err) }) - test('Error With Removed Listener Throws', function (t) { t.plan(2) - const onerror = global.onerror - const r = new Readable() const w = new Writable() let removed = false @@ -74,10 +68,8 @@ test('Error With Removed Listener Throws', function (t) { caught = true } }) - test('Error Listener Catches When Wrong Listener Is Removed', function (t) { t.plan(2) - const r = new Readable() const w = new Writable() let removed = false @@ -91,10 +83,11 @@ test('Error 
Listener Catches When Wrong Listener Is Removed', function (t) { } w.on('error', myOnError) + w._write = function () {} - r.pipe(w) - // Removing some OTHER random listener should not do anything + r.pipe(w) // Removing some OTHER random listener should not do anything + w.removeListener('error', function () {}) removed = true diff --git a/test/browser/test-stream-pipe-event.js b/test/browser/test-stream-pipe-event.js index 272a23cbca..e173829cf7 100644 --- a/test/browser/test-stream-pipe-event.js +++ b/test/browser/test-stream-pipe-event.js @@ -1,7 +1,9 @@ 'use strict' const test = require('tape') + const inherits = require('inherits') + const { Stream } = require('../../lib/ours/index') test('pipe event', function (t) { @@ -11,24 +13,24 @@ test('pipe event', function (t) { this.writable = true Stream.call(this) } + inherits(Writable, Stream) function Readable() { this.readable = true Stream.call(this) } - inherits(Readable, Stream) + inherits(Readable, Stream) let passed = false - const w = new Writable() w.on('pipe', function (src) { passed = true }) - const r = new Readable() + r._read = function () {} - r.pipe(w) + r.pipe(w) t.ok(passed) }) diff --git a/test/browser/test-stream-pipe-without-listenerCount.js b/test/browser/test-stream-pipe-without-listenerCount.js index 36fa85e462..9b9790560f 100644 --- a/test/browser/test-stream-pipe-without-listenerCount.js +++ b/test/browser/test-stream-pipe-without-listenerCount.js @@ -1,20 +1,18 @@ 'use strict' const test = require('tape') + const { Stream } = require('../../lib/ours/index') test('pipe without listenerCount on read', function (t) { t.plan(1) - const r = new Stream({ read: function () {} }) r.listenerCount = undefined - const w = new Stream() w.on('pipe', function () { r.emit('error', new Error('Readable Error')) }) - t.throws(() => r.pipe(w), 'TypeError: this.listenerCount is not a function') }) diff --git a/test/browser/test-stream-pipeline.js b/test/browser/test-stream-pipeline.js index 26bba1a1c4..ff20df05ae 100644 --- a/test/browser/test-stream-pipeline.js +++ b/test/browser/test-stream-pipeline.js @@ -1,27 +1,23 @@ 'use strict' const test = require('tape') + const { Readable, Writable, pipeline } = require('../../lib/ours/index') test('pipeline', function (t) { t.plan(3) - let finished = false - const processed = [] const expected = [Buffer.from('a'), Buffer.from('b'), Buffer.from('c')] - const read = new Readable({ read: function read() {} }) - const write = new Writable({ write: function write(data, enc, cb) { processed.push(data) cb() } }) - write.on('finish', function () { finished = true }) @@ -37,7 +33,6 @@ test('pipeline', function (t) { t.deepEqual(processed, expected) }) }) - test('pipeline missing args', function (t) { t.plan(3) @@ -48,16 +43,13 @@ test('pipeline missing args', function (t) { t.throws(function () { pipeline(_read, function () {}) }) - t.throws(function () { pipeline(function () {}) }) - t.throws(function () { pipeline() }) }) - test('pipeline error', function (t) { t.plan(1) @@ -76,12 +68,10 @@ test('pipeline error', function (t) { setImmediate(function () { return _read2.destroy() }) - pipeline(_read2, _write, (err) => { t.equal(err.message, 'Premature close') }) }) - test('pipeline destroy', function (t) { t.plan(2) @@ -100,10 +90,8 @@ test('pipeline destroy', function (t) { setImmediate(function () { return _read3.destroy(new Error('kaboom')) }) - const dst = pipeline(_read3, _write2, (err) => { t.equal(err.message, 'kaboom') }) - t.equal(dst, _write2) }) diff --git 
a/test/browser/test-stream-push-order.js b/test/browser/test-stream-push-order.js index 6867c874cb..5bb19aad90 100644 --- a/test/browser/test-stream-push-order.js +++ b/test/browser/test-stream-push-order.js @@ -1,20 +1,20 @@ 'use strict' const test = require('tape') + const { Readable } = require('../../lib/ours/index') test('push order', function (t) { t.plan(1) - const s = new Readable({ highWaterMark: 20, encoding: 'ascii' }) - const list = ['1', '2', '3', '4', '5', '6'] s._read = function (n) { const one = list.shift() + if (!one) { s.push(null) } else { @@ -25,7 +25,6 @@ test('push order', function (t) { } s.read(0) - setTimeout(function () { t.equals(s._readableState.buffer.join(','), '1,2,3,4,5,6') }) diff --git a/test/browser/test-stream-push-strings.js b/test/browser/test-stream-push-strings.js index d2bad89d61..4cb45d9e68 100644 --- a/test/browser/test-stream-push-strings.js +++ b/test/browser/test-stream-push-strings.js @@ -1,7 +1,9 @@ 'use strict' const test = require('tape') + const inherits = require('inherits') + const { Readable } = require('../../lib/ours/index') test('push strings', function (t) { @@ -18,6 +20,7 @@ test('push strings', function (t) { switch (this._chunks--) { case 0: return this.push(null) + case 1: return setTimeout( function () { @@ -25,29 +28,32 @@ test('push strings', function (t) { }.bind(this), 100 ) + case 2: return this.push('second to last chunk') + case 3: return process.nextTick( function () { this.push('first chunk') }.bind(this) ) + default: throw new Error('?') } } - const expect = ['first chunksecond to last chunk', 'last chunk'] + const expect = ['first chunksecond to last chunk', 'last chunk'] const ms = new MyStream() const results = [] ms.on('readable', function () { let chunk + while ((chunk = ms.read()) !== null) { results.push(chunk + '') } }) - ms.on('end', function () { t.equal(ms._chunks, -1) t.deepEqual(results, expect) diff --git a/test/browser/test-stream-readable-constructor-set-methods.js b/test/browser/test-stream-readable-constructor-set-methods.js index 895dfd1d1a..be77969f44 100644 --- a/test/browser/test-stream-readable-constructor-set-methods.js +++ b/test/browser/test-stream-readable-constructor-set-methods.js @@ -1,11 +1,11 @@ 'use strict' const test = require('tape') + const { Readable } = require('../../lib/ours/index') test('readable constructor set methods', function (t) { t.plan(2) - let _readCalled = false function _read(n) { @@ -13,9 +13,10 @@ test('readable constructor set methods', function (t) { this.push(null) } - const r = new Readable({ read: _read }) + const r = new Readable({ + read: _read + }) r.resume() - setTimeout(function () { t.equal(r._read, _read) t.ok(_readCalled) diff --git a/test/browser/test-stream-readable-event.js b/test/browser/test-stream-readable-event.js index dc2a9ea65c..124f077dda 100644 --- a/test/browser/test-stream-readable-event.js +++ b/test/browser/test-stream-readable-event.js @@ -1,25 +1,23 @@ 'use strict' const test = require('tape') + const { Readable } = require('../../lib/ours/index') test('readable events - first', (t) => { - t.plan(3) - - // First test, not reading when the readable is added. + t.plan(3) // First test, not reading when the readable is added. // make sure that on('readable', ...) triggers a readable event. + const r = new Readable({ highWaterMark: 3 }) - let _readCalled = false + r._read = function (n) { _readCalled = true - } + } // This triggers a 'readable' event, which is lost. - // This triggers a 'readable' event, which is lost. 
r.push(Buffer.from('blerg')) - let caughtReadable = false setTimeout(function () { // we're testing what we think we are @@ -29,31 +27,25 @@ test('readable events - first', (t) => { setTimeout(function () { // we're testing what we think we are t.notOk(_readCalled) - t.ok(caughtReadable) }) }) }) }) - test('readable events - second', (t) => { - t.plan(3) - - // second test, make sure that readable is re-emitted if there's + t.plan(3) // second test, make sure that readable is re-emitted if there's // already a length, while it IS reading. const r = new Readable({ highWaterMark: 3 }) - let _readCalled = false + r._read = function (n) { _readCalled = true - } + } // This triggers a 'readable' event, which is lost. - // This triggers a 'readable' event, which is lost. r.push(Buffer.from('bl')) - let caughtReadable = false setTimeout(function () { // assert we're testing what we think we are @@ -63,31 +55,26 @@ test('readable events - second', (t) => { setTimeout(function () { // we're testing what we think we are t.ok(_readCalled) - t.ok(caughtReadable) }) }) }) }) - test('readable events - third', (t) => { - t.plan(3) - - // Third test, not reading when the stream has not passed + t.plan(3) // Third test, not reading when the stream has not passed // the highWaterMark but *has* reached EOF. + const r = new Readable({ highWaterMark: 30 }) - let _readCalled = false + r._read = function (n) { _readCalled = true - } + } // This triggers a 'readable' event, which is lost. - // This triggers a 'readable' event, which is lost. r.push(Buffer.from('blerg')) r.push(null) - let caughtReadable = false setTimeout(function () { // assert we're testing what we think we are @@ -97,7 +84,6 @@ test('readable events - third', (t) => { setTimeout(function () { // we're testing what we think we are t.notOk(_readCalled) - t.ok(caughtReadable) }) }) diff --git a/test/browser/test-stream-sync-write.js b/test/browser/test-stream-sync-write.js index 06fbebab84..9195853a0d 100644 --- a/test/browser/test-stream-sync-write.js +++ b/test/browser/test-stream-sync-write.js @@ -1,18 +1,20 @@ 'use strict' const test = require('tape') + const inherits = require('inherits') + const { Writable } = require('../../lib/ours/index') test('should bea ble to write sync', function (t) { t.plan(2) - let internalCalls = 0 let externalCalls = 0 const InternalStream = function () { Writable.call(this) } + inherits(InternalStream, Writable) InternalStream.prototype._write = function (chunk, encoding, callback) { @@ -26,10 +28,12 @@ test('should bea ble to write sync', function (t) { this._writable = writable Writable.call(this) } + inherits(ExternalStream, Writable) ExternalStream.prototype._write = function (chunk, encoding, callback) { externalCalls++ + this._writable.write(chunk, encoding, callback) } diff --git a/test/browser/test-stream-transform-constructor-set-methods.js b/test/browser/test-stream-transform-constructor-set-methods.js index 2ce4a0ea08..d9ff9e39fd 100644 --- a/test/browser/test-stream-transform-constructor-set-methods.js +++ b/test/browser/test-stream-transform-constructor-set-methods.js @@ -1,18 +1,20 @@ 'use strict' const test = require('tape') + const { Transform } = require('../../lib/ours/index') test('transform constructor set methods', function (t) { t.plan(4) - let _transformCalled = false + function _transform(d, e, n) { _transformCalled = true n() } let _flushCalled = false + function _flush(n) { _flushCalled = true n() @@ -22,10 +24,8 @@ test('transform constructor set methods', function (t) { transform: 
_transform, flush: _flush }) - tr.end(Buffer.from('blerg')) tr.resume() - tr.on('end', function () { t.equal(tr._transform, _transform) t.equal(tr._flush, _flush) diff --git a/test/browser/test-stream-transform-objectmode-falsey-value.js b/test/browser/test-stream-transform-objectmode-falsey-value.js index bd2359bb51..2d043fc3df 100644 --- a/test/browser/test-stream-transform-objectmode-falsey-value.js +++ b/test/browser/test-stream-transform-objectmode-falsey-value.js @@ -1,27 +1,29 @@ 'use strict' const test = require('tape') + const { PassThrough } = require('../../lib/ours/index') test('transform objectmode falsey value', function (t) { t.plan(13) - - const src = new PassThrough({ objectMode: true }) - const tx = new PassThrough({ objectMode: true }) - const dest = new PassThrough({ objectMode: true }) - + const src = new PassThrough({ + objectMode: true + }) + const tx = new PassThrough({ + objectMode: true + }) + const dest = new PassThrough({ + objectMode: true + }) const expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] const results = [] dest.on('end', function () { t.deepEqual(results, expect) }) - dest.on('data', function (x) { results.push(x) }) - src.pipe(tx).pipe(dest) - let i = -1 const int = setInterval(function () { if (i > 10) { diff --git a/test/browser/test-stream-transform-split-objectmode.js b/test/browser/test-stream-transform-split-objectmode.js index 21515858ca..256b86cc29 100644 --- a/test/browser/test-stream-transform-split-objectmode.js +++ b/test/browser/test-stream-transform-split-objectmode.js @@ -1,36 +1,36 @@ 'use strict' const test = require('tape') + const { Transform } = require('../../lib/ours/index') test('transform split objectmode', function (t) { t.plan(10) - - const parser = new Transform({ readableObjectMode: true }) - + const parser = new Transform({ + readableObjectMode: true + }) t.ok(parser._readableState.objectMode, 'parser 1') t.notOk(parser._writableState.objectMode, 'parser 2') t.equals(parser._readableState.highWaterMark, 16, 'parser 3') t.equals(parser._writableState.highWaterMark, 16 * 1024, 'parser 4') parser._transform = function (chunk, enc, callback) { - callback(null, { val: chunk[0] }) + callback(null, { + val: chunk[0] + }) } let parsed - parser.on('data', function (obj) { parsed = obj }) - parser.end(Buffer.from([42])) - parser.on('end', function () { t.equals(parsed.val, 42, 'parser ended') }) - - const serializer = new Transform({ writableObjectMode: true }) - + const serializer = new Transform({ + writableObjectMode: true + }) t.notOk(serializer._readableState.objectMode, 'serializer 1') t.ok(serializer._writableState.objectMode, 'serializer 2') t.equals(serializer._readableState.highWaterMark, 16 * 1024, 'serializer 3') @@ -41,13 +41,12 @@ test('transform split objectmode', function (t) { } let serialized - serializer.on('data', function (chunk) { serialized = chunk }) - - serializer.write({ val: 42 }) - + serializer.write({ + val: 42 + }) serializer.on('end', function () { t.equals(serialized[0], 42, 'searlizer ended') }) diff --git a/test/browser/test-stream-unshift-empty-chunk.js b/test/browser/test-stream-unshift-empty-chunk.js index 6580116231..dea52a22f5 100644 --- a/test/browser/test-stream-unshift-empty-chunk.js +++ b/test/browser/test-stream-unshift-empty-chunk.js @@ -1,11 +1,11 @@ 'use strict' const test = require('tape') + const { Readable } = require('../../lib/ours/index') test('unshift empty chunk', function (t) { t.plan(1) - const r = new Readable() let nChunks = 10 const chunk = Buffer.alloc(10) @@ -21,20 
+21,20 @@ test('unshift empty chunk', function (t) { const seen = [] r.on('readable', function () { let chunk + while ((chunk = r.read())) { - seen.push(chunk.toString()) - // simulate only reading a certain amount of the data, + seen.push(chunk.toString()) // simulate only reading a certain amount of the data, // and then putting the rest of the chunk back into the // stream, like a parser might do. We just fill it with // 'y' so that it's easy to see which bits were touched, // and which were not. + const putBack = Buffer.alloc(readAll ? 0 : 5) putBack.fill('y') readAll = !readAll r.unshift(putBack) } }) - const expect = [ 'xxxxxxxxxx', 'yyyyy', @@ -55,7 +55,6 @@ test('unshift empty chunk', function (t) { 'xxxxxxxxxx', 'yyyyy' ] - r.on('end', function () { t.deepEqual(seen, expect) }) diff --git a/test/browser/test-stream-unshift-read-race.js b/test/browser/test-stream-unshift-read-race.js index d7e17d143e..68983e643f 100644 --- a/test/browser/test-stream-unshift-read-race.js +++ b/test/browser/test-stream-unshift-read-race.js @@ -1,6 +1,4 @@ -'use strict' - -// This test verifies that: +'use strict' // This test verifies that: // 1. unshift() does not cause colliding _read() calls. // 2. unshift() after the 'end' event is an error, but after the EOF // signalling null, it is ok, and just creates a new readable chunk. @@ -8,16 +6,18 @@ // 4. _read() is not called after pushing the EOF null chunk. const test = require('tape') + const stream = require('../../lib/ours/index') test('unshift read race', function (t) { t.plan(139) - const hwm = 10 - const r = stream.Readable({ highWaterMark: hwm }) + const r = stream.Readable({ + highWaterMark: hwm + }) const chunks = 10 - const data = Buffer.alloc(chunks * hwm + Math.ceil(hwm / 2)) + for (let i = 0; i < data.length; i++) { const c = 'asdf'.charCodeAt(i % 4) data[i] = c @@ -25,19 +25,21 @@ test('unshift read race', function (t) { let pos = 0 let pushedNull = false + r._read = function (n) { - t.notOk(pushedNull, '_read after null push') + t.notOk(pushedNull, '_read after null push') // every third chunk is fast - // every third chunk is fast push(!(chunks % 3)) function push(fast) { t.notOk(pushedNull, 'push() after null push') const c = pos >= data.length ? null : data.slice(pos, pos + n) pushedNull = c === null + if (fast) { pos += n r.push(c) + if (c === null) { pushError() } @@ -45,6 +47,7 @@ test('unshift read race', function (t) { setTimeout(function () { pos += n r.push(c) + if (c === null) { pushError() } @@ -56,8 +59,8 @@ test('unshift read race', function (t) { function pushError() { r.unshift(Buffer.allocUnsafe(1)) w.end() - const onerror = global.onerror + global.onerror = (_u1, _u2, _u3, _u4, gotErr) => { t.ok(true) global.onerror = onerror @@ -68,47 +71,52 @@ test('unshift read race', function (t) { const w = stream.Writable() const written = [] + w._write = function (chunk, encoding, cb) { written.push(chunk.toString()) cb() } r.on('end', t.fail) - r.on('readable', function () { let chunk + while ((chunk = r.read(10)) !== null) { w.write(chunk) + if (chunk.length > 4) { r.unshift(Buffer.from('1234')) } } }) - w.on('finish', function () { // each chunk should start with 1234, and then be asfdasdfasdf... // The first got pulled out before the first unshift('1234'), so it's // lacking that piece. 
t.equal(written[0], 'asdfasdfas') - let asdf = 'd' + let asdf = 'd' // console.error('0: %s', written[0]); - // console.error('0: %s', written[0]); for (let i = 1; i < written.length; i++) { // console.error('%s: %s', i.toString(32), written[i]); t.equal(written[i].slice(0, 4), '1234') + for (let j = 4; j < written[i].length; j++) { const c = written[i].charAt(j) t.equal(c, asdf) + switch (asdf) { case 'a': asdf = 's' break + case 's': asdf = 'd' break + case 'd': asdf = 'f' break + case 'f': asdf = 'a' break diff --git a/test/browser/test-stream-writable-change-default-encoding.js b/test/browser/test-stream-writable-change-default-encoding.js index ef8ec00184..89652f8e8e 100644 --- a/test/browser/test-stream-writable-change-default-encoding.js +++ b/test/browser/test-stream-writable-change-default-encoding.js @@ -1,7 +1,9 @@ 'use strict' const test = require('tape') + const inherits = require('inherits') + const stream = require('../../lib/ours/index') inherits(MyWritable, stream.Writable) @@ -18,50 +20,51 @@ function MyWritable(fn, options) { test('defaultCondingIsUtf8', (t) => { t.plan(1) - const m = new MyWritable( function (isBuffer, type, enc) { t.equal(enc, 'utf8') }, - { decodeStrings: false } + { + decodeStrings: false + } ) m.write('foo') m.end() }) - test('changeDefaultEncodingToAscii', (t) => { t.plan(1) - const m = new MyWritable( function (isBuffer, type, enc) { t.equal(enc, 'ascii') }, - { decodeStrings: false } + { + decodeStrings: false + } ) m.setDefaultEncoding('ascii') m.write('bar') m.end() }) - test('changeDefaultEncodingToInvalidValue', (t) => { t.plan(1) - t.throws(function () { - const m = new MyWritable(function (isBuffer, type, enc) {}, { decodeStrings: false }) + const m = new MyWritable(function (isBuffer, type, enc) {}, { + decodeStrings: false + }) m.setDefaultEncoding({}) m.write('bar') m.end() }, TypeError) }) - test('checkVairableCaseEncoding', (t) => { t.plan(1) - const m = new MyWritable( function (isBuffer, type, enc) { t.equal(enc, 'ascii') }, - { decodeStrings: false } + { + decodeStrings: false + } ) m.setDefaultEncoding('AsCii') m.write('bar') diff --git a/test/browser/test-stream-writable-constructor-set-methods.js b/test/browser/test-stream-writable-constructor-set-methods.js index 7eb357e6e6..952b46c1d5 100644 --- a/test/browser/test-stream-writable-constructor-set-methods.js +++ b/test/browser/test-stream-writable-constructor-set-methods.js @@ -1,33 +1,36 @@ 'use strict' const test = require('tape') + const { Writable } = require('../../lib/ours/index') test('writable constructor set methods', function (t) { t.plan(5) - let _writeCalled = false + function _write(d, e, n) { _writeCalled = true } - const w = new Writable({ write: _write }) + const w = new Writable({ + write: _write + }) w.end(Buffer.from('blerg')) - let _writevCalled = false let dLength = 0 + function _writev(d, n) { dLength = d.length _writevCalled = true } - const w2 = new Writable({ writev: _writev }) + const w2 = new Writable({ + writev: _writev + }) w2.cork() - w2.write(Buffer.from('blerg')) w2.write(Buffer.from('blerg')) w2.end() - setImmediate(function () { t.equal(w._write, _write) t.ok(_writeCalled) diff --git a/test/browser/test-stream-writable-decoded-encoding.js b/test/browser/test-stream-writable-decoded-encoding.js index 608e7a89fc..c4b39346e6 100644 --- a/test/browser/test-stream-writable-decoded-encoding.js +++ b/test/browser/test-stream-writable-decoded-encoding.js @@ -1,7 +1,9 @@ 'use strict' const test = require('tape') + const inherits = require('inherits') + 
const stream = require('../../lib/ours/index') function MyWritable(fn, options) { @@ -18,31 +20,30 @@ MyWritable.prototype._write = function (chunk, encoding, callback) { test('decodeStringsTrue', (t) => { t.plan(3) - const m = new MyWritable( function (isBuffer, type, enc) { t.ok(isBuffer) t.equal(type, 'object') - t.equal(enc, 'buffer') - // console.log('ok - decoded string is decoded'); + t.equal(enc, 'buffer') // console.log('ok - decoded string is decoded'); }, - { decodeStrings: true } + { + decodeStrings: true + } ) m.write('some-text', 'utf8') m.end() }) - test('decodeStringsFalse', (t) => { t.plan(3) - const m = new MyWritable( function (isBuffer, type, enc) { t.notOk(isBuffer) t.equal(type, 'string') - t.equal(enc, 'utf8') - // console.log('ok - un-decoded string is not decoded'); + t.equal(enc, 'utf8') // console.log('ok - un-decoded string is not decoded'); }, - { decodeStrings: false } + { + decodeStrings: false + } ) m.write('some-text', 'utf8') m.end() diff --git a/test/browser/test-stream-writev.js b/test/browser/test-stream-writev.js index 66022703a2..a85cafe98f 100644 --- a/test/browser/test-stream-writev.js +++ b/test/browser/test-stream-writev.js @@ -1,9 +1,11 @@ 'use strict' const test = require('tape') + const stream = require('../../lib/ours/index') const queue = [] + for (let decode = 0; decode < 2; decode++) { for (let uncork = 0; uncork < 2; uncork++) { for (let multi = 0; multi < 2; multi++) { @@ -14,11 +16,11 @@ for (let decode = 0; decode < 2; decode++) { function runTest(decode, uncork, multi) { return function (t) { - t.plan(8) + t.plan(8) // console.log('# decode=%j uncork=%j multi=%j', decode, uncork, multi); - // console.log('# decode=%j uncork=%j multi=%j', decode, uncork, multi); let counter = 0 let expectCount = 0 + function cnt(msg) { expectCount++ const expect = expectCount @@ -26,33 +28,67 @@ function runTest(decode, uncork, multi) { if (er) { throw er } + counter++ t.equal(counter, expect) } } - const w = new stream.Writable({ decodeStrings: decode }) + const w = new stream.Writable({ + decodeStrings: decode + }) + w._write = function (chunk, e, cb) { t.ok(false, 'Should not call _write') } const expectChunks = decode ? [ - { encoding: 'buffer', chunk: [104, 101, 108, 108, 111, 44, 32] }, - { encoding: 'buffer', chunk: [119, 111, 114, 108, 100] }, - { encoding: 'buffer', chunk: [33] }, - { encoding: 'buffer', chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] }, - { encoding: 'buffer', chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173] } + { + encoding: 'buffer', + chunk: [104, 101, 108, 108, 111, 44, 32] + }, + { + encoding: 'buffer', + chunk: [119, 111, 114, 108, 100] + }, + { + encoding: 'buffer', + chunk: [33] + }, + { + encoding: 'buffer', + chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] + }, + { + encoding: 'buffer', + chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173] + } ] : [ - { encoding: 'ascii', chunk: 'hello, ' }, - { encoding: 'utf8', chunk: 'world' }, - { encoding: 'buffer', chunk: [33] }, - { encoding: 'binary', chunk: '\nand then...' }, - { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' } + { + encoding: 'ascii', + chunk: 'hello, ' + }, + { + encoding: 'utf8', + chunk: 'world' + }, + { + encoding: 'buffer', + chunk: [33] + }, + { + encoding: 'binary', + chunk: '\nand then...' 
+ }, + { + encoding: 'hex', + chunk: 'facebea7deadbeefdecafbad' + } ] - let actualChunks + w._writev = function (chunks, cb) { actualChunks = chunks.map(function (chunk) { return { @@ -85,7 +121,6 @@ function runTest(decode, uncork, multi) { } w.end(cnt('end')) - w.on('finish', function () { // make sure finish comes after all the write cb cnt('finish')() @@ -96,6 +131,5 @@ function runTest(decode, uncork, multi) { for (let i = 0; i < queue.length; i++) { const tr = queue[i] - test('round ' + i, runTest(tr[0], tr[1], tr[2])) } diff --git a/test/browser/test-stream2-base64-single-char-read-end.js b/test/browser/test-stream2-base64-single-char-read-end.js index 6e09201f67..916ddd2b74 100644 --- a/test/browser/test-stream2-base64-single-char-read-end.js +++ b/test/browser/test-stream2-base64-single-char-read-end.js @@ -1,12 +1,14 @@ 'use strict' const test = require('tape') + const { Readable, Writable } = require('../../lib/ours/index') test('base64 single char read end', function (t) { t.plan(1) - - const src = new Readable({ encoding: 'base64' }) + const src = new Readable({ + encoding: 'base64' + }) const dst = new Writable() let hasRead = false const accum = [] @@ -30,9 +32,7 @@ test('base64 single char read end', function (t) { t.equal(Buffer.concat(accum) + '', 'MQ==') clearTimeout(timeout) }) - src.pipe(dst) - const timeout = setTimeout(function () { t.fail('timed out waiting for _write') }, 100) diff --git a/test/browser/test-stream2-compatibility.js b/test/browser/test-stream2-compatibility.js index bed96d8903..7b61cfbd6c 100644 --- a/test/browser/test-stream2-compatibility.js +++ b/test/browser/test-stream2-compatibility.js @@ -1,17 +1,19 @@ 'use strict' const test = require('tape') + const inherits = require('inherits') + const { Readable } = require('../../lib/ours/index') test('compatibility', function (t) { t.plan(1) - let ondataCalled = 0 function TestReader() { Readable.apply(this) this._buffer = Buffer.alloc(100) + this._buffer.fill('x') this.on('data', function () { @@ -29,6 +31,5 @@ test('compatibility', function (t) { setTimeout(function () { t.equal(ondataCalled, 1) }) - new TestReader().read() }) diff --git a/test/browser/test-stream2-large-read-stall.js b/test/browser/test-stream2-large-read-stall.js index 25e64b503d..10d50e2a70 100644 --- a/test/browser/test-stream2-large-read-stall.js +++ b/test/browser/test-stream2-large-read-stall.js @@ -1,28 +1,25 @@ 'use strict' const test = require('tape') + const { Readable } = require('../../lib/ours/index') test('large object read stall', function (t) { - t.plan(1) - - // If everything aligns so that you do a read(n) of exactly the + t.plan(1) // If everything aligns so that you do a read(n) of exactly the // remaining buffer, then make sure that 'end' still emits. 
const READSIZE = 100 const PUSHSIZE = 20 const PUSHCOUNT = 1000 const HWM = 50 - const r = new Readable({ highWaterMark: HWM }) const rs = r._readableState - r._read = push - r.on('readable', function () { false && console.error('>> readable') + do { false && console.error(' > read(%d)', READSIZE) var ret = r.read(READSIZE) @@ -31,14 +28,12 @@ test('large object read stall', function (t) { false && console.error('<< after read()', ret && ret.length, rs.needReadable, rs.length) }) - r.on('end', function () { t.equal(pushes, PUSHCOUNT + 1) - false && console.error('end') }) - let pushes = 0 + function push() { if (pushes > PUSHCOUNT) { return @@ -50,11 +45,11 @@ test('large object read stall', function (t) { } false && console.error(' push #%d', pushes) + if (r.push(Buffer.alloc(PUSHSIZE))) { setTimeout(push) } - } + } // start the flow - // start the flow r.read(0) }) diff --git a/test/browser/test-stream2-objects.js b/test/browser/test-stream2-objects.js index 986d9d57dc..3bd2bac61e 100644 --- a/test/browser/test-stream2-objects.js +++ b/test/browser/test-stream2-objects.js @@ -1,11 +1,15 @@ 'use strict' const test = require('tape') + const { Readable, Writable } = require('../../lib/ours/index') function toArray(callback) { - const stream = new Writable({ objectMode: true }) + const stream = new Writable({ + objectMode: true + }) const list = [] + stream.write = function (chunk) { list.push(chunk) } @@ -18,13 +22,14 @@ function toArray(callback) { } function fromArray(list) { - const r = new Readable({ objectMode: true }) + const r = new Readable({ + objectMode: true + }) r._read = noop forEach(list, function (chunk) { r.push(chunk) }) r.push(null) - return r } @@ -32,45 +37,77 @@ function noop() {} test('can read objects from stream', function (t) { t.plan(3) - - const r = fromArray([{ one: '1' }, { two: '2' }]) - + const r = fromArray([ + { + one: '1' + }, + { + two: '2' + } + ]) const v1 = r.read() const v2 = r.read() const v3 = r.read() - - t.deepEqual(v1, { one: '1' }) - t.deepEqual(v2, { two: '2' }) + t.deepEqual(v1, { + one: '1' + }) + t.deepEqual(v2, { + two: '2' + }) t.deepEqual(v3, null) }) - test('can pipe objects into stream', function (t) { t.plan(1) - - const r = fromArray([{ one: '1' }, { two: '2' }]) - + const r = fromArray([ + { + one: '1' + }, + { + two: '2' + } + ]) r.pipe( toArray(function (list) { - t.deepEqual(list, [{ one: '1' }, { two: '2' }]) + t.deepEqual(list, [ + { + one: '1' + }, + { + two: '2' + } + ]) }) ) }) - test('read(n) is ignored', function (t) { t.plan(1) - - const r = fromArray([{ one: '1' }, { two: '2' }]) - + const r = fromArray([ + { + one: '1' + }, + { + two: '2' + } + ]) const value = r.read(2) - - t.deepEqual(value, { one: '1' }) + t.deepEqual(value, { + one: '1' + }) }) - test('can read objects from _read (sync)', function (t) { t.plan(1) + const r = new Readable({ + objectMode: true + }) + const list = [ + { + one: '1' + }, + { + two: '2' + } + ] - const r = new Readable({ objectMode: true }) - const list = [{ one: '1' }, { two: '2' }] r._read = function (n) { const item = list.shift() r.push(item || null) @@ -78,16 +115,31 @@ test('can read objects from _read (sync)', function (t) { r.pipe( toArray(function (list) { - t.deepEqual(list, [{ one: '1' }, { two: '2' }]) + t.deepEqual(list, [ + { + one: '1' + }, + { + two: '2' + } + ]) }) ) }) - test('can read objects from _read (async)', function (t) { t.plan(1) + const r = new Readable({ + objectMode: true + }) + const list = [ + { + one: '1' + }, + { + two: '2' + } + ] - const r = new 
Readable({ objectMode: true }) - const list = [{ one: '1' }, { two: '2' }] r._read = function (n) { const item = list.shift() process.nextTick(function () { @@ -97,14 +149,19 @@ test('can read objects from _read (async)', function (t) { r.pipe( toArray(function (list) { - t.deepEqual(list, [{ one: '1' }, { two: '2' }]) + t.deepEqual(list, [ + { + one: '1' + }, + { + two: '2' + } + ]) }) ) }) - test('can read strings as objects', function (t) { t.plan(1) - const r = new Readable({ objectMode: true }) @@ -114,57 +171,45 @@ test('can read strings as objects', function (t) { r.push(str) }) r.push(null) - r.pipe( toArray(function (array) { t.deepEqual(array, list) }) ) }) - test('read(0) for object streams', function (t) { t.plan(1) - const r = new Readable({ objectMode: true }) r._read = noop - r.push('foobar') r.push(null) - r.read(0) - r.pipe( toArray(function (array) { t.deepEqual(array, ['foobar']) }) ) }) - test('falsey values', function (t) { t.plan(1) - const r = new Readable({ objectMode: true }) r._read = noop - r.push(false) r.push(0) r.push('') r.push(null) - r.pipe( toArray(function (array) { t.deepEqual(array, [false, 0, '']) }) ) }) - test('high watermark _read', function (t) { t.plan(5) - const r = new Readable({ highWaterMark: 6, objectMode: true @@ -179,55 +224,53 @@ test('high watermark _read', function (t) { forEach(list, function (c) { r.push(c) }) - const v = r.read() - t.equal(calls, 0) t.equal(v, '1') - const v2 = r.read() t.equal(v2, '2') - const v3 = r.read() t.equal(v3, '3') - t.equal(calls, 1) }) - test('high watermark push', function (t) { t.plan(6) - const r = new Readable({ highWaterMark: 6, objectMode: true }) + r._read = function (n) {} + for (let i = 0; i < 6; i++) { const bool = r.push(i) t.equal(bool, i !== 5) } }) - test('can write objects to stream', function (t) { t.plan(1) - - const w = new Writable({ objectMode: true }) + const w = new Writable({ + objectMode: true + }) w._write = function (chunk, encoding, cb) { - t.deepEqual(chunk, { foo: 'bar' }) + t.deepEqual(chunk, { + foo: 'bar' + }) cb() } w.on('finish', function () {}) - - w.write({ foo: 'bar' }) + w.write({ + foo: 'bar' + }) w.end() }) - test('can write multiple objects to stream', function (t) { t.plan(1) - - const w = new Writable({ objectMode: true }) + const w = new Writable({ + objectMode: true + }) const list = [] w._write = function (chunk, encoding, cb) { @@ -238,7 +281,6 @@ test('can write multiple objects to stream', function (t) { w.on('finish', function () { t.deepEqual(list, [0, 1, 2, 3, 4]) }) - w.write(0) w.write(1) w.write(2) @@ -246,10 +288,8 @@ test('can write multiple objects to stream', function (t) { w.write(4) w.end() }) - test('can write strings as objects', function (t) { t.plan(1) - const w = new Writable({ objectMode: true }) @@ -263,7 +303,6 @@ test('can write strings as objects', function (t) { w.on('finish', function () { t.deepEqual(list, ['0', '1', '2', '3', '4']) }) - w.write('0') w.write('1') w.write('2') @@ -271,10 +310,8 @@ test('can write strings as objects', function (t) { w.write('4') w.end() }) - test('buffers finish until cb is called', function (t) { t.plan(2) - const w = new Writable({ objectMode: true }) @@ -282,7 +319,6 @@ test('buffers finish until cb is called', function (t) { w._write = function (chunk, encoding, cb) { t.equal(chunk, 'foo') - process.nextTick(function () { called = true cb() @@ -292,7 +328,6 @@ test('buffers finish until cb is called', function (t) { w.on('finish', function () { t.equal(called, true) }) - w.write('foo') w.end() }) 
diff --git a/test/browser/test-stream2-pipe-error-handling.js b/test/browser/test-stream2-pipe-error-handling.js index d198505229..e4c74506b9 100644 --- a/test/browser/test-stream2-pipe-error-handling.js +++ b/test/browser/test-stream2-pipe-error-handling.js @@ -1,14 +1,14 @@ 'use strict' const test = require('tape') + const stream = require('../../lib/ours/index') test('Error Listener Catches', function (t) { t.plan(3) - let count = 1000 - const source = new stream.Readable() + source._read = function (n) { n = Math.min(count, n) count -= n @@ -16,41 +16,38 @@ test('Error Listener Catches', function (t) { } let unpipedDest + source.unpipe = function (dest) { unpipedDest = dest stream.Readable.prototype.unpipe.call(this, dest) } const dest = new stream.Writable() + dest._write = function (chunk, encoding, cb) { cb() } source.pipe(dest) - let gotErr = null dest.on('error', function (err) { gotErr = err }) - let unpipedSource dest.on('unpipe', function (src) { unpipedSource = src }) - const err = new Error('This stream turned into bacon.') dest.emit('error', err) t.strictEqual(gotErr, err) t.strictEqual(unpipedSource, source) t.strictEqual(unpipedDest, dest) }) - test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) { t.plan(3) - let count = 1000 - const source = new stream.Readable() + source._read = function (n) { n = Math.min(count, n) count -= n @@ -58,26 +55,25 @@ test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) } let unpipedDest + source.unpipe = function (dest) { unpipedDest = dest stream.Readable.prototype.unpipe.call(this, dest) } const dest = new stream.Writable() + dest._write = function (chunk, encoding, cb) { cb() } source.pipe(dest) - let unpipedSource dest.on('unpipe', function (src) { unpipedSource = src }) - const err = new Error('This stream turned into bacon.') const onerror = global.onerror - dest.emit('error', err) global.onerror = (_u1, _u2, _u3, _u4, gotErr) => { diff --git a/test/browser/test-stream2-pipe-error-once-listener.js b/test/browser/test-stream2-pipe-error-once-listener.js index 071295d03a..da7020bcdd 100644 --- a/test/browser/test-stream2-pipe-error-once-listener.js +++ b/test/browser/test-stream2-pipe-error-once-listener.js @@ -1,7 +1,9 @@ 'use strict' const test = require('tape') + const inherits = require('inherits') + const stream = require('../../lib/ours/index') test('pipe error once listener', function (t) { @@ -10,6 +12,7 @@ test('pipe error once listener', function (t) { const Read = function () { stream.Readable.call(this) } + inherits(Read, stream.Readable) Read.prototype._read = function (size) { @@ -20,6 +23,7 @@ test('pipe error once listener', function (t) { const Write = function () { stream.Writable.call(this) } + inherits(Write, stream.Writable) Write.prototype._write = function (buffer, encoding, cb) { @@ -29,11 +33,9 @@ test('pipe error once listener', function (t) { const read = new Read() const write = new Write() - write.once('error', () => {}) write.once('alldone', function () { t.ok(true) }) - read.pipe(write) }) diff --git a/test/browser/test-stream2-push.js b/test/browser/test-stream2-push.js index f826852c46..5e6cdfb18e 100644 --- a/test/browser/test-stream2-push.js +++ b/test/browser/test-stream2-push.js @@ -1,17 +1,17 @@ 'use strict' const test = require('tape') + const { EventEmitter: EE } = require('events') + const { Readable, Writable } = require('../../lib/ours/index') test('push', function (t) { t.plan(33) - const stream = new Readable({ highWaterMark: 16, 
encoding: 'utf8' }) - const source = new EE() stream._read = function () { @@ -23,19 +23,16 @@ test('push', function (t) { stream.on('end', function () { ended = true }) - source.on('data', function (chunk) { - const ret = stream.push(chunk) - // console.error('data', stream._readableState.length); + const ret = stream.push(chunk) // console.error('data', stream._readableState.length); + if (!ret) { readStop() } }) - source.on('end', function () { stream.push(null) }) - let reading = false function readStart() { @@ -48,6 +45,7 @@ test('push', function (t) { reading = false process.nextTick(function () { const r = stream.read() + if (r !== null) { writer.write(r) } @@ -57,9 +55,7 @@ test('push', function (t) { const writer = new Writable({ decodeStrings: false }) - const written = [] - const expectWritten = [ 'asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg', @@ -75,15 +71,13 @@ test('push', function (t) { process.nextTick(cb) } - writer.on('finish', finish) - - // now emit some chunks. + writer.on('finish', finish) // now emit some chunks. const chunk = 'asdfg' - let set = 0 readStart() data() + function data() { t.ok(reading) source.emit('data', chunk) @@ -94,6 +88,7 @@ test('push', function (t) { t.ok(reading) source.emit('data', chunk) t.notOk(reading) + if (set++ < 5) { setTimeout(data, 10) } else { diff --git a/test/browser/test-stream2-readable-empty-buffer-no-eof.js b/test/browser/test-stream2-readable-empty-buffer-no-eof.js index 794f43d8d5..e1ab001ded 100644 --- a/test/browser/test-stream2-readable-empty-buffer-no-eof.js +++ b/test/browser/test-stream2-readable-empty-buffer-no-eof.js @@ -1,14 +1,12 @@ 'use strict' const test = require('tape') + const { Readable } = require('../../lib/ours/index') test('readable empty buffer no eof 1', function (t) { t.plan(1) - - const r = new Readable() - - // should not end when we get a Buffer(0) or '' as the _read result + const r = new Readable() // should not end when we get a Buffer(0) or '' as the _read result // that just means that there is *temporarily* no data, but to go // ahead and try again later. // @@ -21,41 +19,53 @@ test('readable empty buffer no eof 1', function (t) { const buf = Buffer.alloc(5) buf.fill('x') let reads = 5 + r._read = function (n) { switch (reads--) { case 0: - return r.push(null) // EOF + return r.push(null) + // EOF + case 1: return r.push(buf) + case 2: setTimeout(r.read.bind(r, 0), 50) - return r.push(Buffer.alloc(0)) // Not-EOF! + return r.push(Buffer.alloc(0)) + // Not-EOF! 
+ case 3: setTimeout(r.read.bind(r, 0), 50) return process.nextTick(function () { return r.push(Buffer.alloc(0)) }) + case 4: setTimeout(r.read.bind(r, 0), 50) return setTimeout(function () { return r.push(Buffer.alloc(0)) }) + case 5: return setTimeout(function () { return r.push(buf) }) + default: throw new Error('unreachable') } } const results = [] + function flow() { let chunk + while ((chunk = r.read()) !== null) { results.push(chunk + '') } } + r.on('readable', flow) r.on('end', function () { results.push('EOF') @@ -63,12 +73,13 @@ test('readable empty buffer no eof 1', function (t) { }) flow() }) - test('readable empty buffer no eof 2', function (t) { t.plan(1) - - const r = new Readable({ encoding: 'base64' }) + const r = new Readable({ + encoding: 'base64' + }) let reads = 5 + r._read = function (n) { if (!reads--) { return r.push(null) // EOF @@ -78,12 +89,15 @@ test('readable empty buffer no eof 2', function (t) { } const results = [] + function flow() { let chunk + while ((chunk = r.read()) !== null) { results.push(chunk + '') } } + r.on('readable', flow) r.on('end', function () { results.push('EOF') diff --git a/test/browser/test-stream2-readable-from-list.js b/test/browser/test-stream2-readable-from-list.js index 2bc0809c07..a2957c2be4 100644 --- a/test/browser/test-stream2-readable-from-list.js +++ b/test/browser/test-stream2-readable-from-list.js @@ -1,65 +1,84 @@ 'use strict' const test = require('tape') + const { _fromList: fromList } = require('../../lib/_stream_readable') + const BufferList = require('../../lib/internal/streams/buffer_list') function bufferListFromArray(arr) { const bl = new BufferList() + for (let i = 0; i < arr.length; ++i) { bl.push(arr[i]) } + return bl } test('buffers', function (t) { t.plan(5) - let list = [Buffer.from('foog'), Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')] - list = bufferListFromArray(list) - - // read more than the first element. - let ret = fromList(6, { buffer: list, length: 16 }) - t.equal(ret.toString(), 'foogba') - - // read exactly the first element. - ret = fromList(2, { buffer: list, length: 10 }) - t.equal(ret.toString(), 'rk') - - // read less than the first element. - ret = fromList(2, { buffer: list, length: 8 }) - t.equal(ret.toString(), 'ba') + list = bufferListFromArray(list) // read more than the first element. + + let ret = fromList(6, { + buffer: list, + length: 16 + }) + t.equal(ret.toString(), 'foogba') // read exactly the first element. + + ret = fromList(2, { + buffer: list, + length: 10 + }) + t.equal(ret.toString(), 'rk') // read less than the first element. + + ret = fromList(2, { + buffer: list, + length: 8 + }) + t.equal(ret.toString(), 'ba') // read more than we have. + + ret = fromList(100, { + buffer: list, + length: 6 + }) + t.equal(ret.toString(), 'zykuel') // all consumed. - // read more than we have. - ret = fromList(100, { buffer: list, length: 6 }) - t.equal(ret.toString(), 'zykuel') - - // all consumed. t.same(list, new BufferList()) }) - test('strings', function (t) { t.plan(5) - let list = ['foog', 'bark', 'bazy', 'kuel'] - list = bufferListFromArray(list) - - // read more than the first element. - let ret = fromList(6, { buffer: list, length: 16, decoder: true }) - t.equal(ret, 'foogba') - - // read exactly the first element. - ret = fromList(2, { buffer: list, length: 10, decoder: true }) - t.equal(ret, 'rk') - - // read less than the first element. - ret = fromList(2, { buffer: list, length: 8, decoder: true }) - t.equal(ret, 'ba') - - // read more than we have. 
- ret = fromList(100, { buffer: list, length: 6, decoder: true }) - t.equal(ret, 'zykuel') + list = bufferListFromArray(list) // read more than the first element. + + let ret = fromList(6, { + buffer: list, + length: 16, + decoder: true + }) + t.equal(ret, 'foogba') // read exactly the first element. + + ret = fromList(2, { + buffer: list, + length: 10, + decoder: true + }) + t.equal(ret, 'rk') // read less than the first element. + + ret = fromList(2, { + buffer: list, + length: 8, + decoder: true + }) + t.equal(ret, 'ba') // read more than we have. + + ret = fromList(100, { + buffer: list, + length: 6, + decoder: true + }) + t.equal(ret, 'zykuel') // all consumed. - // all consumed. t.same(list, new BufferList()) }) diff --git a/test/browser/test-stream2-readable-legacy-drain.js b/test/browser/test-stream2-readable-legacy-drain.js index ab72df33f7..92d73eefde 100644 --- a/test/browser/test-stream2-readable-legacy-drain.js +++ b/test/browser/test-stream2-readable-legacy-drain.js @@ -1,14 +1,15 @@ 'use strict' const test = require('tape') + const { Stream, Readable } = require('../../lib/ours/index') test('readable legacy drain', function (t) { t.plan(3) - const r = new Readable() const N = 256 let reads = 0 + r._read = function (n) { return r.push(++reads === N ? null : Buffer.alloc(1)) } @@ -16,11 +17,11 @@ test('readable legacy drain', function (t) { r.on('end', function () { t.ok(true, 'rended') }) - const w = new Stream() w.writable = true let writes = 0 let buffered = 0 + w.write = function (c) { writes += c.length buffered += c.length @@ -32,6 +33,7 @@ test('readable legacy drain', function (t) { if (buffered > 3) { t.ok(false, 'to much buffer') } + buffered = 0 w.emit('drain') } diff --git a/test/browser/test-stream2-readable-non-empty-end.js b/test/browser/test-stream2-readable-non-empty-end.js index dd7bbbc6e3..032a9ae21c 100644 --- a/test/browser/test-stream2-readable-non-empty-end.js +++ b/test/browser/test-stream2-readable-non-empty-end.js @@ -1,13 +1,14 @@ 'use strict' const test = require('tape') + const { Readable } = require('../../lib/ours/index') test('non empty end', function (t) { t.plan(4) - let len = 0 const chunks = new Array(10) + for (let i = 1; i <= 10; i++) { chunks[i - 1] = Buffer.alloc(i) len += i @@ -15,6 +16,7 @@ test('non empty end', function (t) { const test = new Readable() let n = 0 + test._read = function (size) { const chunk = chunks[n++] setTimeout(function () { @@ -23,6 +25,7 @@ test('non empty end', function (t) { } test.on('end', thrower) + function thrower() { throw new Error('this should not happen!') } @@ -31,11 +34,13 @@ test('non empty end', function (t) { test.on('readable', function () { const b = len - bytesread - 1 const res = test.read(b) + if (res) { - bytesread += res.length - // console.error('br=%d len=%d', bytesread, len); + bytesread += res.length // console.error('br=%d len=%d', bytesread, len); + setTimeout(next) } + test.read(0) }) test.read(0) @@ -43,12 +48,10 @@ test('non empty end', function (t) { function next() { // now let's make 'end' happen test.removeListener('end', thrower) - test.on('end', function () { t.ok(true, 'end emitted') - }) + }) // one to get the last byte - // one to get the last byte let r = test.read() t.ok(r) t.equal(r.length, 1) diff --git a/test/browser/test-stream2-readable-wrap-empty.js b/test/browser/test-stream2-readable-wrap-empty.js index 2f86c95ce0..021bd4fdb8 100644 --- a/test/browser/test-stream2-readable-wrap-empty.js +++ b/test/browser/test-stream2-readable-wrap-empty.js @@ -1,23 +1,24 @@ 
'use strict' const test = require('tape') + const { EventEmitter: EE } = require('events') + const Readable = require('../../lib/ours/index') test('wrap empty', function (t) { t.plan(1) - const oldStream = new EE() + oldStream.pause = function () {} + oldStream.resume = function () {} const newStream = new Readable().wrap(oldStream) - newStream .on('readable', function () {}) .on('end', function () { t.ok(true, 'ended') }) - oldStream.emit('end') }) diff --git a/test/browser/test-stream2-readable-wrap.js b/test/browser/test-stream2-readable-wrap.js index e3edc32253..c26d40f74f 100644 --- a/test/browser/test-stream2-readable-wrap.js +++ b/test/browser/test-stream2-readable-wrap.js @@ -1,7 +1,9 @@ 'use strict' const test = require('tape') + const { EventEmitter: EE } = require('events') + const { Readable, Writable } = require('../../lib/ours/index') let run = 0 @@ -9,11 +11,12 @@ let run = 0 function runTest(highWaterMark, objectMode, produce) { test('run #' + ++run, (t) => { t.plan(4) - const old = new EE() - const r = new Readable({ highWaterMark: highWaterMark, objectMode: objectMode }) + const r = new Readable({ + highWaterMark: highWaterMark, + objectMode: objectMode + }) t.equal(r, r.wrap(old)) - let ended = false r.on('end', function () { ended = true @@ -35,25 +38,30 @@ function runTest(highWaterMark, objectMode, produce) { let chunks = 10 let oldEnded = false const expected = [] + function flow() { - flowing = true - // eslint-disable-next-line no-unmodified-loop-condition + flowing = true // eslint-disable-next-line no-unmodified-loop-condition + while (flowing && chunks-- > 0) { const item = produce() - expected.push(item) - // console.log('old.emit', chunks, flowing); - old.emit('data', item) - // console.log('after emit', chunks, flowing); + expected.push(item) // console.log('old.emit', chunks, flowing); + + old.emit('data', item) // console.log('after emit', chunks, flowing); } + if (chunks <= 0) { - oldEnded = true - // console.log('old end', chunks, flowing); + oldEnded = true // console.log('old end', chunks, flowing); + old.emit('end') } } - const w = new Writable({ highWaterMark: highWaterMark * 2, objectMode: objectMode }) + const w = new Writable({ + highWaterMark: highWaterMark * 2, + objectMode: objectMode + }) const written = [] + w._write = function (chunk, encoding, cb) { // console.log('_write', chunk); written.push(chunk) @@ -63,9 +71,7 @@ function runTest(highWaterMark, objectMode, produce) { w.on('finish', function () { performAsserts() }) - r.pipe(w) - flow() function performAsserts() { @@ -79,16 +85,28 @@ function runTest(highWaterMark, objectMode, produce) { runTest(100, false, function () { return Buffer.alloc(100) }) - runTest(10, false, function () { return Buffer.from('xxxxxxxxxx') }) - runTest(1, true, function () { - return { foo: 'bar' } + return { + foo: 'bar' + } }) - -const objectChunks = [5, 'a', false, 0, '', 'xyz', { x: 4 }, 7, [], 555] +const objectChunks = [ + 5, + 'a', + false, + 0, + '', + 'xyz', + { + x: 4 + }, + 7, + [], + 555 +] runTest(1, true, function () { return objectChunks.shift() }) diff --git a/test/browser/test-stream2-set-encoding.js b/test/browser/test-stream2-set-encoding.js index 783a424c88..c395de7579 100644 --- a/test/browser/test-stream2-set-encoding.js +++ b/test/browser/test-stream2-set-encoding.js @@ -1,14 +1,15 @@ 'use strict' const test = require('tape') + const inherits = require('inherits') + const { Readable } = require('../../lib/ours/index') inherits(TestReader, Readable) function TestReader(n, opts) { 
Readable.call(this, opts) - this.pos = 0 this.len = n || 100 } @@ -23,6 +24,7 @@ TestReader.prototype._read = function (n) { } n = Math.min(n, this.len - this.pos) + if (n <= 0) { // double push(null) to test eos handling this.push(null) @@ -31,9 +33,7 @@ TestReader.prototype._read = function (n) { this.pos += n const ret = Buffer.alloc(n) - ret.fill('a') - - // console.log('this.push(ret)', ret); + ret.fill('a') // console.log('this.push(ret)', ret); return this.push(ret) }.bind(this), @@ -43,7 +43,6 @@ TestReader.prototype._read = function (n) { test('setEncoding utf8', function (t) { t.plan(1) - const tr = new TestReader(100) tr.setEncoding('utf8') const out = [] @@ -59,22 +58,19 @@ test('setEncoding utf8', function (t) { 'aaaaaaaaaa', 'aaaaaaaaaa' ] - tr.on('readable', function flow() { let chunk + while ((chunk = tr.read(10)) !== null) { out.push(chunk) } }) - tr.on('end', function () { t.same(out, expect) }) }) - test('setEncoding hex', function (t) { t.plan(1) - const tr = new TestReader(100) tr.setEncoding('hex') const out = [] @@ -100,22 +96,19 @@ test('setEncoding hex', function (t) { '6161616161', '6161616161' ] - tr.on('readable', function flow() { let chunk + while ((chunk = tr.read(10)) !== null) { out.push(chunk) } }) - tr.on('end', function () { t.same(out, expect) }) }) - test('setEncoding hex with read(13)', function (t) { t.plan(1) - const tr = new TestReader(100) tr.setEncoding('hex') const out = [] @@ -137,24 +130,21 @@ test('setEncoding hex with read(13)', function (t) { '6161616161616', '16161' ] - tr.on('readable', function flow() { // console.log('readable once'); let chunk + while ((chunk = tr.read(13)) !== null) { out.push(chunk) } }) - tr.on('end', function () { // console.log('END'); t.same(out, expect) }) }) - test('setEncoding base64', function (t) { t.plan(1) - const tr = new TestReader(100) tr.setEncoding('base64') const out = [] @@ -174,23 +164,22 @@ test('setEncoding base64', function (t) { 'YWFhYWFhYW', 'FhYQ==' ] - tr.on('readable', function flow() { let chunk + while ((chunk = tr.read(10)) !== null) { out.push(chunk) } }) - tr.on('end', function () { t.same(out, expect) }) }) - test('encoding: utf8', function (t) { t.plan(1) - - const tr = new TestReader(100, { encoding: 'utf8' }) + const tr = new TestReader(100, { + encoding: 'utf8' + }) const out = [] const expect = [ 'aaaaaaaaaa', @@ -204,23 +193,22 @@ test('encoding: utf8', function (t) { 'aaaaaaaaaa', 'aaaaaaaaaa' ] - tr.on('readable', function flow() { let chunk + while ((chunk = tr.read(10)) !== null) { out.push(chunk) } }) - tr.on('end', function () { t.same(out, expect) }) }) - test('encoding: hex', function (t) { t.plan(1) - - const tr = new TestReader(100, { encoding: 'hex' }) + const tr = new TestReader(100, { + encoding: 'hex' + }) const out = [] const expect = [ '6161616161', @@ -244,23 +232,22 @@ test('encoding: hex', function (t) { '6161616161', '6161616161' ] - tr.on('readable', function flow() { let chunk + while ((chunk = tr.read(10)) !== null) { out.push(chunk) } }) - tr.on('end', function () { t.same(out, expect) }) }) - test('encoding: hex with read(13)', function (t) { t.plan(1) - - const tr = new TestReader(100, { encoding: 'hex' }) + const tr = new TestReader(100, { + encoding: 'hex' + }) const out = [] const expect = [ '6161616161616', @@ -280,23 +267,22 @@ test('encoding: hex with read(13)', function (t) { '6161616161616', '16161' ] - tr.on('readable', function flow() { let chunk + while ((chunk = tr.read(13)) !== null) { out.push(chunk) } }) - tr.on('end', function () { 
t.same(out, expect) }) }) - test('encoding: base64', function (t) { t.plan(1) - - const tr = new TestReader(100, { encoding: 'base64' }) + const tr = new TestReader(100, { + encoding: 'base64' + }) const out = [] const expect = [ 'YWFhYWFhYW', @@ -314,22 +300,19 @@ test('encoding: base64', function (t) { 'YWFhYWFhYW', 'FhYQ==' ] - tr.on('readable', function flow() { let chunk + while ((chunk = tr.read(10)) !== null) { out.push(chunk) } }) - tr.on('end', function () { t.same(out, expect) }) }) - test('chainable', function (t) { t.plan(1) - const tr = new TestReader(100) t.equal(tr.setEncoding('utf8'), tr) }) diff --git a/test/browser/test-stream2-transform.js b/test/browser/test-stream2-transform.js index 43a1fc5afc..9237ef7694 100644 --- a/test/browser/test-stream2-transform.js +++ b/test/browser/test-stream2-transform.js @@ -1,16 +1,16 @@ 'use strict' const test = require('tape') + const { PassThrough, Transform } = require('../../lib/ours/index') test('writable side consumption', function (t) { t.plan(3) - const tx = new Transform({ highWaterMark: 10 }) - let transformed = 0 + tx._transform = function (chunk, encoding, cb) { transformed += chunk.length tx.push(chunk) @@ -20,8 +20,8 @@ test('writable side consumption', function (t) { for (let i = 1; i <= 10; i++) { tx.write(Buffer.alloc(i)) } - tx.end() + tx.end() t.equal(tx._readableState.length, 10) t.equal(transformed, 10) t.same( @@ -31,51 +31,48 @@ test('writable side consumption', function (t) { [5, 6, 7, 8, 9, 10] ) }) - test('passthrough', function (t) { t.plan(4) - const pt = new PassThrough() - pt.write(Buffer.from('foog')) pt.write(Buffer.from('bark')) pt.write(Buffer.from('bazy')) pt.write(Buffer.from('kuel')) pt.end() - t.equal(pt.read(5).toString(), 'foogb') t.equal(pt.read(5).toString(), 'arkba') t.equal(pt.read(5).toString(), 'zykue') t.equal(pt.read(5).toString(), 'l') }) - test('object passthrough', function (t) { t.plan(7) - - const pt = new PassThrough({ objectMode: true }) - + const pt = new PassThrough({ + objectMode: true + }) pt.write(1) pt.write(true) pt.write(false) pt.write(0) pt.write('foo') pt.write('') - pt.write({ a: 'b' }) + pt.write({ + a: 'b' + }) pt.end() - t.equal(pt.read(), 1) t.equal(pt.read(), true) t.equal(pt.read(), false) t.equal(pt.read(), 0) t.equal(pt.read(), 'foo') t.equal(pt.read(), '') - t.same(pt.read(), { a: 'b' }) + t.same(pt.read(), { + a: 'b' + }) }) - test('simple transform', function (t) { t.plan(4) - const pt = new Transform() + pt._transform = function (c, e, cb) { const ret = Buffer.alloc(c.length) ret.fill('x') @@ -88,17 +85,17 @@ test('simple transform', function (t) { pt.write(Buffer.from('bazy')) pt.write(Buffer.from('kuel')) pt.end() - t.equal(pt.read(5).toString(), 'xxxxx') t.equal(pt.read(5).toString(), 'xxxxx') t.equal(pt.read(5).toString(), 'xxxxx') t.equal(pt.read(5).toString(), 'x') }) - test('simple object transform', function (t) { t.plan(7) + const pt = new Transform({ + objectMode: true + }) - const pt = new Transform({ objectMode: true }) pt._transform = function (c, e, cb) { pt.push(JSON.stringify(c)) cb() @@ -110,9 +107,10 @@ test('simple object transform', function (t) { pt.write(0) pt.write('foo') pt.write('') - pt.write({ a: 'b' }) + pt.write({ + a: 'b' + }) pt.end() - t.equal(pt.read(), '1') t.equal(pt.read(), 'true') t.equal(pt.read(), 'false') @@ -121,11 +119,10 @@ test('simple object transform', function (t) { t.equal(pt.read(), '""') t.equal(pt.read(), '{"a":"b"}') }) - test('async passthrough', function (t) { t.plan(4) - const pt = new Transform() + 
pt._transform = function (chunk, encoding, cb) { setTimeout(function () { pt.push(chunk) @@ -138,7 +135,6 @@ test('async passthrough', function (t) { pt.write(Buffer.from('bazy')) pt.write(Buffer.from('kuel')) pt.end() - pt.on('finish', function () { t.equal(pt.read(5).toString(), 'foogb') t.equal(pt.read(5).toString(), 'arkba') @@ -146,13 +142,10 @@ test('async passthrough', function (t) { t.equal(pt.read(5).toString(), 'l') }) }) - test('assymetric transform (expand)', function (t) { t.plan(7) + const pt = new Transform() // emit each chunk 2 times. - const pt = new Transform() - - // emit each chunk 2 times. pt._transform = function (chunk, encoding, cb) { setTimeout(function () { pt.push(chunk) @@ -168,7 +161,6 @@ test('assymetric transform (expand)', function (t) { pt.write(Buffer.from('bazy')) pt.write(Buffer.from('kuel')) pt.end() - pt.on('finish', function () { t.equal(pt.read(5).toString(), 'foogf') t.equal(pt.read(5).toString(), 'oogba') @@ -179,28 +171,28 @@ test('assymetric transform (expand)', function (t) { t.equal(pt.read(5).toString(), 'el') }) }) - test('assymetric transform (compress)', function (t) { t.plan(3) - - const pt = new Transform() - - // each output is the first char of 3 consecutive chunks, + const pt = new Transform() // each output is the first char of 3 consecutive chunks, // or whatever's left. + pt.state = '' pt._transform = function (chunk, encoding, cb) { if (!chunk) { chunk = '' } + const s = chunk.toString() setTimeout( function () { this.state += s.charAt(0) + if (this.state.length === 3) { pt.push(Buffer.from(this.state)) this.state = '' } + cb() }.bind(this), 10 @@ -228,24 +220,24 @@ test('assymetric transform (compress)', function (t) { pt.write(Buffer.from('bbbb')) pt.write(Buffer.from('cccc')) pt.write(Buffer.from('dddd')) - pt.end() + pt.end() // 'abcdeabcdeabcd' - // 'abcdeabcdeabcd' pt.on('finish', function () { t.equal(pt.read(5).toString(), 'abcde') t.equal(pt.read(5).toString(), 'abcde') t.equal(pt.read(5).toString(), 'abcd') }) -}) - -// this tests for a stall when data is written to a full stream +}) // this tests for a stall when data is written to a full stream // that has empty transforms. 
+ test('complex transform', function (t) { t.plan(2) - let count = 0 let saved = null - const pt = new Transform({ highWaterMark: 3 }) + const pt = new Transform({ + highWaterMark: 3 + }) + pt._transform = function (c, e, cb) { if (count++ === 1) { saved = c @@ -254,6 +246,7 @@ test('complex transform', function (t) { pt.push(saved) saved = null } + pt.push(c) } @@ -270,91 +263,67 @@ test('complex transform', function (t) { t.equal(pt.read(), null) }) }) - pt.write(Buffer.from('abc')) }) - test('passthrough event emission', function (t) { t.plan(11) - const pt = new PassThrough() let emits = 0 pt.on('readable', function () { // console.error('>>> emit readable %d', emits); emits++ }) + pt.write(Buffer.from('foog')) // console.error('need emit 0'); - pt.write(Buffer.from('foog')) - - // console.error('need emit 0'); pt.write(Buffer.from('bark')) - setTimeout(() => { // console.error('should have emitted readable now 1 === %d', emits) t.equal(emits, 1) - t.equal(pt.read(5).toString(), 'foogb') - t.equal(pt.read(5) + '', 'null') + t.equal(pt.read(5) + '', 'null') // console.error('need emit 1'); - // console.error('need emit 1'); + pt.write(Buffer.from('bazy')) // console.error('should have emitted, but not again'); - pt.write(Buffer.from('bazy')) - // console.error('should have emitted, but not again'); - pt.write(Buffer.from('kuel')) + pt.write(Buffer.from('kuel')) // console.error('should have emitted readable now 2 === %d', emits); - // console.error('should have emitted readable now 2 === %d', emits); setTimeout(() => { t.equal(emits, 2) - t.equal(pt.read(5).toString(), 'arkba') t.equal(pt.read(5).toString(), 'zykue') - t.equal(pt.read(5), null) - - // console.error('need emit 2'); + t.equal(pt.read(5), null) // console.error('need emit 2'); pt.end() - setTimeout(() => { t.equal(emits, 3) - t.equal(pt.read(5).toString(), 'l') - t.equal(pt.read(5), null) + t.equal(pt.read(5), null) // console.error('should not have emitted again'); - // console.error('should not have emitted again'); t.equal(emits, 3) }) }) }) }) - test('passthrough event emission reordered', function (t) { t.plan(10) - const pt = new PassThrough() let emits = 0 pt.on('readable', function () { // console.error('emit readable', emits); emits++ }) + pt.write(Buffer.from('foog')) // console.error('need emit 0'); - pt.write(Buffer.from('foog')) - // console.error('need emit 0'); pt.write(Buffer.from('bark')) - setTimeout(() => { // console.error('should have emitted readable now 1 === %d', emits); t.equal(emits, 1) - t.equal(pt.read(5).toString(), 'foogb') - t.equal(pt.read(5), null) + t.equal(pt.read(5), null) // console.error('need emit 1'); - // console.error('need emit 1'); pt.once('readable', function () { t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5), null) // console.error('need emit 2'); - t.equal(pt.read(5), null) - - // console.error('need emit 2'); pt.once('readable', function () { t.equal(pt.read(5).toString(), 'zykue') t.equal(pt.read(5), null) @@ -367,25 +336,20 @@ test('passthrough event emission reordered', function (t) { }) pt.write(Buffer.from('kuel')) }) - pt.write(Buffer.from('bazy')) }) }) - test('passthrough facaded', function (t) { - t.plan(1) + t.plan(1) // console.error('passthrough facaded'); - // console.error('passthrough facaded'); const pt = new PassThrough() const datas = [] pt.on('data', function (chunk) { datas.push(chunk.toString()) }) - pt.on('end', function () { t.same(datas, ['foog', 'bark', 'bazy', 'kuel']) }) - pt.write(Buffer.from('foog')) setTimeout(function () { 
pt.write(Buffer.from('bark')) @@ -400,12 +364,13 @@ test('passthrough facaded', function (t) { }, 10) }, 10) }) - test('object transform (json parse)', function (t) { - t.plan(5) + t.plan(5) // console.error('json parse stream'); + + const jp = new Transform({ + objectMode: true + }) - // console.error('json parse stream'); - const jp = new Transform({ objectMode: true }) jp._transform = function (data, encoding, cb) { try { jp.push(JSON.parse(data)) @@ -413,37 +378,50 @@ test('object transform (json parse)', function (t) { } catch (er) { cb(er) } - } - - // anything except null/undefined is fine. + } // anything except null/undefined is fine. // those are "magic" in the stream API, because they signal EOF. - const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }] + const objects = [ + { + foo: 'bar' + }, + 100, + 'string', + { + nested: { + things: [ + { + foo: 'bar' + }, + 100, + 'string' + ] + } + } + ] let ended = false jp.on('end', function () { ended = true }) - forEach(objects, function (obj) { jp.write(JSON.stringify(obj)) const res = jp.read() t.same(res, obj) }) + jp.end() // read one more time to get the 'end' event - jp.end() - // read one more time to get the 'end' event jp.read() - process.nextTick(function () { t.ok(ended) }) }) - test('object transform (json stringify)', function (t) { - t.plan(5) + t.plan(5) // console.error('json parse stream'); + + const js = new Transform({ + objectMode: true + }) - // console.error('json parse stream'); - const js = new Transform({ objectMode: true }) js._transform = function (data, encoding, cb) { try { js.push(JSON.stringify(data)) @@ -451,27 +429,39 @@ test('object transform (json stringify)', function (t) { } catch (er) { cb(er) } - } - - // anything except null/undefined is fine. + } // anything except null/undefined is fine. // those are "magic" in the stream API, because they signal EOF. 
- const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }] + const objects = [ + { + foo: 'bar' + }, + 100, + 'string', + { + nested: { + things: [ + { + foo: 'bar' + }, + 100, + 'string' + ] + } + } + ] let ended = false js.on('end', function () { ended = true }) - forEach(objects, function (obj) { js.write(obj) const res = js.read() t.equal(res, JSON.stringify(obj)) }) + js.end() // read one more time to get the 'end' event - js.end() - // read one more time to get the 'end' event js.read() - process.nextTick(function () { t.ok(ended) }) diff --git a/test/browser/test-stream2-unpipe-drain.js b/test/browser/test-stream2-unpipe-drain.js index 72b137a0c1..4adff6c03d 100644 --- a/test/browser/test-stream2-unpipe-drain.js +++ b/test/browser/test-stream2-unpipe-drain.js @@ -1,8 +1,11 @@ 'use strict' const test = require('tape') + const crypto = require('crypto') + const inherits = require('inherits') + const stream = require('../../lib/ours/index') test('unpipe drain', function (t) { @@ -19,6 +22,7 @@ test('unpipe drain', function (t) { function TestWriter() { stream.Writable.call(this) } + inherits(TestWriter, stream.Writable) TestWriter.prototype._write = function (buffer, encoding, callback) { @@ -32,6 +36,7 @@ test('unpipe drain', function (t) { stream.Readable.call(this) this.reads = 0 } + inherits(TestReader, stream.Readable) TestReader.prototype._read = function (size) { @@ -41,13 +46,10 @@ test('unpipe drain', function (t) { const src1 = new TestReader() const src2 = new TestReader() - src1.pipe(dest) - src1.once('readable', function () { process.nextTick(function () { src2.pipe(dest) - src2.once('readable', function () { process.nextTick(function () { src1.unpipe(dest) @@ -55,7 +57,6 @@ test('unpipe drain', function (t) { }) }) }) - dest.on('unpipe', function () { t.equal(src1.reads, 2) t.equal(src2.reads, 1) diff --git a/test/browser/test-stream2-writable.js b/test/browser/test-stream2-writable.js index 834ab94523..a04d7c6eb3 100644 --- a/test/browser/test-stream2-writable.js +++ b/test/browser/test-stream2-writable.js @@ -1,7 +1,9 @@ 'use strict' const test = require('tape') + const inherits = require('inherits') + const { Duplex, Writable } = require('../../lib/ours/index') inherits(TestWriter, Writable) @@ -23,6 +25,7 @@ TestWriter.prototype._write = function (chunk, encoding, cb) { Math.floor(Math.random() * 10) ) } + inherits(Processstdout, Writable) function Processstdout() { @@ -35,7 +38,9 @@ Processstdout.prototype._write = function (chunk, encoding, cb) { // console.log(chunk.toString()); cb() } + const chunks = new Array(50) + for (let i = 0; i < chunks.length; i++) { chunks[i] = new Array(i + 1).join('x') } @@ -46,36 +51,31 @@ if (!process.stdout) { test('write fast', function (t) { t.plan(1) - const tw = new TestWriter({ highWaterMark: 100 }) - tw.on('finish', function () { t.same(tw.buffer, chunks, 'got chunks in the right order') }) - forEach(chunks, function (chunk) { // screw backpressure. Just buffer it all up. 
tw.write(chunk) }) tw.end() }) - test('write slow', function (t) { t.plan(1) - const tw = new TestWriter({ highWaterMark: 100 }) - tw.on('finish', function () { t.same(tw.buffer, chunks, 'got chunks in the right order') }) - let i = 0 + ;(function W() { tw.write(chunks[i++]) + if (i < chunks.length) { setTimeout(W, 10) } else { @@ -83,28 +83,24 @@ test('write slow', function (t) { } })() }) - test('write backpressure', function (t) { t.plan(19) - const tw = new TestWriter({ highWaterMark: 50 }) - let drains = 0 - tw.on('finish', function () { t.same(tw.buffer, chunks, 'got chunks in the right order') t.equal(drains, 17) }) - tw.on('drain', function () { drains++ }) - let i = 0 + ;(function W() { let ret + do { ret = tw.write(chunks[i++]) } while (ret !== false && i < chunks.length) @@ -117,14 +113,11 @@ test('write backpressure', function (t) { } })() }) - test('write bufferize', function (t) { t.plan(50) - const tw = new TestWriter({ highWaterMark: 100 }) - const encodings = [ 'hex', 'utf8', @@ -138,13 +131,11 @@ test('write bufferize', function (t) { 'utf-16le', undefined ] - tw.on('finish', function () { forEach(chunks, function (chunk, i) { const actual = Buffer.from(tw.buffer[i]) - chunk = Buffer.from(chunk) + chunk = Buffer.from(chunk) // Some combination of encoding and length result in the last byte replaced by two extra null bytes - // Some combination of encoding and length result in the last byte replaced by two extra null bytes if (actual[actual.length - 1] === 0) { chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) } @@ -152,7 +143,6 @@ test('write bufferize', function (t) { t.same(actual, chunk, 'got the expected chunks ' + i) }) }) - forEach(chunks, function (chunk, i) { const enc = encodings[i % encodings.length] chunk = Buffer.from(chunk) @@ -160,10 +150,8 @@ test('write bufferize', function (t) { }) tw.end() }) - test('write no bufferize', function (t) { t.plan(100) - const tw = new TestWriter({ highWaterMark: 100, decodeStrings: false @@ -188,13 +176,11 @@ test('write no bufferize', function (t) { 'utf-16le', undefined ] - tw.on('finish', function () { forEach(chunks, function (chunk, i) { const actual = Buffer.from(tw.buffer[i]) - chunk = Buffer.from(chunk) + chunk = Buffer.from(chunk) // Some combination of encoding and length result in the last byte replaced by two extra null bytes - // Some combination of encoding and length result in the last byte replaced by two extra null bytes if (actual[actual.length - 1] === 0) { chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) } @@ -202,7 +188,6 @@ test('write no bufferize', function (t) { t.same(actual, chunk, 'got the expected chunks ' + i) }) }) - forEach(chunks, function (chunk, i) { const enc = encodings[i % encodings.length] chunk = Buffer.from(chunk) @@ -210,10 +195,8 @@ test('write no bufferize', function (t) { }) tw.end() }) - test('write callbacks', function (t) { t.plan(2) - const callbacks = chunks .map(function (chunk, i) { return [ @@ -228,64 +211,51 @@ test('write callbacks', function (t) { return set }, {}) callbacks._called = [] - const tw = new TestWriter({ highWaterMark: 100 }) - tw.on('finish', function () { process.nextTick(function () { t.same(tw.buffer, chunks, 'got chunks in the right order') t.same(callbacks._called, chunks, 'called all callbacks') }) }) - forEach(chunks, function (chunk, i) { tw.write(chunk, callbacks['callback-' + i]) }) tw.end() }) - test('end callback', function (t) { t.plan(1) - const tw = new TestWriter() tw.end(() => { 
t.ok(true) }) }) - test('end callback with chunk', function (t) { t.plan(1) - const tw = new TestWriter() tw.end(Buffer.from('hello world'), () => { t.ok(true) }) }) - test('end callback with chunk and encoding', function (t) { t.plan(1) - const tw = new TestWriter() tw.end('hello world', 'ascii', () => { t.ok(true) }) }) - test('end callback after .write() call', function (t) { t.plan(1) - const tw = new TestWriter() tw.write(Buffer.from('hello world')) tw.end(() => { t.ok(true) }) }) - test('end callback called after write callback', function (t) { t.plan(1) - const tw = new TestWriter() let writeCalledback = false tw.write(Buffer.from('hello world'), function () { @@ -295,24 +265,26 @@ test('end callback called after write callback', function (t) { t.equal(writeCalledback, true) }) }) - test('encoding should be ignored for buffers', function (t) { t.plan(1) - const tw = new Writable() const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb' + tw._write = function (chunk, encoding, cb) { t.equal(chunk.toString('hex'), hex) } + const buf = Buffer.from(hex, 'hex') tw.write(buf, 'binary') }) - test('writables are not pipable', function (t) { t.plan(1) + const w = new Writable({ + autoDestroy: false + }) - const w = new Writable({ autoDestroy: false }) w._write = function () {} + let gotError = false w.on('error', function (er) { gotError = true @@ -320,13 +292,14 @@ test('writables are not pipable', function (t) { w.pipe(process.stdout) t.ok(gotError) }) - test('duplexes are pipable', function (t) { t.plan(1) - const d = new Duplex() + d._read = function () {} + d._write = function () {} + let gotError = false d.on('error', function (er) { gotError = true @@ -334,12 +307,12 @@ test('duplexes are pipable', function (t) { d.pipe(process.stdout) t.notOk(gotError) }) - test('end(chunk) two times is an error', function (t) { t.plan(2) - const w = new Writable() + w._write = function () {} + let gotError = false w.on('error', function (er) { gotError = true @@ -351,12 +324,11 @@ test('end(chunk) two times is an error', function (t) { t.ok(gotError) }) }) - test('dont end while writing', function (t) { t.plan(2) - const w = new Writable() let wrote = false + w._write = function (chunk, e, cb) { t.notOk(this.writing) wrote = true @@ -366,39 +338,40 @@ test('dont end while writing', function (t) { cb() }) } + w.on('finish', function () { t.ok(wrote) }) w.write(Buffer.alloc(0)) w.end() }) - test('finish does not come before write cb', function (t) { t.plan(1) - const w = new Writable() let writeCb = false + w._write = function (chunk, e, cb) { setTimeout(function () { writeCb = true cb() }, 10) } + w.on('finish', function () { t.ok(writeCb) }) w.write(Buffer.alloc(0)) w.end() }) - test('finish does not come before sync _write cb', function (t) { t.plan(1) - const w = new Writable() let writeCb = false + w._write = function (chunk, e, cb) { cb() } + w.on('finish', function () { t.ok(writeCb) }) @@ -407,18 +380,17 @@ test('finish does not come before sync _write cb', function (t) { }) w.end() }) - test('finish is emitted if last chunk is empty', function (t) { t.plan(1) - const w = new Writable() + w._write = function (chunk, e, cb) { process.nextTick(cb) } + w.on('finish', () => { t.ok(true) }) - w.write(Buffer.alloc(1)) w.end(Buffer.alloc(0)) }) diff --git a/test/browser/test-stream3-pause-then-read.js b/test/browser/test-stream3-pause-then-read.js index f7b22f7b46..f2f33b6026 100644 --- a/test/browser/test-stream3-pause-then-read.js +++ b/test/browser/test-stream3-pause-then-read.js @@ -1,18 +1,20 @@ 'use 
strict' const test = require('tape') + const { Readable, Writable } = require('../../lib/ours/index') test('pause then read', function (t) { t.plan(7) - const totalChunks = 100 const chunkSize = 99 const expectTotalData = totalChunks * chunkSize let expectEndingData = expectTotalData - - const r = new Readable({ highWaterMark: 1000 }) + const r = new Readable({ + highWaterMark: 1000 + }) let chunks = totalChunks + r._read = function (n) { if (!(chunks % 2)) { setImmediate(push) @@ -24,18 +26,20 @@ test('pause then read', function (t) { } let totalPushed = 0 + function push() { const chunk = chunks-- > 0 ? Buffer.alloc(chunkSize) : null + if (chunk) { totalPushed += chunk.length chunk.fill('x') } + r.push(chunk) } - read100() + read100() // first we read 100 bytes - // first we read 100 bytes function read100() { readn(100, onData) } @@ -45,6 +49,7 @@ test('pause then read', function (t) { expectEndingData -= n ;(function read() { const c = r.read(n) + if (!c) { r.once('readable', read) } else { @@ -53,49 +58,50 @@ test('pause then read', function (t) { then() } })() - } + } // then we listen to some data events - // then we listen to some data events function onData() { - expectEndingData -= 100 - // console.error('onData'); + expectEndingData -= 100 // console.error('onData'); + let seen = 0 r.on('data', function od(c) { seen += c.length + if (seen >= 100) { // seen enough r.removeListener('data', od) r.pause() + if (seen > 100) { // oh no, seen too much! // put the extra back. const diff = seen - 100 - r.unshift(c.slice(c.length - diff)) - // console.error('seen too much', seen, diff) - } + r.unshift(c.slice(c.length - diff)) // console.error('seen too much', seen, diff) + } // Nothing should be lost in between - // Nothing should be lost in between setImmediate(pipeLittle) } }) - } + } // Just pipe 200 bytes, then unshift the extra and unpipe - // Just pipe 200 bytes, then unshift the extra and unpipe function pipeLittle() { - expectEndingData -= 200 - // console.error('pipe a little'); + expectEndingData -= 200 // console.error('pipe a little'); + const w = new Writable() let written = 0 w.on('finish', function () { t.equal(written, 200) setImmediate(read1234) }) + w._write = function (chunk, encoding, cb) { written += chunk.length + if (written >= 200) { r.unpipe(w) w.end() cb() + if (written > 200) { const diff = written - 200 written -= diff @@ -105,10 +111,10 @@ test('pause then read', function (t) { setImmediate(cb) } } + r.pipe(w) - } + } // now read 1234 more bytes - // now read 1234 more bytes function read1234() { readn(1234, resumePause) } @@ -133,10 +139,12 @@ test('pause then read', function (t) { // console.error('pipe the rest'); const w = new Writable() let written = 0 + w._write = function (chunk, encoding, cb) { written += chunk.length cb() } + w.on('finish', function () { // console.error('written', written, totalPushed); t.equal(written, expectEndingData) diff --git a/test/common/fixtures.js b/test/common/fixtures.js index 3ee87e8b2d..d6fcd811b8 100644 --- a/test/common/fixtures.js +++ b/test/common/fixtures.js @@ -1,31 +1,32 @@ -'use strict'; +'use strict' -const path = require('path'); -const fs = require('fs'); -const { pathToFileURL } = require('url'); +const path = require('path') -const fixturesDir = path.join(__dirname, '..', 'fixtures'); +const fs = require('fs') + +const { pathToFileURL } = require('url') + +const fixturesDir = path.join(__dirname, '..', 'fixtures') function fixturesPath(...args) { - return path.join(fixturesDir, ...args); + return 
path.join(fixturesDir, ...args) } function fixturesFileURL(...args) { - return pathToFileURL(fixturesPath(...args)); + return pathToFileURL(fixturesPath(...args)) } function readFixtureSync(args, enc) { - if (Array.isArray(args)) - return fs.readFileSync(fixturesPath(...args), enc); - return fs.readFileSync(fixturesPath(args), enc); + if (Array.isArray(args)) return fs.readFileSync(fixturesPath(...args), enc) + return fs.readFileSync(fixturesPath(args), enc) } function readFixtureKey(name, enc) { - return fs.readFileSync(fixturesPath('keys', name), enc); + return fs.readFileSync(fixturesPath('keys', name), enc) } function readFixtureKeys(enc, ...names) { - return names.map((name) => readFixtureKey(name, enc)); + return names.map((name) => readFixtureKey(name, enc)) } module.exports = { @@ -34,5 +35,5 @@ module.exports = { fileURL: fixturesFileURL, readSync: readFixtureSync, readKey: readFixtureKey, - readKeys: readFixtureKeys, -}; + readKeys: readFixtureKeys +} diff --git a/test/common/fixtures.mjs b/test/common/fixtures.mjs index d6f7f6c092..372fabf88d 100644 --- a/test/common/fixtures.mjs +++ b/test/common/fixtures.mjs @@ -1,17 +1,5 @@ -import fixtures from './fixtures.js'; +import fixtures from './fixtures.js' -const { - fixturesDir, - path, - fileURL, - readSync, - readKey, -} = fixtures; +const { fixturesDir, path, fileURL, readSync, readKey } = fixtures -export { - fixturesDir, - path, - fileURL, - readSync, - readKey, -}; +export { fixturesDir, path, fileURL, readSync, readKey } diff --git a/test/common/index.js b/test/common/index.js index 147676c34a..c5c369f574 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -20,256 +20,270 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. /* eslint-disable node-core/crypto-check */ -'use strict'; -const process = global.process; // Some tests tamper with the process global. +'use strict' -const assert = require('assert'); -const { exec, execSync, spawnSync } = require('child_process'); -const fs = require('fs'); -// Do not require 'os' until needed so that test-os-checked-function can +const process = global.process // Some tests tamper with the process global. + +const assert = require('assert') + +const { exec, execSync, spawnSync } = require('child_process') + +const fs = require('fs') // Do not require 'os' until needed so that test-os-checked-function can // monkey patch it. If 'os' is required here, that test will fail. -const path = require('path'); -const util = require('util'); -const { isMainThread } = require('worker_threads'); -const tmpdir = require('./tmpdir'); -const bits = ['arm64', 'mips', 'mipsel', 'ppc64', 'riscv64', 's390x', 'x64'] - .includes(process.arch) ? 64 : 32; -const hasIntl = !!process.config.variables.v8_enable_i18n_support; +const path = require('path') -const { - atob, - btoa -} = require('buffer'); +const util = require('util') + +const { isMainThread } = require('worker_threads') -// Some tests assume a umask of 0o022 so set that up front. Tests that need a +const tmpdir = require('./tmpdir') + +const bits = ['arm64', 'mips', 'mipsel', 'ppc64', 'riscv64', 's390x', 'x64'].includes(process.arch) ? 64 : 32 +const hasIntl = !!process.config.variables.v8_enable_i18n_support + +const { atob, btoa } = require('buffer') // Some tests assume a umask of 0o022 so set that up front. Tests that need a // different umask will set it themselves. // // Workers can read, but not set the umask, so check that this is the main // thread. 
-if (isMainThread) - process.umask(0o022); - -const noop = () => {}; - -const hasCrypto = Boolean(process.versions.openssl) && - !process.env.NODE_SKIP_CRYPTO; -const hasOpenSSL3 = hasCrypto && - require('crypto').constants.OPENSSL_VERSION_NUMBER >= 805306368; +if (isMainThread) process.umask(0o022) -const hasQuic = hasCrypto && !!process.config.variables.openssl_quic; +const noop = () => {} -// Check for flags. Skip this for workers (both, the `cluster` module and +const hasCrypto = Boolean(process.versions.openssl) && !process.env.NODE_SKIP_CRYPTO +const hasOpenSSL3 = hasCrypto && require('crypto').constants.OPENSSL_VERSION_NUMBER >= 805306368 +const hasQuic = hasCrypto && !!process.config.variables.openssl_quic // Check for flags. Skip this for workers (both, the `cluster` module and // `worker_threads`) and child processes. // If the binary was built without-ssl then the crypto flags are // invalid (bad option). The test itself should handle this case. -if (process.argv.length === 2 && - !process.env.NODE_SKIP_FLAG_CHECK && - isMainThread && - hasCrypto && - require('cluster').isPrimary && - fs.existsSync(process.argv[1])) { + +if ( + process.argv.length === 2 && + !process.env.NODE_SKIP_FLAG_CHECK && + isMainThread && + hasCrypto && + require('cluster').isPrimary && + fs.existsSync(process.argv[1]) +) { // The copyright notice is relatively big and the flags could come afterwards. - const bytesToRead = 1500; - const buffer = Buffer.allocUnsafe(bytesToRead); - const fd = fs.openSync(process.argv[1], 'r'); - const bytesRead = fs.readSync(fd, buffer, 0, bytesToRead); - fs.closeSync(fd); - const source = buffer.toString('utf8', 0, bytesRead); - - const flagStart = source.indexOf('// Flags: --') + 10; + const bytesToRead = 1500 + const buffer = Buffer.allocUnsafe(bytesToRead) + const fd = fs.openSync(process.argv[1], 'r') + const bytesRead = fs.readSync(fd, buffer, 0, bytesToRead) + fs.closeSync(fd) + const source = buffer.toString('utf8', 0, bytesRead) + const flagStart = source.indexOf('// Flags: --') + 10 + if (flagStart !== 9) { - let flagEnd = source.indexOf('\n', flagStart); - // Normalize different EOL. + let flagEnd = source.indexOf('\n', flagStart) // Normalize different EOL. + if (source[flagEnd - 1] === '\r') { - flagEnd--; + flagEnd-- } - const flags = source - .substring(flagStart, flagEnd) - .replace(/_/g, '-') - .split(' '); - const args = process.execArgv.map((arg) => arg.replace(/_/g, '-')); + + const flags = source.substring(flagStart, flagEnd).replace(/_/g, '-').split(' ') + const args = process.execArgv.map((arg) => arg.replace(/_/g, '-')) + for (const flag of flags) { - if (!args.includes(flag) && - // If the binary is build without `intl` the inspect option is - // invalid. The test itself should handle this case. - (process.features.inspector || !flag.startsWith('--inspect'))) { + if ( + !args.includes(flag) && // If the binary is build without `intl` the inspect option is + // invalid. The test itself should handle this case. + (process.features.inspector || !flag.startsWith('--inspect')) + ) { console.log( 'NOTE: The test started as a child_process using these flags:', util.inspect(flags), 'Use NODE_SKIP_FLAG_CHECK to run the test with the original flags.' 
- ); - const args = [...flags, ...process.execArgv, ...process.argv.slice(1)]; - const options = { encoding: 'utf8', stdio: 'inherit' }; - const result = spawnSync(process.execPath, args, options); + ) + const args = [...flags, ...process.execArgv, ...process.argv.slice(1)] + const options = { + encoding: 'utf8', + stdio: 'inherit' + } + const result = spawnSync(process.execPath, args, options) + if (result.signal) { - process.kill(0, result.signal); + process.kill(0, result.signal) } else { - process.exit(result.status); + process.exit(result.status) } } } } } -const isWindows = process.platform === 'win32'; -const isAIX = process.platform === 'aix'; -const isSunOS = process.platform === 'sunos'; -const isFreeBSD = process.platform === 'freebsd'; -const isOpenBSD = process.platform === 'openbsd'; -const isLinux = process.platform === 'linux'; -const isOSX = process.platform === 'darwin'; +const isWindows = process.platform === 'win32' +const isAIX = process.platform === 'aix' +const isSunOS = process.platform === 'sunos' +const isFreeBSD = process.platform === 'freebsd' +const isOpenBSD = process.platform === 'openbsd' +const isLinux = process.platform === 'linux' +const isOSX = process.platform === 'darwin' + const isPi = (() => { try { + var _$exec + // Normal Raspberry Pi detection is to find the `Raspberry Pi` string in // the contents of `/sys/firmware/devicetree/base/model` but that doesn't // work inside a container. Match the chipset model number instead. - const cpuinfo = fs.readFileSync('/proc/cpuinfo', { encoding: 'utf8' }); - return /^Hardware\s*:\s*(.*)$/im.exec(cpuinfo)?.[1] === 'BCM2835'; + const cpuinfo = fs.readFileSync('/proc/cpuinfo', { + encoding: 'utf8' + }) + return ( + ((_$exec = /^Hardware\s*:\s*(.*)$/im.exec(cpuinfo)) === null || _$exec === undefined ? undefined : _$exec[1]) === + 'BCM2835' + ) } catch { - return false; + return false } -})(); - -const isDumbTerminal = process.env.TERM === 'dumb'; +})() -const buildType = process.config.target_defaults ? - process.config.target_defaults.default_configuration : - 'Release'; +const isDumbTerminal = process.env.TERM === 'dumb' +const buildType = process.config.target_defaults ? process.config.target_defaults.default_configuration : 'Release' // If env var is set then enable async_hook hooks for all tests. -// If env var is set then enable async_hook hooks for all tests. 
if (process.env.NODE_TEST_WITH_ASYNC_HOOKS) { - const destroydIdsList = {}; - const destroyListList = {}; - const initHandles = {}; - const { internalBinding } = require('internal/test/binding'); - const async_wrap = internalBinding('async_wrap'); + const destroydIdsList = {} + const destroyListList = {} + const initHandles = {} + + const { internalBinding } = require('internal/test/binding') + const async_wrap = internalBinding('async_wrap') process.on('exit', () => { // Iterate through handles to make sure nothing crashes - for (const k in initHandles) - util.inspect(initHandles[k]); - }); + for (const k in initHandles) util.inspect(initHandles[k]) + }) + const _queueDestroyAsyncId = async_wrap.queueDestroyAsyncId - const _queueDestroyAsyncId = async_wrap.queueDestroyAsyncId; async_wrap.queueDestroyAsyncId = function queueDestroyAsyncId(id) { if (destroyListList[id] !== undefined) { - process._rawDebug(destroyListList[id]); - process._rawDebug(); - throw new Error(`same id added to destroy list twice (${id})`); + process._rawDebug(destroyListList[id]) + + process._rawDebug() + + throw new Error(`same id added to destroy list twice (${id})`) } - destroyListList[id] = util.inspect(new Error()); - _queueDestroyAsyncId(id); - }; - - require('async_hooks').createHook({ - init(id, ty, tr, resource) { - if (initHandles[id]) { - process._rawDebug( - `Is same resource: ${resource === initHandles[id].resource}`); - process._rawDebug(`Previous stack:\n${initHandles[id].stack}\n`); - throw new Error(`init called twice for same id (${id})`); - } - initHandles[id] = { - resource, - stack: util.inspect(new Error()).substr(6) - }; - }, - before() { }, - after() { }, - destroy(id) { - if (destroydIdsList[id] !== undefined) { - process._rawDebug(destroydIdsList[id]); - process._rawDebug(); - throw new Error(`destroy called for same id (${id})`); - } - destroydIdsList[id] = util.inspect(new Error()); - }, - }).enable(); -} -let opensslCli = null; -let inFreeBSDJail = null; -let localhostIPv4 = null; + destroyListList[id] = util.inspect(new Error()) -const localIPv6Hosts = - isLinux ? [ - // Debian/Ubuntu - 'ip6-localhost', - 'ip6-loopback', + _queueDestroyAsyncId(id) + } - // SUSE - 'ipv6-localhost', - 'ipv6-loopback', + require('async_hooks') + .createHook({ + init(id, ty, tr, resource) { + if (initHandles[id]) { + process._rawDebug(`Is same resource: ${resource === initHandles[id].resource}`) - // Typically universal - 'localhost', - ] : [ 'localhost' ]; + process._rawDebug(`Previous stack:\n${initHandles[id].stack}\n`) -const PIPE = (() => { - const localRelative = path.relative(process.cwd(), `${tmpdir.path}/`); - const pipePrefix = isWindows ? '\\\\.\\pipe\\' : localRelative; - const pipeName = `node-test.${process.pid}.sock`; - return path.join(pipePrefix, pipeName); -})(); + throw new Error(`init called twice for same id (${id})`) + } + + initHandles[id] = { + resource, + stack: util.inspect(new Error()).substr(6) + } + }, + + before() {}, + + after() {}, -// Check that when running a test with + destroy(id) { + if (destroydIdsList[id] !== undefined) { + process._rawDebug(destroydIdsList[id]) + + process._rawDebug() + + throw new Error(`destroy called for same id (${id})`) + } + + destroydIdsList[id] = util.inspect(new Error()) + } + }) + .enable() +} + +let opensslCli = null +let inFreeBSDJail = null +let localhostIPv4 = null +const localIPv6Hosts = isLinux + ? 
[ + // Debian/Ubuntu + 'ip6-localhost', + 'ip6-loopback', // SUSE + 'ipv6-localhost', + 'ipv6-loopback', // Typically universal + 'localhost' + ] + : ['localhost'] + +const PIPE = (() => { + const localRelative = path.relative(process.cwd(), `${tmpdir.path}/`) + const pipePrefix = isWindows ? '\\\\.\\pipe\\' : localRelative + const pipeName = `node-test.${process.pid}.sock` + return path.join(pipePrefix, pipeName) +})() // Check that when running a test with // `$node --abort-on-uncaught-exception $file child` // the process aborts. + function childShouldThrowAndAbort() { - let testCmd = ''; + let testCmd = '' + if (!isWindows) { // Do not create core files, as it can take a lot of disk space on // continuous testing and developers' machines - testCmd += 'ulimit -c 0 && '; + testCmd += 'ulimit -c 0 && ' } - testCmd += `"${process.argv[0]}" --abort-on-uncaught-exception `; - testCmd += `"${process.argv[1]}" child`; - const child = exec(testCmd); + + testCmd += `"${process.argv[0]}" --abort-on-uncaught-exception ` + testCmd += `"${process.argv[1]}" child` + const child = exec(testCmd) child.on('exit', function onExit(exitCode, signal) { - const errMsg = 'Test should have aborted ' + - `but instead exited with exit code ${exitCode}` + - ` and signal ${signal}`; - assert(nodeProcessAborted(exitCode, signal), errMsg); - }); + const errMsg = + 'Test should have aborted ' + `but instead exited with exit code ${exitCode}` + ` and signal ${signal}` + assert(nodeProcessAborted(exitCode, signal), errMsg) + }) } function createZeroFilledFile(filename) { - const fd = fs.openSync(filename, 'w'); - fs.ftruncateSync(fd, 10 * 1024 * 1024); - fs.closeSync(fd); + const fd = fs.openSync(filename, 'w') + fs.ftruncateSync(fd, 10 * 1024 * 1024) + fs.closeSync(fd) } - -const pwdCommand = isWindows ? - ['cmd.exe', ['/d', '/c', 'cd']] : - ['pwd', []]; - +const pwdCommand = isWindows ? ['cmd.exe', ['/d', '/c', 'cd']] : ['pwd', []] function platformTimeout(ms) { - const multipliers = typeof ms === 'bigint' ? - { two: 2n, four: 4n, seven: 7n } : { two: 2, four: 4, seven: 7 }; - - if (process.features.debug) - ms = multipliers.two * ms; - - if (isAIX) - return multipliers.two * ms; // Default localhost speed is slower on AIX + const multipliers = + typeof ms === 'bigint' + ? { + two: 2n, + four: 4n, + seven: 7n + } + : { + two: 2, + four: 4, + seven: 7 + } + if (process.features.debug) ms = multipliers.two * ms + if (isAIX) return multipliers.two * ms // Default localhost speed is slower on AIX - if (isPi) - return multipliers.two * ms; // Raspberry Pi devices + if (isPi) return multipliers.two * ms // Raspberry Pi devices - return ms; + return ms } - - let knownGlobals = [ - - typeof AggregateError !== 'undefined' ? AggregateError : require('aggregate-error'), - typeof AbortController !== 'undefined' ? AbortController : require('abort-controller').AbortController, - typeof AbortSignal !== 'undefined' ? AbortSignal : require('abort-controller').AbortSignal, - typeof EventTarget !== 'undefined' ? EventTarget : require('event-target-shim').EventTarget, +let knownGlobals = [ + typeof AggregateError !== 'undefined' ? AggregateError : require('aggregate-error'), + typeof AbortController !== 'undefined' ? AbortController : require('abort-controller').AbortController, + typeof AbortSignal !== 'undefined' ? AbortSignal : require('abort-controller').AbortSignal, + typeof EventTarget !== 'undefined' ? 
EventTarget : require('event-target-shim').EventTarget, atob, btoa, clearImmediate, @@ -279,48 +293,49 @@ function platformTimeout(ms) { setImmediate, setInterval, setTimeout, - queueMicrotask, -]; - -// TODO(@jasnell): This check can be temporary. AbortController is + queueMicrotask +] // TODO(@jasnell): This check can be temporary. AbortController is // not currently supported in either Node.js 12 or 10, making it // difficult to run tests comparatively on those versions. Once // all supported versions have AbortController as a global, this // check can be removed and AbortController can be added to the // knownGlobals list above. -if (global.AbortController) - knownGlobals.push(global.AbortController); + +if (global.AbortController) knownGlobals.push(global.AbortController) if (global.gc) { - knownGlobals.push(global.gc); + knownGlobals.push(global.gc) } if (global.performance) { - knownGlobals.push(global.performance); + knownGlobals.push(global.performance) } + if (global.PerformanceMark) { - knownGlobals.push(global.PerformanceMark); -} -if (global.PerformanceMeasure) { - knownGlobals.push(global.PerformanceMeasure); + knownGlobals.push(global.PerformanceMark) } -// TODO(@ethan-arrowood): Similar to previous checks, this can be temporary +if (global.PerformanceMeasure) { + knownGlobals.push(global.PerformanceMeasure) +} // TODO(@ethan-arrowood): Similar to previous checks, this can be temporary // until v16.x is EOL. Once all supported versions have structuredClone we // can add this to the list above instead. + if (global.structuredClone) { - knownGlobals.push(global.structuredClone); + knownGlobals.push(global.structuredClone) } if (global.fetch) { - knownGlobals.push(fetch); + knownGlobals.push(fetch) } + if (hasCrypto && global.crypto) { - knownGlobals.push(global.crypto); - knownGlobals.push(global.Crypto); - knownGlobals.push(global.CryptoKey); - knownGlobals.push(global.SubtleCrypto); + knownGlobals.push(global.crypto) + knownGlobals.push(global.Crypto) + knownGlobals.push(global.CryptoKey) + knownGlobals.push(global.SubtleCrypto) } + if (global.ReadableStream) { knownGlobals.push( global.ReadableStream, @@ -339,144 +354,140 @@ if (global.ReadableStream) { global.TextEncoderStream, global.TextDecoderStream, global.CompressionStream, - global.DecompressionStream, - ); + global.DecompressionStream + ) } function allowGlobals(...allowlist) { - knownGlobals = knownGlobals.concat(allowlist); + knownGlobals = knownGlobals.concat(allowlist) } if (process.env.NODE_TEST_KNOWN_GLOBALS !== '0') { if (process.env.NODE_TEST_KNOWN_GLOBALS) { - const knownFromEnv = process.env.NODE_TEST_KNOWN_GLOBALS.split(','); - allowGlobals(...knownFromEnv); + const knownFromEnv = process.env.NODE_TEST_KNOWN_GLOBALS.split(',') + allowGlobals(...knownFromEnv) } function leakedGlobals() { - const leaked = []; + const leaked = [] for (const val in global) { if (!knownGlobals.includes(global[val])) { - leaked.push(val); + leaked.push(val) } } - return leaked; + return leaked } - process.on('exit', function() { - const leaked = leakedGlobals(); + process.on('exit', function () { + const leaked = leakedGlobals() + if (leaked.length > 0) { - assert.fail(`Unexpected global(s) found: ${leaked.join(', ')}`); + assert.fail(`Unexpected global(s) found: ${leaked.join(', ')}`) } - }); + }) } -const mustCallChecks = []; +const mustCallChecks = [] function runCallChecks(exitCode) { - if (exitCode !== 0) return; - - const failed = mustCallChecks.filter(function(context) { + if (exitCode !== 0) return + const failed = 
mustCallChecks.filter(function (context) { if ('minimum' in context) { - context.messageSegment = `at least ${context.minimum}`; - return context.actual < context.minimum; + context.messageSegment = `at least ${context.minimum}` + return context.actual < context.minimum } - context.messageSegment = `exactly ${context.exact}`; - return context.actual !== context.exact; - }); - failed.forEach(function(context) { - console.log('Mismatched %s function calls. Expected %s, actual %d.', - context.name, - context.messageSegment, - context.actual); - console.log(context.stack.split('\n').slice(2).join('\n')); - }); - - if (failed.length) process.exit(1); + context.messageSegment = `exactly ${context.exact}` + return context.actual !== context.exact + }) + failed.forEach(function (context) { + console.log( + 'Mismatched %s function calls. Expected %s, actual %d.', + context.name, + context.messageSegment, + context.actual + ) + console.log(context.stack.split('\n').slice(2).join('\n')) + }) + if (failed.length) process.exit(1) } function mustCall(fn, exact) { - return _mustCallInner(fn, exact, 'exact'); + return _mustCallInner(fn, exact, 'exact') } function mustSucceed(fn, exact) { - return mustCall(function(err, ...args) { - assert.ifError(err); - if (typeof fn === 'function') - return fn.apply(this, args); - }, exact); + return mustCall(function (err, ...args) { + assert.ifError(err) + if (typeof fn === 'function') return fn.apply(this, args) + }, exact) } function mustCallAtLeast(fn, minimum) { - return _mustCallInner(fn, minimum, 'minimum'); + return _mustCallInner(fn, minimum, 'minimum') } function _mustCallInner(fn, criteria = 1, field) { - if (process._exiting) - throw new Error('Cannot use common.mustCall*() in process exit handler'); + if (process._exiting) throw new Error('Cannot use common.mustCall*() in process exit handler') + if (typeof fn === 'number') { - criteria = fn; - fn = noop; + criteria = fn + fn = noop } else if (fn === undefined) { - fn = noop; + fn = noop } - if (typeof criteria !== 'number') - throw new TypeError(`Invalid ${field} value: ${criteria}`); - + if (typeof criteria !== 'number') throw new TypeError(`Invalid ${field} value: ${criteria}`) const context = { [field]: criteria, actual: 0, stack: util.inspect(new Error()), name: fn.name || '' - }; - - // Add the exit listener only once to avoid listener leak warnings - if (mustCallChecks.length === 0) process.on('exit', runCallChecks); + } // Add the exit listener only once to avoid listener leak warnings - mustCallChecks.push(context); + if (mustCallChecks.length === 0) process.on('exit', runCallChecks) + mustCallChecks.push(context) - const _return = function() { // eslint-disable-line func-style - context.actual++; - return fn.apply(this, arguments); - }; - // Function instances have own properties that may be relevant. + const _return = function () { + // eslint-disable-line func-style + context.actual++ + return fn.apply(this, arguments) + } // Function instances have own properties that may be relevant. // Let's replicate those properties to the returned function. 
// Refs: https://tc39.es/ecma262/#sec-function-instances + Object.defineProperties(_return, { name: { value: fn.name, writable: false, enumerable: false, - configurable: true, + configurable: true }, length: { value: fn.length, writable: false, enumerable: false, - configurable: true, - }, - }); - return _return; + configurable: true + } + }) + return _return } function hasMultiLocalhost() { - const { internalBinding } = require('internal/test/binding'); - const { TCP, constants: TCPConstants } = internalBinding('tcp_wrap'); - const t = new TCP(TCPConstants.SOCKET); - const ret = t.bind('127.0.0.2', 0); - t.close(); - return ret === 0; + const { internalBinding } = require('internal/test/binding') + + const { TCP, constants: TCPConstants } = internalBinding('tcp_wrap') + const t = new TCP(TCPConstants.SOCKET) + const ret = t.bind('127.0.0.2', 0) + t.close() + return ret === 0 } function skipIfEslintMissing() { - if (!fs.existsSync( - path.join(__dirname, '..', '..', 'tools', 'node_modules', 'eslint') - )) { - skip('missing ESLint'); + if (!fs.existsSync(path.join(__dirname, '..', '..', 'tools', 'node_modules', 'eslint'))) { + skip('missing ESLint') } } @@ -488,194 +499,192 @@ function canCreateSymLink() { // whoami.exe needs to be the one from System32 // If unix tools are in the path, they can shadow the one we want, // so use the full path while executing whoami - const whoamiPath = path.join(process.env.SystemRoot, - 'System32', 'whoami.exe'); + const whoamiPath = path.join(process.env.SystemRoot, 'System32', 'whoami.exe') try { - const output = execSync(`${whoamiPath} /priv`, { timeout: 1000 }); - return output.includes('SeCreateSymbolicLinkPrivilege'); + const output = execSync(`${whoamiPath} /priv`, { + timeout: 1000 + }) + return output.includes('SeCreateSymbolicLinkPrivilege') } catch { - return false; + return false } - } - // On non-Windows platforms, this always returns `true` - return true; + } // On non-Windows platforms, this always returns `true` + + return true } function getCallSite(top) { - const originalStackFormatter = Error.prepareStackTrace; - Error.prepareStackTrace = (err, stack) => - `${stack[0].getFileName()}:${stack[0].getLineNumber()}`; - const err = new Error(); - Error.captureStackTrace(err, top); - // With the V8 Error API, the stack is not formatted until it is accessed - err.stack; // eslint-disable-line no-unused-expressions - Error.prepareStackTrace = originalStackFormatter; - return err.stack; + const originalStackFormatter = Error.prepareStackTrace + + Error.prepareStackTrace = (err, stack) => `${stack[0].getFileName()}:${stack[0].getLineNumber()}` + + const err = new Error() + Error.captureStackTrace(err, top) // With the V8 Error API, the stack is not formatted until it is accessed + + err.stack // eslint-disable-line no-unused-expressions + + Error.prepareStackTrace = originalStackFormatter + return err.stack } function mustNotCall(msg) { - const callSite = getCallSite(mustNotCall); + const callSite = getCallSite(mustNotCall) return function mustNotCall(...args) { - const argsInfo = args.length > 0 ? - `\ncalled with arguments: ${args.map(util.inspect).join(', ')}` : ''; - assert.fail( - `${msg || 'function should not have been called'} at ${callSite}` + - argsInfo); - }; + const argsInfo = args.length > 0 ? 
`\ncalled with arguments: ${args.map(util.inspect).join(', ')}` : '' + assert.fail(`${msg || 'function should not have been called'} at ${callSite}` + argsInfo) + } } function printSkipMessage(msg) { - console.log(`1..0 # Skipped: ${msg}`); + console.log(`1..0 # Skipped: ${msg}`) } function skip(msg) { - printSkipMessage(msg); - process.exit(0); -} - -// Returns true if the exit code "exitCode" and/or signal name "signal" + printSkipMessage(msg) + process.exit(0) +} // Returns true if the exit code "exitCode" and/or signal name "signal" // represent the exit code and/or signal name of a node process that aborted, // false otherwise. + function nodeProcessAborted(exitCode, signal) { // Depending on the compiler used, node will exit with either // exit code 132 (SIGILL), 133 (SIGTRAP) or 134 (SIGABRT). - let expectedExitCodes = [132, 133, 134]; - - // On platforms using KSH as the default shell (like SmartOS), + let expectedExitCodes = [132, 133, 134] // On platforms using KSH as the default shell (like SmartOS), // when a process aborts, KSH exits with an exit code that is // greater than 256, and thus the exit code emitted with the 'exit' // event is null and the signal is set to either SIGILL, SIGTRAP, // or SIGABRT (depending on the compiler). - const expectedSignals = ['SIGILL', 'SIGTRAP', 'SIGABRT']; - // On Windows, 'aborts' are of 2 types, depending on the context: + const expectedSignals = ['SIGILL', 'SIGTRAP', 'SIGABRT'] // On Windows, 'aborts' are of 2 types, depending on the context: // (i) Exception breakpoint, if --abort-on-uncaught-exception is on // which corresponds to exit code 2147483651 (0x80000003) // (ii) Otherwise, _exit(134) which is called in place of abort() due to // raising SIGABRT exiting with ambiguous exit code '3' by default - if (isWindows) - expectedExitCodes = [0x80000003, 134]; - // When using --abort-on-uncaught-exception, V8 will use + if (isWindows) expectedExitCodes = [0x80000003, 134] // When using --abort-on-uncaught-exception, V8 will use // base::OS::Abort to terminate the process. // Depending on the compiler used, the shell or other aspects of // the platform used to build the node binary, this will actually // make V8 exit by aborting or by raising a signal. In any case, // one of them (exit code or signal) needs to be set to one of // the expected exit codes or signals. + if (signal !== null) { - return expectedSignals.includes(signal); + return expectedSignals.includes(signal) } - return expectedExitCodes.includes(exitCode); + + return expectedExitCodes.includes(exitCode) } function isAlive(pid) { try { - process.kill(pid, 'SIGCONT'); - return true; + process.kill(pid, 'SIGCONT') + return true } catch { - return false; + return false } } function _expectWarning(name, expected, code) { if (typeof expected === 'string') { - expected = [[expected, code]]; + expected = [[expected, code]] } else if (!Array.isArray(expected)) { - expected = Object.entries(expected).map(([a, b]) => [b, a]); - } else if (!(Array.isArray(expected[0]))) { - expected = [[expected[0], expected[1]]]; - } - // Deprecation codes are mandatory, everything else is not. + expected = Object.entries(expected).map(([a, b]) => [b, a]) + } else if (!Array.isArray(expected[0])) { + expected = [[expected[0], expected[1]]] + } // Deprecation codes are mandatory, everything else is not. 
+ if (name === 'DeprecationWarning') { - expected.forEach(([_, code]) => assert(code, expected)); + expected.forEach(([_, code]) => assert(code, expected)) } + return mustCall((warning) => { - const expectedProperties = expected.shift(); + const expectedProperties = expected.shift() + if (!expectedProperties) { - assert.fail(`Unexpected extra warning received: ${warning}`); + assert.fail(`Unexpected extra warning received: ${warning}`) } - const [ message, code ] = expectedProperties; - assert.strictEqual(warning.name, name); + + const [message, code] = expectedProperties + assert.strictEqual(warning.name, name) + if (typeof message === 'string') { - assert.strictEqual(warning.message, message); + assert.strictEqual(warning.message, message) } else { - assert.match(warning.message, message); + assert.match(warning.message, message) } - assert.strictEqual(warning.code, code); - }, expected.length); -} -let catchWarning; + assert.strictEqual(warning.code, code) + }, expected.length) +} -// Accepts a warning name and description or array of descriptions or a map of +let catchWarning // Accepts a warning name and description or array of descriptions or a map of // warning names to description(s) ensures a warning is generated for each // name/description pair. // The expected messages have to be unique per `expectWarning()` call. + function expectWarning(nameOrMap, expected, code) { if (catchWarning === undefined) { - catchWarning = {}; + catchWarning = {} process.on('warning', (warning) => { if (!catchWarning[warning.name]) { - throw new TypeError( - `"${warning.name}" was triggered without being expected.\n` + - util.inspect(warning) - ); + throw new TypeError(`"${warning.name}" was triggered without being expected.\n` + util.inspect(warning)) } - catchWarning[warning.name](warning); - }); + + catchWarning[warning.name](warning) + }) } + if (typeof nameOrMap === 'string') { - catchWarning[nameOrMap] = _expectWarning(nameOrMap, expected, code); + catchWarning[nameOrMap] = _expectWarning(nameOrMap, expected, code) } else { Object.keys(nameOrMap).forEach((name) => { - catchWarning[name] = _expectWarning(name, nameOrMap[name]); - }); + catchWarning[name] = _expectWarning(name, nameOrMap[name]) + }) } -} +} // Useful for testing expected internal/error objects -// Useful for testing expected internal/error objects function expectsError(validator, exact) { return mustCall((...args) => { if (args.length !== 1) { // Do not use `assert.strictEqual()` to prevent `inspect` from // always being called. 
- assert.fail(`Expected one argument, got ${util.inspect(args)}`); + assert.fail(`Expected one argument, got ${util.inspect(args)}`) } - const error = args.pop(); - const descriptor = Object.getOwnPropertyDescriptor(error, 'message'); - // The error message should be non-enumerable - assert.strictEqual(descriptor.enumerable, false); - assert.throws(() => { throw error; }, validator); - return true; - }, exact); + const error = args.pop() + const descriptor = Object.getOwnPropertyDescriptor(error, 'message') // The error message should be non-enumerable + + assert.strictEqual(descriptor.enumerable, false) + assert.throws(() => { + throw error + }, validator) + return true + }, exact) } function skipIfInspectorDisabled() { if (!process.features.inspector) { - skip('V8 inspector is disabled'); + skip('V8 inspector is disabled') } } function skipIf32Bits() { if (bits < 64) { - skip('The tested feature is not available in 32bit builds'); + skip('The tested feature is not available in 32bit builds') } } function skipIfWorker() { if (!isMainThread) { - skip('This test only works on a main thread'); + skip('This test only works on a main thread') } } function getArrayBufferViews(buf) { - const { buffer, byteOffset, byteLength } = buf; - - const out = []; - + const { buffer, byteOffset, byteLength } = buf + const out = [] const arrayBufferViews = [ Int8Array, Uint8Array, @@ -686,113 +695,130 @@ function getArrayBufferViews(buf) { Uint32Array, Float32Array, Float64Array, - DataView, - ]; + DataView + ] for (const type of arrayBufferViews) { - const { BYTES_PER_ELEMENT = 1 } = type; + const { BYTES_PER_ELEMENT = 1 } = type + if (byteLength % BYTES_PER_ELEMENT === 0) { - out.push(new type(buffer, byteOffset, byteLength / BYTES_PER_ELEMENT)); + out.push(new type(buffer, byteOffset, byteLength / BYTES_PER_ELEMENT)) } } - return out; + + return out } function getBufferSources(buf) { - return [...getArrayBufferViews(buf), new Uint8Array(buf).buffer]; + return [...getArrayBufferViews(buf), new Uint8Array(buf).buffer] } function getTTYfd() { // Do our best to grab a tty fd. - const tty = require('tty'); - // Don't attempt fd 0 as it is not writable on Windows. + const tty = require('tty') // Don't attempt fd 0 as it is not writable on Windows. // Ref: ef2861961c3d9e9ed6972e1e84d969683b25cf95 - const ttyFd = [1, 2, 4, 5].find(tty.isatty); + + const ttyFd = [1, 2, 4, 5].find(tty.isatty) + if (ttyFd === undefined) { try { - return fs.openSync('/dev/tty'); + return fs.openSync('/dev/tty') } catch { // There aren't any tty fd's available to use. - return -1; + return -1 } } - return ttyFd; + + return ttyFd } function runWithInvalidFD(func) { - let fd = 1 << 30; - // Get first known bad file descriptor. 1 << 30 is usually unlikely to + let fd = 1 << 30 // Get first known bad file descriptor. 1 << 30 is usually unlikely to // be an valid one. + try { while (fs.fstatSync(fd--) && fd > 0); } catch { - return func(fd); + return func(fd) } - printSkipMessage('Could not generate an invalid fd'); -} + printSkipMessage('Could not generate an invalid fd') +} // A helper function to simplify checking for ERR_INVALID_ARG_TYPE output. -// A helper function to simplify checking for ERR_INVALID_ARG_TYPE output. 
function invalidArgTypeHelper(input) { if (input == null) { - return ` Received ${input}`; + return ` Received ${input}` } + if (typeof input === 'function' && input.name) { - return ` Received function ${input.name}`; + return ` Received function ${input.name}` } + if (typeof input === 'object') { if (input.constructor && input.constructor.name) { - return ` Received an instance of ${input.constructor.name}`; + return ` Received an instance of ${input.constructor.name}` } - return ` Received ${util.inspect(input, { depth: -1 })}`; + + return ` Received ${util.inspect(input, { + depth: -1 + })}` } - let inspected = util.inspect(input, { colors: false }); - if (inspected.length > 25) - inspected = `${inspected.slice(0, 25)}...`; - return ` Received type ${typeof input} (${inspected})`; + + let inspected = util.inspect(input, { + colors: false + }) + if (inspected.length > 25) inspected = `${inspected.slice(0, 25)}...` + return ` Received type ${typeof input} (${inspected})` } function skipIfDumbTerminal() { if (isDumbTerminal) { - skip('skipping - dumb terminal'); + skip('skipping - dumb terminal') } } function gcUntil(name, condition) { if (typeof name === 'function') { - condition = name; - name = undefined; + condition = name + name = undefined } + return new Promise((resolve, reject) => { - let count = 0; + let count = 0 + function gcAndCheck() { setImmediate(() => { - count++; - global.gc(); + count++ + global.gc() + if (condition()) { - resolve(); + resolve() } else if (count < 10) { - gcAndCheck(); + gcAndCheck() } else { - reject(name === undefined ? undefined : 'Test ' + name + ' failed'); + reject(name === undefined ? undefined : 'Test ' + name + ' failed') } - }); + }) } - gcAndCheck(); - }); + + gcAndCheck() + }) } function requireNoPackageJSONAbove(dir = __dirname) { - let possiblePackage = path.join(dir, '..', 'package.json'); - let lastPackage = null; + let possiblePackage = path.join(dir, '..', 'package.json') + let lastPackage = null + while (possiblePackage !== lastPackage) { if (fs.existsSync(possiblePackage)) { assert.fail( - 'This test shouldn\'t load properties from a package.json above ' + - `its file location. Found package.json at ${possiblePackage}.`); + "This test shouldn't load properties from a package.json above " + + `its file location. Found package.json at ${possiblePackage}.` + ) } - lastPackage = possiblePackage; - possiblePackage = path.join(possiblePackage, '..', '..', 'package.json'); + + lastPackage = possiblePackage + possiblePackage = path.join(possiblePackage, '..', '..', 'package.json') } } @@ -846,129 +872,140 @@ const common = { skipIfWorker, get enoughTestMem() { - return require('os').totalmem() > 0x70000000; /* 1.75 Gb */ + return require('os').totalmem() > 0x70000000 + /* 1.75 Gb */ }, get hasFipsCrypto() { - return hasCrypto && require('crypto').getFips(); + return hasCrypto && require('crypto').getFips() }, get hasIPv6() { - const iFaces = require('os').networkInterfaces(); - const re = isWindows ? /Loopback Pseudo-Interface/ : /lo/; + const iFaces = require('os').networkInterfaces() + + const re = isWindows ? 
/Loopback Pseudo-Interface/ : /lo/ return Object.keys(iFaces).some((name) => { - return re.test(name) && - iFaces[name].some(({ family }) => family === 6); - }); + return re.test(name) && iFaces[name].some(({ family }) => family === 6) + }) }, get inFreeBSDJail() { - if (inFreeBSDJail !== null) return inFreeBSDJail; + if (inFreeBSDJail !== null) return inFreeBSDJail - if (exports.isFreeBSD && - execSync('sysctl -n security.jail.jailed').toString() === '1\n') { - inFreeBSDJail = true; + if (exports.isFreeBSD && execSync('sysctl -n security.jail.jailed').toString() === '1\n') { + inFreeBSDJail = true } else { - inFreeBSDJail = false; + inFreeBSDJail = false } - return inFreeBSDJail; + + return inFreeBSDJail }, // On IBMi, process.platform and os.platform() both return 'aix', // It is not enough to differentiate between IBMi and real AIX system. get isIBMi() { - return require('os').type() === 'OS400'; + return require('os').type() === 'OS400' }, get isLinuxPPCBE() { - return (process.platform === 'linux') && (process.arch === 'ppc64') && - (require('os').endianness() === 'BE'); + return process.platform === 'linux' && process.arch === 'ppc64' && require('os').endianness() === 'BE' }, get localhostIPv4() { - if (localhostIPv4 !== null) return localhostIPv4; + if (localhostIPv4 !== null) return localhostIPv4 if (this.inFreeBSDJail) { // Jailed network interfaces are a bit special - since we need to jump // through loops, as well as this being an exception case, assume the // user will provide this instead. if (process.env.LOCALHOST) { - localhostIPv4 = process.env.LOCALHOST; + localhostIPv4 = process.env.LOCALHOST } else { - console.error('Looks like we\'re in a FreeBSD Jail. ' + - 'Please provide your default interface address ' + - 'as LOCALHOST or expect some tests to fail.'); + console.error( + "Looks like we're in a FreeBSD Jail. " + + 'Please provide your default interface address ' + + 'as LOCALHOST or expect some tests to fail.' + ) } } - if (localhostIPv4 === null) localhostIPv4 = '127.0.0.1'; - - return localhostIPv4; + if (localhostIPv4 === null) localhostIPv4 = '127.0.0.1' + return localhostIPv4 }, // opensslCli defined lazily to reduce overhead of spawnSync get opensslCli() { - if (opensslCli !== null) return opensslCli; + if (opensslCli !== null) return opensslCli if (process.config.variables.node_shared_openssl) { // Use external command - opensslCli = 'openssl'; + opensslCli = 'openssl' } else { // Use command built from sources included in Node.js repository - opensslCli = path.join(path.dirname(process.execPath), 'openssl-cli'); + opensslCli = path.join(path.dirname(process.execPath), 'openssl-cli') } - if (exports.isWindows) opensslCli += '.exe'; + if (exports.isWindows) opensslCli += '.exe' + const opensslCmd = spawnSync(opensslCli, ['version']) - const opensslCmd = spawnSync(opensslCli, ['version']); if (opensslCmd.status !== 0 || opensslCmd.error !== undefined) { // OpenSSL command cannot be executed - opensslCli = false; + opensslCli = false } - return opensslCli; + + return opensslCli }, get PORT() { if (+process.env.TEST_PARALLEL) { - throw new Error('common.PORT cannot be used in a parallelized test'); + throw new Error('common.PORT cannot be used in a parallelized test') } - return +process.env.NODE_COMMON_PORT || 12346; + + return +process.env.NODE_COMMON_PORT || 12346 }, /** * Returns the EOL character used by this Git checkout. */ get checkoutEOL() { - return fs.readFileSync(__filename).includes('\r\n') ? 
'\r\n' : '\n'; - }, -}; - -const validProperties = new Set(Object.keys(common)); + return fs.readFileSync(__filename).includes('\r\n') ? '\r\n' : '\n' + } +} +const validProperties = new Set(Object.keys(common)) module.exports = new Proxy(common, { get(obj, prop) { - if (!validProperties.has(prop)) - throw new Error(`Using invalid common property: '${prop}'`); - return obj[prop]; + if (!validProperties.has(prop)) throw new Error(`Using invalid common property: '${prop}'`) + return obj[prop] } -}); +}) +/* replacement start */ - /* replacement start */ - if (typeof EventTarget === 'undefined') { - globalThis.EventTarget = require('event-target-shim').EventTarget; - } +if (typeof Blob === 'undefined') { + let { Blob } = require('buffer') - if (typeof AbortController === 'undefined') { - globalThis.AbortController = require('abort-controller').AbortController; + if (typeof Blob === 'undefined') { + Blob = require('blob-polyfill').Blob } - if (typeof AbortSignal === 'undefined') { - globalThis.AbortSignal = require('abort-controller').AbortSignal; + globalThis.Blob = Blob + allowGlobals(Blob) +} + +if (typeof EventTarget === 'undefined') { + globalThis.EventTarget = require('event-target-shim').EventTarget +} + +if (typeof AbortController === 'undefined') { + globalThis.AbortController = require('abort-controller').AbortController +} + +if (typeof AbortSignal === 'undefined') { + globalThis.AbortSignal = require('abort-controller').AbortSignal - globalThis.AbortSignal.abort = function() { - const controller = new AbortController(); - controller.abort(); - - return controller.signal; - } + globalThis.AbortSignal.abort = function () { + const controller = new AbortController() + controller.abort() + return controller.signal } - /* replacement end */ +} +/* replacement end */ diff --git a/test/common/index.mjs b/test/common/index.mjs index ec181dcacb..85c2548b71 100644 --- a/test/common/index.mjs +++ b/test/common/index.mjs @@ -1,7 +1,7 @@ -import { createRequire } from 'module'; +import { createRequire } from 'module' -const require = createRequire(import.meta.url); -const common = require('./index.js'); +const require = createRequire(import.meta.url) +const common = require('./index.js') const { isMainThread, @@ -47,7 +47,7 @@ const { getBufferSources, getTTYfd, runWithInvalidFD -} = common; +} = common export { isMainThread, @@ -94,4 +94,4 @@ export { getTTYfd, runWithInvalidFD, createRequire -}; +} diff --git a/test/common/tmpdir.js b/test/common/tmpdir.js index 0bafea1582..4e29aa9691 100644 --- a/test/common/tmpdir.js +++ b/test/common/tmpdir.js @@ -1,61 +1,61 @@ -'use strict'; +'use strict' -const fs = require('fs'); -const path = require('path'); -const { isMainThread } = require('worker_threads'); +const fs = require('fs') + +const path = require('path') + +const { isMainThread } = require('worker_threads') function rmSync(pathname) { - fs.rmSync(pathname, { maxRetries: 3, recursive: true, force: true }); + fs.rmSync(pathname, { + maxRetries: 3, + recursive: true, + force: true + }) } -const testRoot = process.env.NODE_TEST_DIR ? - fs.realpathSync(process.env.NODE_TEST_DIR) : path.resolve(__dirname, '..'); - -// Using a `.` prefixed name, which is the convention for "hidden" on POSIX, +const testRoot = process.env.NODE_TEST_DIR ? fs.realpathSync(process.env.NODE_TEST_DIR) : path.resolve(__dirname, '..') // Using a `.` prefixed name, which is the convention for "hidden" on POSIX, // gets tools to ignore it by default or by simple rules, especially eslint. -const tmpdirName = '.tmp.' 
+ - (process.env.TEST_SERIAL_ID || process.env.TEST_THREAD_ID || '0'); -const tmpPath = path.join(testRoot, tmpdirName); -let firstRefresh = true; +const tmpdirName = '.tmp.' + (process.env.TEST_SERIAL_ID || process.env.TEST_THREAD_ID || '0') +const tmpPath = path.join(testRoot, tmpdirName) +let firstRefresh = true + function refresh() { - rmSync(this.path); - fs.mkdirSync(this.path); + rmSync(this.path) + fs.mkdirSync(this.path) if (firstRefresh) { - firstRefresh = false; - // Clean only when a test uses refresh. This allows for child processes to + firstRefresh = false // Clean only when a test uses refresh. This allows for child processes to // use the tmpdir and only the parent will clean on exit. - process.on('exit', onexit); + + process.on('exit', onexit) } } function onexit() { // Change directory to avoid possible EBUSY - if (isMainThread) - process.chdir(testRoot); + if (isMainThread) process.chdir(testRoot) try { - rmSync(tmpPath); + rmSync(tmpPath) } catch (e) { - console.error('Can\'t clean tmpdir:', tmpPath); - - const files = fs.readdirSync(tmpPath); - console.error('Files blocking:', files); + console.error("Can't clean tmpdir:", tmpPath) + const files = fs.readdirSync(tmpPath) + console.error('Files blocking:', files) if (files.some((f) => f.startsWith('.nfs'))) { // Warn about NFS "silly rename" - console.error('Note: ".nfs*" might be files that were open and ' + - 'unlinked but not closed.'); - console.error('See http://nfs.sourceforge.net/#faq_d2 for details.'); + console.error('Note: ".nfs*" might be files that were open and ' + 'unlinked but not closed.') + console.error('See http://nfs.sourceforge.net/#faq_d2 for details.') } - console.error(); - throw e; + console.error() + throw e } } module.exports = { path: tmpPath, refresh -}; +} diff --git a/test/ours/test-errors.js b/test/ours/test-errors.js index a300f17075..84a34a574d 100644 --- a/test/ours/test-errors.js +++ b/test/ours/test-errors.js @@ -1,6 +1,7 @@ 'use strict' const t = require('tap') + const { codes: errors } = require('../../lib/ours/errors') function checkError(err, Base, name, code, message) { @@ -8,11 +9,9 @@ function checkError(err, Base, name, code, message) { t.equal(err.name, name) t.equal(err.code, code) t.equal(err.message, message) -} +} // Update this numbers based on the number of checkError below multiplied by the assertions within checkError -// Update this numbers based on the number of checkError below multiplied by the assertions within checkError t.plan(17 * 4) - checkError( new errors.ERR_INVALID_ARG_VALUE('name', 0), TypeError, @@ -20,7 +19,6 @@ checkError( 'ERR_INVALID_ARG_VALUE', "The argument 'name' is invalid. Received 0" ) - checkError( new errors.ERR_INVALID_ARG_VALUE('name', undefined), TypeError, @@ -28,7 +26,6 @@ checkError( 'ERR_INVALID_ARG_VALUE', "The argument 'name' is invalid. Received undefined" ) - checkError( new errors.ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], 0), TypeError, @@ -36,7 +33,6 @@ checkError( 'ERR_INVALID_ARG_TYPE', 'The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. Received type number (0)' ) - checkError( new errors.ERR_INVALID_ARG_TYPE('first argument', 'not string', 'foo'), TypeError, @@ -44,7 +40,6 @@ checkError( 'ERR_INVALID_ARG_TYPE', "The first argument must be not string. 
Received type string ('foo')" ) - checkError( new errors.ERR_INVALID_ARG_TYPE('obj.prop', 'string', undefined), TypeError, @@ -52,7 +47,6 @@ checkError( 'ERR_INVALID_ARG_TYPE', 'The "obj.prop" property must be of type string. Received undefined' ) - checkError( new errors.ERR_STREAM_PUSH_AFTER_EOF(), Error, @@ -60,7 +54,6 @@ checkError( 'ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF' ) - checkError( new errors.ERR_METHOD_NOT_IMPLEMENTED('_read()'), Error, @@ -68,7 +61,6 @@ checkError( 'ERR_METHOD_NOT_IMPLEMENTED', 'The _read() method is not implemented' ) - checkError( new errors.ERR_METHOD_NOT_IMPLEMENTED('_write()'), Error, @@ -76,9 +68,7 @@ checkError( 'ERR_METHOD_NOT_IMPLEMENTED', 'The _write() method is not implemented' ) - checkError(new errors.ERR_STREAM_PREMATURE_CLOSE(), Error, 'Error', 'ERR_STREAM_PREMATURE_CLOSE', 'Premature close') - checkError( new errors.ERR_STREAM_DESTROYED('pipe'), Error, @@ -86,7 +76,6 @@ checkError( 'ERR_STREAM_DESTROYED', 'Cannot call pipe after a stream was destroyed' ) - checkError( new errors.ERR_STREAM_DESTROYED('write'), Error, @@ -94,7 +83,6 @@ checkError( 'ERR_STREAM_DESTROYED', 'Cannot call write after a stream was destroyed' ) - checkError( new errors.ERR_MULTIPLE_CALLBACK(), Error, @@ -102,11 +90,8 @@ checkError( 'ERR_MULTIPLE_CALLBACK', 'Callback called multiple times' ) - checkError(new errors.ERR_STREAM_CANNOT_PIPE(), Error, 'Error', 'ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable') - checkError(new errors.ERR_STREAM_WRITE_AFTER_END(), Error, 'Error', 'ERR_STREAM_WRITE_AFTER_END', 'write after end') - checkError( new errors.ERR_STREAM_NULL_VALUES(), TypeError, @@ -114,7 +99,6 @@ checkError( 'ERR_STREAM_NULL_VALUES', 'May not write null values to stream' ) - checkError( new errors.ERR_UNKNOWN_ENCODING('foo'), TypeError, @@ -122,7 +106,6 @@ checkError( 'ERR_UNKNOWN_ENCODING', 'Unknown encoding: foo' ) - checkError( new errors.ERR_STREAM_UNSHIFT_AFTER_END_EVENT(), Error, diff --git a/test/ours/test-fake-timers.js b/test/ours/test-fake-timers.js index be95e071a7..b4cc966b1c 100644 --- a/test/ours/test-fake-timers.js +++ b/test/ours/test-fake-timers.js @@ -1,9 +1,13 @@ 'use strict' require('../common') + const t = require('tap') + const util = require('util') + const fakeTimers = require('@sinonjs/fake-timers') + const Transform = require('../../lib/ours/index').Transform t.plan(1) @@ -13,10 +17,10 @@ function MyTransform() { } util.inherits(MyTransform, Transform) - -const clock = fakeTimers.install({ toFake: ['setImmediate', 'nextTick'] }) +const clock = fakeTimers.install({ + toFake: ['setImmediate', 'nextTick'] +}) let stream2DataCalled = false - const stream = new MyTransform() stream.on('data', function () { stream.on('end', function () { @@ -34,7 +38,6 @@ stream.on('data', function () { stream.end() }) stream.emit('data') - clock.runAll() clock.uninstall() t.ok(stream2DataCalled) diff --git a/test/ours/test-stream-sync-write.js b/test/ours/test-stream-sync-write.js index a12085a238..7fa0e46788 100644 --- a/test/ours/test-stream-sync-write.js +++ b/test/ours/test-stream-sync-write.js @@ -1,19 +1,23 @@ 'use strict' require('../common') + const t = require('tap') + const util = require('util') + const stream = require('../../lib/ours/index') -const WritableStream = stream.Writable +const WritableStream = stream.Writable t.plan(1) const InternalStream = function () { WritableStream.call(this) } -util.inherits(InternalStream, WritableStream) +util.inherits(InternalStream, WritableStream) let invocations = 0 + 
InternalStream.prototype._write = function (chunk, encoding, callback) { callback() } @@ -24,6 +28,7 @@ const ExternalStream = function (writable) { this._writable = writable WritableStream.call(this) } + util.inherits(ExternalStream, WritableStream) ExternalStream.prototype._write = function (chunk, encoding, callback) { diff --git a/test/parallel/test-readable-from-iterator-closing.js b/test/parallel/test-readable-from-iterator-closing.js index 0bcb4e66be..109ab7e34a 100644 --- a/test/parallel/test-readable-from-iterator-closing.js +++ b/test/parallel/test-readable-from-iterator-closing.js @@ -1,194 +1,205 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const { mustCall, mustNotCall } = require('../common'); -const { Readable } = require('../../lib/ours/index'); -const { strictEqual } = require('assert'); + error() {} +} +const { mustCall, mustNotCall } = require('../common') + +const { Readable } = require('../../lib/ours/index') + +const { strictEqual } = require('assert') async function asyncSupport() { - const finallyMustCall = mustCall(); - const bodyMustCall = mustCall(); + const finallyMustCall = mustCall() + const bodyMustCall = mustCall() async function* infiniteGenerate() { try { - while (true) yield 'a'; + while (true) yield 'a' } finally { - finallyMustCall(); + finallyMustCall() } } - const stream = Readable.from(infiniteGenerate()); + const stream = Readable.from(infiniteGenerate()) for await (const chunk of stream) { - bodyMustCall(); - strictEqual(chunk, 'a'); - break; + bodyMustCall() + strictEqual(chunk, 'a') + break } } async function syncSupport() { - const finallyMustCall = mustCall(); - const bodyMustCall = mustCall(); + const finallyMustCall = mustCall() + const bodyMustCall = mustCall() function* infiniteGenerate() { try { - while (true) yield 'a'; + while (true) yield 'a' } finally { - finallyMustCall(); + finallyMustCall() } } - const stream = Readable.from(infiniteGenerate()); + const stream = Readable.from(infiniteGenerate()) for await (const chunk of stream) { - bodyMustCall(); - strictEqual(chunk, 'a'); - break; + bodyMustCall() + strictEqual(chunk, 'a') + break } } async function syncPromiseSupport() { - const returnMustBeAwaited = mustCall(); - const bodyMustCall = mustCall(); + const returnMustBeAwaited = mustCall() + const bodyMustCall = mustCall() function* infiniteGenerate() { try { - while (true) yield Promise.resolve('a'); + while (true) yield Promise.resolve('a') } finally { // eslint-disable-next-line no-unsafe-finally - return { then(cb) { - returnMustBeAwaited(); - cb(); - } }; + return { + then(cb) { + returnMustBeAwaited() + cb() + } + } } } - const stream = Readable.from(infiniteGenerate()); + const stream = Readable.from(infiniteGenerate()) for await (const chunk of stream) { - bodyMustCall(); - strictEqual(chunk, 'a'); - break; + bodyMustCall() + strictEqual(chunk, 'a') + break } } async function syncRejectedSupport() { - const returnMustBeAwaited = mustCall(); - const bodyMustNotCall = mustNotCall(); - const catchMustCall = mustCall(); - const secondNextMustNotCall = mustNotCall(); + const returnMustBeAwaited = mustCall() + const bodyMustNotCall = mustNotCall() + const catchMustCall = mustCall() + const secondNextMustNotCall = mustNotCall() function* generate() { try { - yield Promise.reject('a'); - secondNextMustNotCall(); + yield Promise.reject('a') + secondNextMustNotCall() } finally { // 
eslint-disable-next-line no-unsafe-finally - return { then(cb) { - returnMustBeAwaited(); - cb(); - } }; + return { + then(cb) { + returnMustBeAwaited() + cb() + } + } } } - const stream = Readable.from(generate()); + const stream = Readable.from(generate()) try { for await (const chunk of stream) { - bodyMustNotCall(chunk); + bodyMustNotCall(chunk) } } catch { - catchMustCall(); + catchMustCall() } } async function noReturnAfterThrow() { - const returnMustNotCall = mustNotCall(); - const bodyMustNotCall = mustNotCall(); - const catchMustCall = mustCall(); - const nextMustCall = mustCall(); - + const returnMustNotCall = mustNotCall() + const bodyMustNotCall = mustNotCall() + const catchMustCall = mustCall() + const nextMustCall = mustCall() const stream = Readable.from({ - [Symbol.asyncIterator]() { return this; }, + [Symbol.asyncIterator]() { + return this + }, + async next() { - nextMustCall(); - throw new Error('a'); + nextMustCall() + throw new Error('a') }, + async return() { - returnMustNotCall(); - return { done: true }; - }, - }); + returnMustNotCall() + return { + done: true + } + } + }) try { for await (const chunk of stream) { - bodyMustNotCall(chunk); + bodyMustNotCall(chunk) } } catch { - catchMustCall(); + catchMustCall() } } async function closeStreamWhileNextIsPending() { - const finallyMustCall = mustCall(); - const dataMustCall = mustCall(); - - let resolveDestroy; - const destroyed = - new Promise((resolve) => { resolveDestroy = mustCall(resolve); }); - let resolveYielded; - const yielded = - new Promise((resolve) => { resolveYielded = mustCall(resolve); }); + const finallyMustCall = mustCall() + const dataMustCall = mustCall() + let resolveDestroy + const destroyed = new Promise((resolve) => { + resolveDestroy = mustCall(resolve) + }) + let resolveYielded + const yielded = new Promise((resolve) => { + resolveYielded = mustCall(resolve) + }) async function* infiniteGenerate() { try { while (true) { - yield 'a'; - resolveYielded(); - await destroyed; + yield 'a' + resolveYielded() + await destroyed } } finally { - finallyMustCall(); + finallyMustCall() } } - const stream = Readable.from(infiniteGenerate()); - + const stream = Readable.from(infiniteGenerate()) stream.on('data', (data) => { - dataMustCall(); - strictEqual(data, 'a'); - }); - + dataMustCall() + strictEqual(data, 'a') + }) yielded.then(() => { - stream.destroy(); - resolveDestroy(); - }); + stream.destroy() + resolveDestroy() + }) } async function closeAfterNullYielded() { - const finallyMustCall = mustCall(); - const dataMustCall = mustCall(3); + const finallyMustCall = mustCall() + const dataMustCall = mustCall(3) function* generate() { try { - yield 'a'; - yield 'a'; - yield 'a'; + yield 'a' + yield 'a' + yield 'a' } finally { - finallyMustCall(); + finallyMustCall() } } - const stream = Readable.from(generate()); - + const stream = Readable.from(generate()) stream.on('data', (chunk) => { - dataMustCall(); - strictEqual(chunk, 'a'); - }); + dataMustCall() + strictEqual(chunk, 'a') + }) } Promise.all([ @@ -198,15 +209,15 @@ Promise.all([ syncRejectedSupport(), noReturnAfterThrow(), closeStreamWhileNextIsPending(), - closeAfterNullYielded(), -]).then(mustCall()); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + closeAfterNullYielded() +]).then(mustCall()) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + 
tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-readable-from.js b/test/parallel/test-readable-from.js index d1bdd26cda..60ce376ebc 100644 --- a/test/parallel/test-readable-from.js +++ b/test/parallel/test-readable-from.js @@ -1,216 +1,205 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const { mustCall } = require('../common'); -const { once } = require('events'); -const { Readable } = require('../../lib/ours/index'); -const { strictEqual, throws } = require('assert'); -const common = require('../common'); + error() {} +} +const { mustCall } = require('../common') + +const { once } = require('events') + +const { Readable } = require('../../lib/ours/index') + +const { strictEqual, throws } = require('assert') + +const common = require('../common') { throws(() => { - Readable.from(null); - }, /ERR_INVALID_ARG_TYPE/); + Readable.from(null) + }, /ERR_INVALID_ARG_TYPE/) } async function toReadableBasicSupport() { async function* generate() { - yield 'a'; - yield 'b'; - yield 'c'; + yield 'a' + yield 'b' + yield 'c' } - const stream = Readable.from(generate()); - - const expected = ['a', 'b', 'c']; + const stream = Readable.from(generate()) + const expected = ['a', 'b', 'c'] for await (const chunk of stream) { - strictEqual(chunk, expected.shift()); + strictEqual(chunk, expected.shift()) } } async function toReadableSyncIterator() { function* generate() { - yield 'a'; - yield 'b'; - yield 'c'; + yield 'a' + yield 'b' + yield 'c' } - const stream = Readable.from(generate()); - - const expected = ['a', 'b', 'c']; + const stream = Readable.from(generate()) + const expected = ['a', 'b', 'c'] for await (const chunk of stream) { - strictEqual(chunk, expected.shift()); + strictEqual(chunk, expected.shift()) } } async function toReadablePromises() { - const promises = [ - Promise.resolve('a'), - Promise.resolve('b'), - Promise.resolve('c'), - ]; - - const stream = Readable.from(promises); - - const expected = ['a', 'b', 'c']; + const promises = [Promise.resolve('a'), Promise.resolve('b'), Promise.resolve('c')] + const stream = Readable.from(promises) + const expected = ['a', 'b', 'c'] for await (const chunk of stream) { - strictEqual(chunk, expected.shift()); + strictEqual(chunk, expected.shift()) } } async function toReadableString() { - const stream = Readable.from('abc'); - - const expected = ['abc']; + const stream = Readable.from('abc') + const expected = ['abc'] for await (const chunk of stream) { - strictEqual(chunk, expected.shift()); + strictEqual(chunk, expected.shift()) } } async function toReadableBuffer() { - const stream = Readable.from(Buffer.from('abc')); - - const expected = ['abc']; + const stream = Readable.from(Buffer.from('abc')) + const expected = ['abc'] for await (const chunk of stream) { - strictEqual(chunk.toString(), expected.shift()); + strictEqual(chunk.toString(), expected.shift()) } } async function toReadableOnData() { async function* generate() { - yield 'a'; - yield 'b'; - yield 'c'; + yield 'a' + yield 'b' + yield 'c' } - const stream = Readable.from(generate()); - - let iterations = 0; - const expected = ['a', 'b', 'c']; - + const stream = Readable.from(generate()) + let iterations = 0 + const expected = ['a', 'b', 'c'] stream.on('data', (chunk) => { - iterations++; - strictEqual(chunk, expected.shift()); - }); - - await once(stream, 
'end'); - - strictEqual(iterations, 3); + iterations++ + strictEqual(chunk, expected.shift()) + }) + await once(stream, 'end') + strictEqual(iterations, 3) } async function toReadableOnDataNonObject() { async function* generate() { - yield 'a'; - yield 'b'; - yield 'c'; + yield 'a' + yield 'b' + yield 'c' } - const stream = Readable.from(generate(), { objectMode: false }); - - let iterations = 0; - const expected = ['a', 'b', 'c']; - + const stream = Readable.from(generate(), { + objectMode: false + }) + let iterations = 0 + const expected = ['a', 'b', 'c'] stream.on('data', (chunk) => { - iterations++; - strictEqual(chunk instanceof Buffer, true); - strictEqual(chunk.toString(), expected.shift()); - }); - - await once(stream, 'end'); - - strictEqual(iterations, 3); + iterations++ + strictEqual(chunk instanceof Buffer, true) + strictEqual(chunk.toString(), expected.shift()) + }) + await once(stream, 'end') + strictEqual(iterations, 3) } async function destroysTheStreamWhenThrowing() { - async function* generate() { // eslint-disable-line require-yield - throw new Error('kaboom'); + async function* generate() { + // eslint-disable-line require-yield + throw new Error('kaboom') } - const stream = Readable.from(generate()); - - stream.read(); - - const [err] = await once(stream, 'error'); - strictEqual(err.message, 'kaboom'); - strictEqual(stream.destroyed, true); - + const stream = Readable.from(generate()) + stream.read() + const [err] = await once(stream, 'error') + strictEqual(err.message, 'kaboom') + strictEqual(stream.destroyed, true) } async function asTransformStream() { async function* generate(stream) { for await (const chunk of stream) { - yield chunk.toUpperCase(); + yield chunk.toUpperCase() } } const source = new Readable({ objectMode: true, + read() { - this.push('a'); - this.push('b'); - this.push('c'); - this.push(null); + this.push('a') + this.push('b') + this.push('c') + this.push(null) } - }); - - const stream = Readable.from(generate(source)); - - const expected = ['A', 'B', 'C']; + }) + const stream = Readable.from(generate(source)) + const expected = ['A', 'B', 'C'] for await (const chunk of stream) { - strictEqual(chunk, expected.shift()); + strictEqual(chunk, expected.shift()) } } async function endWithError() { async function* generate() { - yield 1; - yield 2; - yield Promise.reject('Boum'); + yield 1 + yield 2 + yield Promise.reject('Boum') } - const stream = Readable.from(generate()); - - const expected = [1, 2]; + const stream = Readable.from(generate()) + const expected = [1, 2] try { for await (const chunk of stream) { - strictEqual(chunk, expected.shift()); + strictEqual(chunk, expected.shift()) } - throw new Error(); + + throw new Error() } catch (err) { - strictEqual(expected.length, 0); - strictEqual(err, 'Boum'); + strictEqual(expected.length, 0) + strictEqual(err, 'Boum') } } async function destroyingStreamWithErrorThrowsInGenerator() { const validateError = common.mustCall((e) => { - strictEqual(e, 'Boum'); - }); + strictEqual(e, 'Boum') + }) + async function* generate() { try { - yield 1; - yield 2; - yield 3; - throw new Error(); + yield 1 + yield 2 + yield 3 + throw new Error() } catch (e) { - validateError(e); + validateError(e) } } - const stream = Readable.from(generate()); - stream.read(); - stream.once('error', common.mustCall()); - stream.destroy('Boum'); + + const stream = Readable.from(generate()) + stream.read() + stream.once('error', common.mustCall()) + stream.destroy('Boum') } Promise.all([ @@ -224,15 +213,15 @@ Promise.all([ 
destroysTheStreamWhenThrowing(), asTransformStream(), endWithError(), - destroyingStreamWithErrorThrowsInGenerator(), -]).then(mustCall()); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + destroyingStreamWithErrorThrowsInGenerator() +]).then(mustCall()) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-readable-large-hwm.js b/test/parallel/test-readable-large-hwm.js index fbfee6acbe..53251d6ec8 100644 --- a/test/parallel/test-readable-large-hwm.js +++ b/test/parallel/test-readable-large-hwm.js @@ -1,42 +1,43 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const { Readable } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -// Make sure that readable completes + error() {} +} +const common = require('../common') + +const { Readable } = require('../../lib/ours/index') // Make sure that readable completes // even when reading larger buffer. -const bufferSize = 10 * 1024 * 1024; -let n = 0; + +const bufferSize = 10 * 1024 * 1024 +let n = 0 const r = new Readable({ read() { // Try to fill readable buffer piece by piece. - r.push(Buffer.alloc(bufferSize / 10)); + r.push(Buffer.alloc(bufferSize / 10)) if (n++ > 10) { - r.push(null); + r.push(null) } } -}); - +}) r.on('readable', () => { while (true) { - const ret = r.read(bufferSize); - if (ret === null) - break; + const ret = r.read(bufferSize) + if (ret === null) break } -}); -r.on('end', common.mustCall()); +}) +r.on('end', common.mustCall()) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-readable-single-end.js b/test/parallel/test-readable-single-end.js index 4c274eb0b6..df4cced655 100644 --- a/test/parallel/test-readable-single-end.js +++ b/test/parallel/test-readable-single-end.js @@ -1,31 +1,30 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Readable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') -// This test ensures that there will not be an additional empty 'readable' +const { Readable } = require('../../lib/ours/index') // This test ensures that there will not be an additional empty 'readable' // event when stream has ended (only 1 event signalling about end) const r = new Readable({ - read: () => {}, -}); - -r.push(null); - -r.on('readable', common.mustCall()); -r.on('end', common.mustCall()); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + read: () => {} +}) 
+r.push(null) +r.on('readable', common.mustCall()) +r.on('end', common.mustCall()) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-add-abort-signal.js b/test/parallel/test-stream-add-abort-signal.js index ba582aeb1d..c8a5e131f2 100644 --- a/test/parallel/test-stream-add-abort-signal.js +++ b/test/parallel/test-stream-add-abort-signal.js @@ -1,42 +1,43 @@ // Flags: --expose-internals +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -require('../common'); -const assert = require('assert'); -const { addAbortSignal, Readable } = require('../../lib/ours/index'); -const { - addAbortSignalNoValidate, -} = require('../../lib/internal/streams/add-abort-signal'); + error() {} +} +require('../common') + +const assert = require('assert') + +const { addAbortSignal, Readable } = require('../../lib/ours/index') + +const { addAbortSignalNoValidate } = require('../../lib/internal/streams/add-abort-signal') { assert.throws(() => { - addAbortSignal('INVALID_SIGNAL'); - }, /ERR_INVALID_ARG_TYPE/); - - const ac = new AbortController(); + addAbortSignal('INVALID_SIGNAL') + }, /ERR_INVALID_ARG_TYPE/) + const ac = new AbortController() assert.throws(() => { - addAbortSignal(ac.signal, 'INVALID_STREAM'); - }, /ERR_INVALID_ARG_TYPE/); + addAbortSignal(ac.signal, 'INVALID_STREAM') + }, /ERR_INVALID_ARG_TYPE/) } - { const r = new Readable({ - read: () => {}, - }); - assert.deepStrictEqual(r, addAbortSignalNoValidate('INVALID_SIGNAL', r)); + read: () => {} + }) + assert.deepStrictEqual(r, addAbortSignalNoValidate('INVALID_SIGNAL', r)) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-aliases-legacy.js b/test/parallel/test-stream-aliases-legacy.js index f30c305198..759665ef00 100644 --- a/test/parallel/test-stream-aliases-legacy.js +++ b/test/parallel/test-stream-aliases-legacy.js @@ -1,29 +1,30 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -require('../common'); + error() {} +} +require('../common') -const assert = require('assert'); -const stream = require('../../lib/ours/index'); +const assert = require('assert') -// Verify that all individual aliases are left in place. +const stream = require('../../lib/ours/index') // Verify that all individual aliases are left in place. 
-assert.strictEqual(stream.Readable, require('../../lib/_stream_readable')); -assert.strictEqual(stream.Writable, require('../../lib/_stream_writable')); -assert.strictEqual(stream.Duplex, require('../../lib/_stream_duplex')); -assert.strictEqual(stream.Transform, require('../../lib/_stream_transform')); -assert.strictEqual(stream.PassThrough, require('../../lib/_stream_passthrough')); +assert.strictEqual(stream.Readable, require('../../lib/_stream_readable')) +assert.strictEqual(stream.Writable, require('../../lib/_stream_writable')) +assert.strictEqual(stream.Duplex, require('../../lib/_stream_duplex')) +assert.strictEqual(stream.Transform, require('../../lib/_stream_transform')) +assert.strictEqual(stream.PassThrough, require('../../lib/_stream_passthrough')) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-asIndexedPairs.mjs b/test/parallel/test-stream-asIndexedPairs.mjs index a103920eef..35919114a9 100644 --- a/test/parallel/test-stream-asIndexedPairs.mjs +++ b/test/parallel/test-stream-asIndexedPairs.mjs @@ -1,64 +1,82 @@ -import '../common/index.mjs'; -import { Readable }from '../../lib/ours/index.js'; -import { deepStrictEqual, rejects, throws } from 'assert'; -import tap from 'tap'; +import '../common/index.mjs' +import { Readable } from '../../lib/ours/index.js' +import { deepStrictEqual, rejects, throws } from 'assert' +import tap from 'tap' { // asIndexedPairs with a synchronous stream - const pairs = await Readable.from([1, 2, 3]).asIndexedPairs().toArray(); - deepStrictEqual(pairs, [[0, 1], [1, 2], [2, 3]]); - const empty = await Readable.from([]).asIndexedPairs().toArray(); - deepStrictEqual(empty, []); + const pairs = await Readable.from([1, 2, 3]).asIndexedPairs().toArray() + deepStrictEqual(pairs, [ + [0, 1], + [1, 2], + [2, 3] + ]) + const empty = await Readable.from([]).asIndexedPairs().toArray() + deepStrictEqual(empty, []) } { // asIndexedPairs works an asynchronous streams - const asyncFrom = (...args) => Readable.from(...args).map(async (x) => x); - const pairs = await asyncFrom([1, 2, 3]).asIndexedPairs().toArray(); - deepStrictEqual(pairs, [[0, 1], [1, 2], [2, 3]]); - const empty = await asyncFrom([]).asIndexedPairs().toArray(); - deepStrictEqual(empty, []); + const asyncFrom = (...args) => Readable.from(...args).map(async (x) => x) + const pairs = await asyncFrom([1, 2, 3]).asIndexedPairs().toArray() + deepStrictEqual(pairs, [ + [0, 1], + [1, 2], + [2, 3] + ]) + const empty = await asyncFrom([]).asIndexedPairs().toArray() + deepStrictEqual(empty, []) } { // Does not enumerate an infinite stream - const infinite = () => Readable.from(async function* () { - while (true) yield 1; - }()); - const pairs = await infinite().asIndexedPairs().take(3).toArray(); - deepStrictEqual(pairs, [[0, 1], [1, 1], [2, 1]]); - const empty = await infinite().asIndexedPairs().take(0).toArray(); - deepStrictEqual(empty, []); + const infinite = () => + Readable.from( + (async function* () { + while (true) yield 1 + })() + ) + const pairs = await infinite().asIndexedPairs().take(3).toArray() + deepStrictEqual(pairs, [ + [0, 1], + [1, 1], + [2, 1] + ]) + const empty = await 
infinite().asIndexedPairs().take(0).toArray() + deepStrictEqual(empty, []) } { // AbortSignal - await rejects(async () => { - const ac = new AbortController(); - const { signal } = ac; - const p = Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray(); - ac.abort(); - await p; - }, { name: 'AbortError' }); + await rejects( + async () => { + const ac = new AbortController() + const { signal } = ac + const p = Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray() + ac.abort() + await p + }, + { name: 'AbortError' } + ) await rejects(async () => { - const signal = AbortSignal.abort(); - await Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray(); - }, /AbortError/); + const signal = AbortSignal.abort() + await Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray() + }, /AbortError/) } { // Error cases - throws(() => Readable.from([1]).asIndexedPairs(1), /ERR_INVALID_ARG_TYPE/); - throws(() => Readable.from([1]).asIndexedPairs({ signal: true }), /ERR_INVALID_ARG_TYPE/); + throws(() => Readable.from([1]).asIndexedPairs(1), /ERR_INVALID_ARG_TYPE/) + throws(() => Readable.from([1]).asIndexedPairs({ signal: true }), /ERR_INVALID_ARG_TYPE/) } - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-auto-destroy.js b/test/parallel/test-stream-auto-destroy.js index f1f4ade5e7..77454f4140 100644 --- a/test/parallel/test-stream-auto-destroy.js +++ b/test/parallel/test-stream-auto-destroy.js @@ -1,127 +1,139 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const stream = require('../../lib/ours/index'); -const assert = require('assert'); +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const stream = require('../../lib/ours/index') + +const assert = require('assert') { const r = new stream.Readable({ autoDestroy: true, + read() { - this.push('hello'); - this.push('world'); - this.push(null); + this.push('hello') + this.push('world') + this.push(null) }, - destroy: common.mustCall((err, cb) => cb()) - }); - - let ended = false; - - r.resume(); - r.on('end', common.mustCall(() => { - ended = true; - })); - - r.on('close', common.mustCall(() => { - assert(ended); - })); + destroy: common.mustCall((err, cb) => cb()) + }) + let ended = false + r.resume() + r.on( + 'end', + common.mustCall(() => { + ended = true + }) + ) + r.on( + 'close', + common.mustCall(() => { + assert(ended) + }) + ) } - { const w = new stream.Writable({ autoDestroy: true, + write(data, enc, cb) { - cb(null); + cb(null) }, - destroy: common.mustCall((err, cb) => cb()) - }); - - let finished = false; - - w.write('hello'); - w.write('world'); - w.end(); - - w.on('finish', common.mustCall(() => { - finished = true; - })); - w.on('close', common.mustCall(() => { - assert(finished); - })); + destroy: common.mustCall((err, cb) => cb()) + }) + let finished = false + w.write('hello') + w.write('world') + w.end() + w.on( + 'finish', + common.mustCall(() => { + finished = true + }) + ) + w.on( + 'close', + common.mustCall(() => { + assert(finished) + }) + 
) } - { const t = new stream.Transform({ autoDestroy: true, + transform(data, enc, cb) { - cb(null, data); + cb(null, data) }, - destroy: common.mustCall((err, cb) => cb()) - }); - - let ended = false; - let finished = false; - - t.write('hello'); - t.write('world'); - t.end(); - - t.resume(); - - t.on('end', common.mustCall(() => { - ended = true; - })); - t.on('finish', common.mustCall(() => { - finished = true; - })); - - t.on('close', common.mustCall(() => { - assert(ended); - assert(finished); - })); + destroy: common.mustCall((err, cb) => cb()) + }) + let ended = false + let finished = false + t.write('hello') + t.write('world') + t.end() + t.resume() + t.on( + 'end', + common.mustCall(() => { + ended = true + }) + ) + t.on( + 'finish', + common.mustCall(() => { + finished = true + }) + ) + t.on( + 'close', + common.mustCall(() => { + assert(ended) + assert(finished) + }) + ) } - { const r = new stream.Readable({ read() { - r2.emit('error', new Error('fail')); + r2.emit('error', new Error('fail')) } - }); + }) const r2 = new stream.Readable({ autoDestroy: true, destroy: common.mustCall((err, cb) => cb()) - }); - - r.pipe(r2); + }) + r.pipe(r2) } - { const r = new stream.Readable({ read() { - w.emit('error', new Error('fail')); + w.emit('error', new Error('fail')) } - }); + }) const w = new stream.Writable({ autoDestroy: true, destroy: common.mustCall((err, cb) => cb()) - }); - - r.pipe(w); + }) + r.pipe(w) } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js b/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js index 5cd9c7e613..7a6f7d683f 100644 --- a/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js +++ b/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js @@ -1,43 +1,42 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const { PassThrough } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const { PassThrough } = require('../../lib/ours/index') const encode = new PassThrough({ highWaterMark: 1 -}); - +}) const decode = new PassThrough({ highWaterMark: 1 -}); - +}) const send = common.mustCall((buf) => { - encode.write(buf); -}, 4); - -let i = 0; + encode.write(buf) +}, 4) +let i = 0 const onData = common.mustCall(() => { if (++i === 2) { - send(Buffer.from([0x3])); - send(Buffer.from([0x4])); + send(Buffer.from([0x3])) + send(Buffer.from([0x4])) } -}, 4); - -encode.pipe(decode).on('data', onData); - -send(Buffer.from([0x1])); -send(Buffer.from([0x2])); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}, 4) +encode.pipe(decode).on('data', onData) +send(Buffer.from([0x1])) +send(Buffer.from([0x2])) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code 
=== 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-backpressure.js b/test/parallel/test-stream-backpressure.js index 0265970ae2..d9072aec52 100644 --- a/test/parallel/test-stream-backpressure.js +++ b/test/parallel/test-stream-backpressure.js @@ -1,54 +1,53 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') -let pushes = 0; -const total = 65500 + 40 * 1024; +const assert = require('assert') + +const stream = require('../../lib/ours/index') + +let pushes = 0 +const total = 65500 + 40 * 1024 const rs = new stream.Readable({ - read: common.mustCall(function() { + read: common.mustCall(function () { if (pushes++ === 10) { - this.push(null); - return; + this.push(null) + return } - const length = this._readableState.length; - - // We are at most doing two full runs of _reads + const length = this._readableState.length // We are at most doing two full runs of _reads // before stopping, because Readable is greedy // to keep its buffer full - assert(length <= total); - this.push(Buffer.alloc(65500)); - for (let i = 0; i < 40; i++) { - this.push(Buffer.alloc(1024)); - } + assert(length <= total) + this.push(Buffer.alloc(65500)) - // We will be over highWaterMark at this point + for (let i = 0; i < 40; i++) { + this.push(Buffer.alloc(1024)) + } // We will be over highWaterMark at this point // but a new call to _read is scheduled anyway. }, 11) -}); - +}) const ws = stream.Writable({ - write: common.mustCall(function(data, enc, cb) { - setImmediate(cb); + write: common.mustCall(function (data, enc, cb) { + setImmediate(cb) }, 41 * 10) -}); - -rs.pipe(ws); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) +rs.pipe(ws) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-base-prototype-accessors-enumerability.js b/test/parallel/test-stream-base-prototype-accessors-enumerability.js index 7f3c6917e7..c58a0f8176 100644 --- a/test/parallel/test-stream-base-prototype-accessors-enumerability.js +++ b/test/parallel/test-stream-base-prototype-accessors-enumerability.js @@ -1,36 +1,34 @@ // Flags: --expose-internals +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -require('../common'); - -// This tests that the prototype accessors added by StreamBase::AddMethods + error() {} +} +require('../common') // This tests that the prototype accessors added by StreamBase::AddMethods // are not enumerable. They could be enumerated when inspecting the prototype // with util.inspect or the inspector protocol. 
-const assert = require('assert'); +const assert = require('assert') // Or anything that calls StreamBase::AddMethods when setting up its prototype -// Or anything that calls StreamBase::AddMethods when setting up its prototype const internalBinding = process.binding -const TTY = internalBinding('tty_wrap').TTY; - +const TTY = internalBinding('tty_wrap').TTY { - const ttyIsEnumerable = Object.prototype.propertyIsEnumerable.bind(TTY); - assert.strictEqual(ttyIsEnumerable('bytesRead'), false); - assert.strictEqual(ttyIsEnumerable('fd'), false); - assert.strictEqual(ttyIsEnumerable('_externalStream'), false); + const ttyIsEnumerable = Object.prototype.propertyIsEnumerable.bind(TTY) + assert.strictEqual(ttyIsEnumerable('bytesRead'), false) + assert.strictEqual(ttyIsEnumerable('fd'), false) + assert.strictEqual(ttyIsEnumerable('_externalStream'), false) } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-base-typechecking.js b/test/parallel/test-stream-base-typechecking.js index 73bcc8d0a5..fdf895dfbe 100644 --- a/test/parallel/test-stream-base-typechecking.js +++ b/test/parallel/test-stream-base-typechecking.js @@ -1,33 +1,47 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const net = require('net'); +const silentConsole = { + log() {}, -const server = net.createServer().listen(0, common.mustCall(() => { - const client = net.connect(server.address().port, common.mustCall(() => { - assert.throws(() => { - client.write('broken', 'buffer'); - }, { - name: 'TypeError', - code: 'ERR_INVALID_ARG_TYPE', - message: 'Second argument must be a buffer' - }); - client.destroy(); - server.close(); - })); -})); + error() {} +} +const common = require('../common') - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +const assert = require('assert') + +const net = require('net') + +const server = net.createServer().listen( + 0, + common.mustCall(() => { + const client = net.connect( + server.address().port, + common.mustCall(() => { + assert.throws( + () => { + client.write('broken', 'buffer') + }, + { + name: 'TypeError', + code: 'ERR_INVALID_ARG_TYPE', + message: 'Second argument must be a buffer' + } + ) + client.destroy() + server.close() + }) + ) + }) +) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-big-packet.js b/test/parallel/test-stream-big-packet.js index 671a6965e3..f6591a7b59 100644 --- a/test/parallel/test-stream-big-packet.js +++ b/test/parallel/test-stream-big-packet.js @@ -18,63 +18,66 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
+'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); + error() {} +} +require('../common') + +const assert = require('assert') -let passed = false; +const stream = require('../../lib/ours/index') + +let passed = false class TestStream extends stream.Transform { _transform(chunk, encoding, done) { if (!passed) { // Char 'a' only exists in the last write - passed = chunk.toString().includes('a'); + passed = chunk.toString().includes('a') } - done(); + + done() } } const s1 = new stream.Transform({ transform(chunk, encoding, cb) { - process.nextTick(cb, null, chunk); + process.nextTick(cb, null, chunk) } -}); -const s2 = new stream.PassThrough(); -const s3 = new TestStream(); -s1.pipe(s3); -// Don't let s2 auto close which may close s3 -s2.pipe(s3, { end: false }); - -// We must write a buffer larger than highWaterMark -const big = Buffer.alloc(s1.writableHighWaterMark + 1, 'x'); - -// Since big is larger than highWaterMark, it will be buffered internally. -assert(!s1.write(big)); -// 'tiny' is small enough to pass through internal buffer. -assert(s2.write('tiny')); - -// Write some small data in next IO loop, which will never be written to s3 +}) +const s2 = new stream.PassThrough() +const s3 = new TestStream() +s1.pipe(s3) // Don't let s2 auto close which may close s3 + +s2.pipe(s3, { + end: false +}) // We must write a buffer larger than highWaterMark + +const big = Buffer.alloc(s1.writableHighWaterMark + 1, 'x') // Since big is larger than highWaterMark, it will be buffered internally. + +assert(!s1.write(big)) // 'tiny' is small enough to pass through internal buffer. + +assert(s2.write('tiny')) // Write some small data in next IO loop, which will never be written to s3 // Because 'drain' event is not emitted from s1 and s1 is still paused -setImmediate(s1.write.bind(s1), 'later'); - -// Assert after two IO loops when all operations have been done. -process.on('exit', function() { - assert(passed, 'Large buffer is not handled properly by Writable Stream'); -}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + +setImmediate(s1.write.bind(s1), 'later') // Assert after two IO loops when all operations have been done. + +process.on('exit', function () { + assert(passed, 'Large buffer is not handled properly by Writable Stream') +}) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-big-push.js b/test/parallel/test-stream-big-push.js index 62190aa834..b48eb437be 100644 --- a/test/parallel/test-stream-big-push.js +++ b/test/parallel/test-stream-big-push.js @@ -18,72 +18,71 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
+'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); -const str = 'asdfasdfasdfasdfasdf'; + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const stream = require('../../lib/ours/index') +const str = 'asdfasdfasdfasdfasdf' const r = new stream.Readable({ highWaterMark: 5, encoding: 'utf8' -}); - -let reads = 0; +}) +let reads = 0 function _read() { if (reads === 0) { setTimeout(() => { - r.push(str); - }, 1); - reads++; + r.push(str) + }, 1) + reads++ } else if (reads === 1) { - const ret = r.push(str); - assert.strictEqual(ret, false); - reads++; + const ret = r.push(str) + assert.strictEqual(ret, false) + reads++ } else { - r.push(null); + r.push(null) } } -r._read = common.mustCall(_read, 3); - -r.on('end', common.mustCall()); - -// Push some data in to start. +r._read = common.mustCall(_read, 3) +r.on('end', common.mustCall()) // Push some data in to start. // We've never gotten any read event at this point. -const ret = r.push(str); -// Should be false. > hwm -assert(!ret); -let chunk = r.read(); -assert.strictEqual(chunk, str); -chunk = r.read(); -assert.strictEqual(chunk, null); +const ret = r.push(str) // Should be false. > hwm + +assert(!ret) +let chunk = r.read() +assert.strictEqual(chunk, str) +chunk = r.read() +assert.strictEqual(chunk, null) r.once('readable', () => { // This time, we'll get *all* the remaining data, because // it's been added synchronously, as the read WOULD take // us below the hwm, and so it triggered a _read() again, // which synchronously added more, which we then return. - chunk = r.read(); - assert.strictEqual(chunk, str + str); - - chunk = r.read(); - assert.strictEqual(chunk, null); -}); + chunk = r.read() + assert.strictEqual(chunk, str + str) + chunk = r.read() + assert.strictEqual(chunk, null) +}) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-buffer-list.js b/test/parallel/test-stream-buffer-list.js index 2accace65f..adcd14310d 100644 --- a/test/parallel/test-stream-buffer-list.js +++ b/test/parallel/test-stream-buffer-list.js @@ -1,99 +1,92 @@ // Flags: --expose-internals +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); -const BufferList = require('../../lib/internal/streams/buffer_list'); +const silentConsole = { + log() {}, -// Test empty buffer list. -const emptyList = new BufferList(); - -emptyList.shift(); -assert.deepStrictEqual(emptyList, new BufferList()); + error() {} +} +require('../common') -assert.strictEqual(emptyList.join(','), ''); +const assert = require('assert') -assert.deepStrictEqual(emptyList.concat(0), Buffer.alloc(0)); +const BufferList = require('../../lib/internal/streams/buffer_list') // Test empty buffer list. 
-const buf = Buffer.from('foo'); +const emptyList = new BufferList() +emptyList.shift() +assert.deepStrictEqual(emptyList, new BufferList()) +assert.strictEqual(emptyList.join(','), '') +assert.deepStrictEqual(emptyList.concat(0), Buffer.alloc(0)) +const buf = Buffer.from('foo') function testIterator(list, count) { // test iterator - let len = 0; - // eslint-disable-next-line no-unused-vars + let len = 0 // eslint-disable-next-line no-unused-vars + for (const x of list) { - len++; + len++ } - assert.strictEqual(len, count); -} -// Test buffer list with one element. -const list = new BufferList(); -testIterator(list, 0); + assert.strictEqual(len, count) +} // Test buffer list with one element. + +const list = new BufferList() +testIterator(list, 0) +list.push(buf) +testIterator(list, 1) -list.push(buf); -testIterator(list, 1); for (const x of list) { - assert.strictEqual(x, buf); + assert.strictEqual(x, buf) } -const copy = list.concat(3); -testIterator(copy, 3); - -assert.notStrictEqual(copy, buf); -assert.deepStrictEqual(copy, buf); - -assert.strictEqual(list.join(','), 'foo'); - -const shifted = list.shift(); -testIterator(list, 0); -assert.strictEqual(shifted, buf); -assert.deepStrictEqual(list, new BufferList()); - +const copy = list.concat(3) +testIterator(copy, 3) +assert.notStrictEqual(copy, buf) +assert.deepStrictEqual(copy, buf) +assert.strictEqual(list.join(','), 'foo') +const shifted = list.shift() +testIterator(list, 0) +assert.strictEqual(shifted, buf) +assert.deepStrictEqual(list, new BufferList()) { - const list = new BufferList(); - list.push('foo'); - list.push('bar'); - list.push('foo'); - list.push('bar'); - assert.strictEqual(list.consume(6, true), 'foobar'); - assert.strictEqual(list.consume(6, true), 'foobar'); + const list = new BufferList() + list.push('foo') + list.push('bar') + list.push('foo') + list.push('bar') + assert.strictEqual(list.consume(6, true), 'foobar') + assert.strictEqual(list.consume(6, true), 'foobar') } - { - const list = new BufferList(); - list.push('foo'); - list.push('bar'); - assert.strictEqual(list.consume(5, true), 'fooba'); + const list = new BufferList() + list.push('foo') + list.push('bar') + assert.strictEqual(list.consume(5, true), 'fooba') } - { - const list = new BufferList(); - list.push(buf); - list.push(buf); - list.push(buf); - list.push(buf); - assert.strictEqual(list.consume(6).toString(), 'foofoo'); - assert.strictEqual(list.consume(6).toString(), 'foofoo'); + const list = new BufferList() + list.push(buf) + list.push(buf) + list.push(buf) + list.push(buf) + assert.strictEqual(list.consume(6).toString(), 'foofoo') + assert.strictEqual(list.consume(6).toString(), 'foofoo') } - { - const list = new BufferList(); - list.push(buf); - list.push(buf); - assert.strictEqual(list.consume(5).toString(), 'foofo'); + const list = new BufferList() + list.push(buf) + list.push(buf) + assert.strictEqual(list.consume(5).toString(), 'foofo') } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-catch-rejections.js b/test/parallel/test-stream-catch-rejections.js index cf65a8c24f..9a39c0870c 100644 --- a/test/parallel/test-stream-catch-rejections.js +++ 
b/test/parallel/test-stream-catch-rejections.js @@ -1,66 +1,72 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const stream = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const stream = require('../../lib/ours/index') + +const assert = require('assert') { const r = new stream.Readable({ captureRejections: true, - read() { - } - }); - r.push('hello'); - r.push('world'); - - const err = new Error('kaboom'); - - r.on('error', common.mustCall((_err) => { - assert.strictEqual(err, _err); - assert.strictEqual(r.destroyed, true); - })); + read() {} + }) + r.push('hello') + r.push('world') + const err = new Error('kaboom') + r.on( + 'error', + common.mustCall((_err) => { + assert.strictEqual(err, _err) + assert.strictEqual(r.destroyed, true) + }) + ) r.on('data', async () => { - throw err; - }); + throw err + }) } - { const w = new stream.Writable({ captureRejections: true, highWaterMark: 1, + write(chunk, enc, cb) { - process.nextTick(cb); + process.nextTick(cb) } - }); - - const err = new Error('kaboom'); - + }) + const err = new Error('kaboom') w.write('hello', () => { - w.write('world'); - }); - - w.on('error', common.mustCall((_err) => { - assert.strictEqual(err, _err); - assert.strictEqual(w.destroyed, true); - })); - - w.on('drain', common.mustCall(async () => { - throw err; - }, 2)); + w.write('world') + }) + w.on( + 'error', + common.mustCall((_err) => { + assert.strictEqual(err, _err) + assert.strictEqual(w.destroyed, true) + }) + ) + w.on( + 'drain', + common.mustCall(async () => { + throw err + }, 2) + ) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-compose.js b/test/parallel/test-stream-compose.js index 1cafda10cd..2cf56da13f 100644 --- a/test/parallel/test-stream-compose.js +++ b/test/parallel/test-stream-compose.js @@ -1,440 +1,501 @@ // Flags: --expose-internals +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; + error() {} +} +const common = require('../common') + +const { Readable, Transform, Writable, finished, PassThrough } = require('../../lib/ours/index') -const common = require('../common'); -const { - Readable, - Transform, - Writable, - finished, - PassThrough -} = require('../../lib/ours/index'); -const compose = require('../../lib/internal/streams/compose'); -const assert = require('assert'); +const compose = require('../../lib/internal/streams/compose') + +const assert = require('assert') { - let res = ''; + let res = '' compose( new Transform({ transform: common.mustCall((chunk, encoding, callback) => { - callback(null, chunk + chunk); + callback(null, chunk + chunk) }) }), new Transform({ transform: common.mustCall((chunk, encoding, callback) => { - callback(null, chunk.toString().toUpperCase()); + callback(null, chunk.toString().toUpperCase()) }) }) ) - .end('asd') - .on('data', 
common.mustCall((buf) => { - res += buf; - })) - .on('end', common.mustCall(() => { - assert.strictEqual(res, 'ASDASD'); - })); + .end('asd') + .on( + 'data', + common.mustCall((buf) => { + res += buf + }) + ) + .on( + 'end', + common.mustCall(() => { + assert.strictEqual(res, 'ASDASD') + }) + ) } - { - let res = ''; + let res = '' compose( - async function*(source) { + async function* (source) { for await (const chunk of source) { - yield chunk + chunk; + yield chunk + chunk } }, - async function*(source) { + async function* (source) { for await (const chunk of source) { - yield chunk.toString().toUpperCase(); + yield chunk.toString().toUpperCase() } } ) - .end('asd') - .on('data', common.mustCall((buf) => { - res += buf; - })) - .on('end', common.mustCall(() => { - assert.strictEqual(res, 'ASDASD'); - })); + .end('asd') + .on( + 'data', + common.mustCall((buf) => { + res += buf + }) + ) + .on( + 'end', + common.mustCall(() => { + assert.strictEqual(res, 'ASDASD') + }) + ) } - { - let res = ''; - compose( - async function*(source) { - for await (const chunk of source) { - yield chunk + chunk; - } + let res = '' + compose(async function* (source) { + for await (const chunk of source) { + yield chunk + chunk } - ) - .end('asd') - .on('data', common.mustCall((buf) => { - res += buf; - })) - .on('end', common.mustCall(() => { - assert.strictEqual(res, 'asdasd'); - })); + }) + .end('asd') + .on( + 'data', + common.mustCall((buf) => { + res += buf + }) + ) + .on( + 'end', + common.mustCall(() => { + assert.strictEqual(res, 'asdasd') + }) + ) } - { - let res = ''; + let res = '' compose( Readable.from(['asd']), new Transform({ transform: common.mustCall((chunk, encoding, callback) => { - callback(null, chunk.toString().toUpperCase()); + callback(null, chunk.toString().toUpperCase()) }) }) ) - .on('data', common.mustCall((buf) => { - res += buf; - })) - .on('end', common.mustCall(() => { - assert.strictEqual(res, 'ASD'); - })); + .on( + 'data', + common.mustCall((buf) => { + res += buf + }) + ) + .on( + 'end', + common.mustCall(() => { + assert.strictEqual(res, 'ASD') + }) + ) } - { - let res = ''; + let res = '' compose( - async function* () { - yield 'asd'; - }(), + (async function* () { + yield 'asd' + })(), new Transform({ transform: common.mustCall((chunk, encoding, callback) => { - callback(null, chunk.toString().toUpperCase()); + callback(null, chunk.toString().toUpperCase()) }) }) ) - .on('data', common.mustCall((buf) => { - res += buf; - })) - .on('end', common.mustCall(() => { - assert.strictEqual(res, 'ASD'); - })); + .on( + 'data', + common.mustCall((buf) => { + res += buf + }) + ) + .on( + 'end', + common.mustCall(() => { + assert.strictEqual(res, 'ASD') + }) + ) } - { - let res = ''; + let res = '' compose( new Transform({ transform: common.mustCall((chunk, encoding, callback) => { - callback(null, chunk.toString().toUpperCase()); + callback(null, chunk.toString().toUpperCase()) }) }), - async function*(source) { + async function* (source) { for await (const chunk of source) { - yield chunk; + yield chunk } }, new Writable({ write: common.mustCall((chunk, encoding, callback) => { - res += chunk; - callback(null); + res += chunk + callback(null) }) }) ) - .end('asd') - .on('finish', common.mustCall(() => { - assert.strictEqual(res, 'ASD'); - })); + .end('asd') + .on( + 'finish', + common.mustCall(() => { + assert.strictEqual(res, 'ASD') + }) + ) } - { - let res = ''; + let res = '' compose( new Transform({ transform: common.mustCall((chunk, encoding, callback) => { - callback(null, 
chunk.toString().toUpperCase()); + callback(null, chunk.toString().toUpperCase()) }) }), - async function*(source) { + async function* (source) { for await (const chunk of source) { - yield chunk; + yield chunk } }, - async function(source) { + async function (source) { for await (const chunk of source) { - res += chunk; + res += chunk } } ) - .end('asd') - .on('finish', common.mustCall(() => { - assert.strictEqual(res, 'ASD'); - })); + .end('asd') + .on( + 'finish', + common.mustCall(() => { + assert.strictEqual(res, 'ASD') + }) + ) } - { - let res; + let res compose( new Transform({ objectMode: true, transform: common.mustCall((chunk, encoding, callback) => { - callback(null, { chunk }); + callback(null, { + chunk + }) }) }), - async function*(source) { + async function* (source) { for await (const chunk of source) { - yield chunk; + yield chunk } }, new Transform({ objectMode: true, transform: common.mustCall((chunk, encoding, callback) => { - callback(null, { chunk }); + callback(null, { + chunk + }) }) }) ) - .end(true) - .on('data', common.mustCall((buf) => { - res = buf; - })) - .on('end', common.mustCall(() => { - assert.strictEqual(res.chunk.chunk, true); - })); + .end(true) + .on( + 'data', + common.mustCall((buf) => { + res = buf + }) + ) + .on( + 'end', + common.mustCall(() => { + assert.strictEqual(res.chunk.chunk, true) + }) + ) } - { - const _err = new Error('asd'); + const _err = new Error('asd') + compose( new Transform({ objectMode: true, transform: common.mustCall((chunk, encoding, callback) => { - callback(_err); + callback(_err) }) }), - async function*(source) { + async function* (source) { for await (const chunk of source) { - yield chunk; + yield chunk } }, new Transform({ objectMode: true, transform: common.mustNotCall((chunk, encoding, callback) => { - callback(null, { chunk }); + callback(null, { + chunk + }) }) }) ) - .end(true) - .on('data', common.mustNotCall()) - .on('end', common.mustNotCall()) - .on('error', (err) => { - assert.strictEqual(err, _err); - }); + .end(true) + .on('data', common.mustNotCall()) + .on('end', common.mustNotCall()) + .on('error', (err) => { + assert.strictEqual(err, _err) + }) } - { - const _err = new Error('asd'); + const _err = new Error('asd') + compose( new Transform({ objectMode: true, transform: common.mustCall((chunk, encoding, callback) => { - callback(null, chunk); + callback(null, chunk) }) }), - async function*(source) { // eslint-disable-line require-yield - let tmp = ''; + async function* (source) { + // eslint-disable-line require-yield + let tmp = '' + for await (const chunk of source) { - tmp += chunk; - throw _err; + tmp += chunk + throw _err } - return tmp; + + return tmp }, new Transform({ objectMode: true, transform: common.mustNotCall((chunk, encoding, callback) => { - callback(null, { chunk }); + callback(null, { + chunk + }) }) }) ) - .end(true) - .on('data', common.mustNotCall()) - .on('end', common.mustNotCall()) - .on('error', (err) => { - assert.strictEqual(err, _err); - }); + .end(true) + .on('data', common.mustNotCall()) + .on('end', common.mustNotCall()) + .on('error', (err) => { + assert.strictEqual(err, _err) + }) } - { - let buf = ''; - - // Convert into readable Duplex. - const s1 = compose(async function* () { - yield 'Hello'; - yield 'World'; - }(), async function* (source) { - for await (const chunk of source) { - yield String(chunk).toUpperCase(); - } - }, async function(source) { - for await (const chunk of source) { - buf += chunk; + let buf = '' // Convert into readable Duplex. 
+ + const s1 = compose( + (async function* () { + yield 'Hello' + yield 'World' + })(), + async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase() + } + }, + async function (source) { + for await (const chunk of source) { + buf += chunk + } } - }); - - assert.strictEqual(s1.writable, false); - assert.strictEqual(s1.readable, false); - - finished(s1.resume(), common.mustCall((err) => { - assert(!err); - assert.strictEqual(buf, 'HELLOWORLD'); - })); + ) + assert.strictEqual(s1.writable, false) + assert.strictEqual(s1.readable, false) + finished( + s1.resume(), + common.mustCall((err) => { + assert(!err) + assert.strictEqual(buf, 'HELLOWORLD') + }) + ) } - { - let buf = ''; - // Convert into transform duplex. + let buf = '' // Convert into transform duplex. + const s2 = compose(async function* (source) { for await (const chunk of source) { - yield String(chunk).toUpperCase(); + yield String(chunk).toUpperCase() } - }); - s2.end('helloworld'); - s2.resume(); + }) + s2.end('helloworld') + s2.resume() s2.on('data', (chunk) => { - buf += chunk; - }); - - finished(s2.resume(), common.mustCall((err) => { - assert(!err); - assert.strictEqual(buf, 'HELLOWORLD'); - })); + buf += chunk + }) + finished( + s2.resume(), + common.mustCall((err) => { + assert(!err) + assert.strictEqual(buf, 'HELLOWORLD') + }) + ) } - { - let buf = ''; + let buf = '' // Convert into readable Duplex. - // Convert into readable Duplex. - const s1 = compose(async function* () { - yield 'Hello'; - yield 'World'; - }()); + const s1 = compose( + (async function* () { + yield 'Hello' + yield 'World' + })() + ) // Convert into transform duplex. - // Convert into transform duplex. const s2 = compose(async function* (source) { for await (const chunk of source) { - yield String(chunk).toUpperCase(); + yield String(chunk).toUpperCase() } - }); + }) // Convert into writable duplex. - // Convert into writable duplex. - const s3 = compose(async function(source) { + const s3 = compose(async function (source) { for await (const chunk of source) { - buf += chunk; + buf += chunk } - }); - - const s4 = compose(s1, s2, s3); - - finished(s4, common.mustCall((err) => { - assert(!err); - assert.strictEqual(buf, 'HELLOWORLD'); - })); + }) + const s4 = compose(s1, s2, s3) + finished( + s4, + common.mustCall((err) => { + assert(!err) + assert.strictEqual(buf, 'HELLOWORLD') + }) + ) } - { - let buf = ''; - - // Convert into readable Duplex. - const s1 = compose(async function* () { - yield 'Hello'; - yield 'World'; - }(), async function* (source) { - for await (const chunk of source) { - yield String(chunk).toUpperCase(); - } - }, async function(source) { - for await (const chunk of source) { - buf += chunk; + let buf = '' // Convert into readable Duplex. 
+ + const s1 = compose( + (async function* () { + yield 'Hello' + yield 'World' + })(), + async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase() + } + }, + async function (source) { + for await (const chunk of source) { + buf += chunk + } } - }); - - finished(s1, common.mustCall((err) => { - assert(!err); - assert.strictEqual(buf, 'HELLOWORLD'); - })); + ) + finished( + s1, + common.mustCall((err) => { + assert(!err) + assert.strictEqual(buf, 'HELLOWORLD') + }) + ) } - { try { - compose(); + compose() } catch (err) { - assert.strictEqual(err.code, 'ERR_MISSING_ARGS'); + assert.strictEqual(err.code, 'ERR_MISSING_ARGS') } } - { try { - compose(new Writable(), new PassThrough()); + compose(new Writable(), new PassThrough()) } catch (err) { - assert.strictEqual(err.code, 'ERR_INVALID_ARG_VALUE'); + assert.strictEqual(err.code, 'ERR_INVALID_ARG_VALUE') } } - { try { - compose(new PassThrough(), new Readable({ read() {} }), new PassThrough()); + compose( + new PassThrough(), + new Readable({ + read() {} + }), + new PassThrough() + ) } catch (err) { - assert.strictEqual(err.code, 'ERR_INVALID_ARG_VALUE'); + assert.strictEqual(err.code, 'ERR_INVALID_ARG_VALUE') } } - { - let buf = ''; + let buf = '' // Convert into readable Duplex. + + const s1 = compose( + (async function* () { + yield 'Hello' + yield 'World' + })(), + async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase() + } + }, + async function (source) { + for await (const chunk of source) { + buf += chunk + } - // Convert into readable Duplex. - const s1 = compose(async function* () { - yield 'Hello'; - yield 'World'; - }(), async function* (source) { - for await (const chunk of source) { - yield String(chunk).toUpperCase(); - } - }, async function(source) { - for await (const chunk of source) { - buf += chunk; + return buf } - return buf; - }); - - finished(s1, common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_INVALID_RETURN_VALUE'); - })); + ) + finished( + s1, + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_INVALID_RETURN_VALUE') + }) + ) } - { - let buf = ''; + let buf = '' // Convert into readable Duplex. - // Convert into readable Duplex. 
- const s1 = compose('HelloWorld', async function* (source) { - for await (const chunk of source) { - yield String(chunk).toUpperCase(); - } - }, async function(source) { - for await (const chunk of source) { - buf += chunk; + const s1 = compose( + 'HelloWorld', + async function* (source) { + for await (const chunk of source) { + yield String(chunk).toUpperCase() + } + }, + async function (source) { + for await (const chunk of source) { + buf += chunk + } } - }); - - finished(s1, common.mustCall((err) => { - assert(!err); - assert.strictEqual(buf, 'HELLOWORLD'); - })); + ) + finished( + s1, + common.mustCall((err) => { + assert(!err) + assert.strictEqual(buf, 'HELLOWORLD') + }) + ) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-construct.js b/test/parallel/test-stream-construct.js index 86165797bf..03aa33f862 100644 --- a/test/parallel/test-stream-construct.js +++ b/test/parallel/test-stream-construct.js @@ -1,295 +1,340 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Writable, Readable, Duplex } = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const { Writable, Readable, Duplex } = require('../../lib/ours/index') + +const assert = require('assert') { // Multiple callback. new Writable({ construct: common.mustCall((callback) => { - callback(); - callback(); + callback() + callback() + }) + }).on( + 'error', + common.expectsError({ + name: 'Error', + code: 'ERR_MULTIPLE_CALLBACK' }) - }).on('error', common.expectsError({ - name: 'Error', - code: 'ERR_MULTIPLE_CALLBACK' - })); + ) } - { // Multiple callback. new Readable({ construct: common.mustCall((callback) => { - callback(); - callback(); + callback() + callback() + }) + }).on( + 'error', + common.expectsError({ + name: 'Error', + code: 'ERR_MULTIPLE_CALLBACK' }) - }).on('error', common.expectsError({ - name: 'Error', - code: 'ERR_MULTIPLE_CALLBACK' - })); + ) } - { // Synchronous error. - new Writable({ construct: common.mustCall((callback) => { - callback(new Error('test')); + callback(new Error('test')) + }) + }).on( + 'error', + common.expectsError({ + name: 'Error', + message: 'test' }) - }).on('error', common.expectsError({ - name: 'Error', - message: 'test' - })); + ) } - { // Synchronous error. - new Readable({ construct: common.mustCall((callback) => { - callback(new Error('test')); + callback(new Error('test')) }) - }).on('error', common.expectsError({ - name: 'Error', - message: 'test' - })); + }).on( + 'error', + common.expectsError({ + name: 'Error', + message: 'test' + }) + ) } - { // Asynchronous error. - new Writable({ construct: common.mustCall((callback) => { - process.nextTick(callback, new Error('test')); + process.nextTick(callback, new Error('test')) }) - }).on('error', common.expectsError({ - name: 'Error', - message: 'test' - })); + }).on( + 'error', + common.expectsError({ + name: 'Error', + message: 'test' + }) + ) } - { // Asynchronous error. 
- new Readable({ construct: common.mustCall((callback) => { - process.nextTick(callback, new Error('test')); + process.nextTick(callback, new Error('test')) + }) + }).on( + 'error', + common.expectsError({ + name: 'Error', + message: 'test' }) - }).on('error', common.expectsError({ - name: 'Error', - message: 'test' - })); + ) } function testDestroy(factory) { { - let constructed = false; + let constructed = false const s = factory({ construct: common.mustCall((cb) => { - constructed = true; - process.nextTick(cb); + constructed = true + process.nextTick(cb) }) - }); - s.on('close', common.mustCall(() => { - assert.strictEqual(constructed, true); - })); - s.destroy(); + }) + s.on( + 'close', + common.mustCall(() => { + assert.strictEqual(constructed, true) + }) + ) + s.destroy() } - { - let constructed = false; + let constructed = false const s = factory({ construct: common.mustCall((cb) => { - constructed = true; - process.nextTick(cb); + constructed = true + process.nextTick(cb) }) - }); - s.on('close', common.mustCall(() => { - assert.strictEqual(constructed, true); - })); + }) + s.on( + 'close', + common.mustCall(() => { + assert.strictEqual(constructed, true) + }) + ) s.destroy(null, () => { - assert.strictEqual(constructed, true); - }); + assert.strictEqual(constructed, true) + }) } - { - let constructed = false; + let constructed = false const s = factory({ construct: common.mustCall((cb) => { - constructed = true; - process.nextTick(cb); + constructed = true + process.nextTick(cb) + }) + }) + s.on( + 'close', + common.mustCall(() => { + assert.strictEqual(constructed, true) }) - }); - s.on('close', common.mustCall(() => { - assert.strictEqual(constructed, true); - })); - s.destroy(); + ) + s.destroy() } - - { - let constructed = false; + let constructed = false const s = factory({ construct: common.mustCall((cb) => { - constructed = true; - process.nextTick(cb); + constructed = true + process.nextTick(cb) + }) + }) + s.on( + 'close', + common.mustCall(() => { + assert.strictEqual(constructed, true) }) - }); - s.on('close', common.mustCall(() => { - assert.strictEqual(constructed, true); - })); - s.on('error', common.mustCall((err) => { - assert.strictEqual(err.message, 'kaboom'); - })); + ) + s.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err.message, 'kaboom') + }) + ) s.destroy(new Error('kaboom'), (err) => { - assert.strictEqual(err.message, 'kaboom'); - assert.strictEqual(constructed, true); - }); + assert.strictEqual(err.message, 'kaboom') + assert.strictEqual(constructed, true) + }) } - { - let constructed = false; + let constructed = false const s = factory({ construct: common.mustCall((cb) => { - constructed = true; - process.nextTick(cb); + constructed = true + process.nextTick(cb) }) - }); - s.on('error', common.mustCall(() => { - assert.strictEqual(constructed, true); - })); - s.on('close', common.mustCall(() => { - assert.strictEqual(constructed, true); - })); - s.destroy(new Error()); + }) + s.on( + 'error', + common.mustCall(() => { + assert.strictEqual(constructed, true) + }) + ) + s.on( + 'close', + common.mustCall(() => { + assert.strictEqual(constructed, true) + }) + ) + s.destroy(new Error()) } } -testDestroy((opts) => new Readable({ - read: common.mustNotCall(), - ...opts -})); -testDestroy((opts) => new Writable({ - write: common.mustNotCall(), - final: common.mustNotCall(), - ...opts -})); +testDestroy( + (opts) => + new Readable({ + read: common.mustNotCall(), + ...opts + }) +) +testDestroy( + (opts) => + new Writable({ + write: 
common.mustNotCall(), + final: common.mustNotCall(), + ...opts + }) +) { - let constructed = false; + let constructed = false const r = new Readable({ autoDestroy: true, construct: common.mustCall((cb) => { - constructed = true; - process.nextTick(cb); + constructed = true + process.nextTick(cb) }), read: common.mustCall(() => { - assert.strictEqual(constructed, true); - r.push(null); + assert.strictEqual(constructed, true) + r.push(null) + }) + }) + r.on( + 'close', + common.mustCall(() => { + assert.strictEqual(constructed, true) }) - }); - r.on('close', common.mustCall(() => { - assert.strictEqual(constructed, true); - })); - r.on('data', common.mustNotCall()); + ) + r.on('data', common.mustNotCall()) } - { - let constructed = false; + let constructed = false const w = new Writable({ autoDestroy: true, construct: common.mustCall((cb) => { - constructed = true; - process.nextTick(cb); + constructed = true + process.nextTick(cb) }), write: common.mustCall((chunk, encoding, cb) => { - assert.strictEqual(constructed, true); - process.nextTick(cb); + assert.strictEqual(constructed, true) + process.nextTick(cb) }), final: common.mustCall((cb) => { - assert.strictEqual(constructed, true); - process.nextTick(cb); + assert.strictEqual(constructed, true) + process.nextTick(cb) + }) + }) + w.on( + 'close', + common.mustCall(() => { + assert.strictEqual(constructed, true) }) - }); - w.on('close', common.mustCall(() => { - assert.strictEqual(constructed, true); - })); - w.end('data'); + ) + w.end('data') } - { - let constructed = false; + let constructed = false const w = new Writable({ autoDestroy: true, construct: common.mustCall((cb) => { - constructed = true; - process.nextTick(cb); + constructed = true + process.nextTick(cb) }), write: common.mustNotCall(), final: common.mustCall((cb) => { - assert.strictEqual(constructed, true); - process.nextTick(cb); + assert.strictEqual(constructed, true) + process.nextTick(cb) + }) + }) + w.on( + 'close', + common.mustCall(() => { + assert.strictEqual(constructed, true) }) - }); - w.on('close', common.mustCall(() => { - assert.strictEqual(constructed, true); - })); - w.end(); + ) + w.end() } - { new Duplex({ construct: common.mustCall() - }); + }) } - { // https://github.com/nodejs/node/issues/34448 - - let constructed = false; + let constructed = false const d = new Duplex({ readable: false, construct: common.mustCall((callback) => { - setImmediate(common.mustCall(() => { - constructed = true; - callback(); - })); + setImmediate( + common.mustCall(() => { + constructed = true + callback() + }) + ) }), + write(chunk, encoding, callback) { - callback(); + callback() }, + read() { - this.push(null); + this.push(null) } - }); - d.resume(); - d.end('foo'); - d.on('close', common.mustCall(() => { - assert.strictEqual(constructed, true); - })); + }) + d.resume() + d.end('foo') + d.on( + 'close', + common.mustCall(() => { + assert.strictEqual(constructed, true) + }) + ) } - { // Construct should not cause stream to read. 
new Readable({ construct: common.mustCall((callback) => { - callback(); + callback() }), read: common.mustNotCall() - }); + }) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-decoder-objectmode.js b/test/parallel/test-stream-decoder-objectmode.js index 96ae916cfb..32a5839a88 100644 --- a/test/parallel/test-stream-decoder-objectmode.js +++ b/test/parallel/test-stream-decoder-objectmode.js @@ -1,35 +1,37 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -require('../common'); -const stream = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +require('../common') + +const stream = require('../../lib/ours/index') + +const assert = require('assert') const readable = new stream.Readable({ read: () => {}, encoding: 'utf16le', objectMode: true -}); - -readable.push(Buffer.from('abc', 'utf16le')); -readable.push(Buffer.from('def', 'utf16le')); -readable.push(null); - -// Without object mode, these would be concatenated into a single chunk. -assert.strictEqual(readable.read(), 'abc'); -assert.strictEqual(readable.read(), 'def'); -assert.strictEqual(readable.read(), null); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) +readable.push(Buffer.from('abc', 'utf16le')) +readable.push(Buffer.from('def', 'utf16le')) +readable.push(null) // Without object mode, these would be concatenated into a single chunk. 
+ +assert.strictEqual(readable.read(), 'abc') +assert.strictEqual(readable.read(), 'def') +assert.strictEqual(readable.read(), null) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-destroy-event-order.js b/test/parallel/test-stream-destroy-event-order.js index 4f5775449c..8086a1733f 100644 --- a/test/parallel/test-stream-destroy-event-order.js +++ b/test/parallel/test-stream-destroy-event-order.js @@ -1,39 +1,45 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const { Readable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') -const rs = new Readable({ - read() {} -}); - -let closed = false; -let errored = false; - -rs.on('close', common.mustCall(() => { - closed = true; - assert(errored); -})); +const assert = require('assert') -rs.on('error', common.mustCall((err) => { - errored = true; - assert(!closed); -})); +const { Readable } = require('../../lib/ours/index') -rs.destroy(new Error('kaboom')); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +const rs = new Readable({ + read() {} +}) +let closed = false +let errored = false +rs.on( + 'close', + common.mustCall(() => { + closed = true + assert(errored) + }) +) +rs.on( + 'error', + common.mustCall((err) => { + errored = true + assert(!closed) + }) +) +rs.destroy(new Error('kaboom')) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-drop-take.js b/test/parallel/test-stream-drop-take.js index d99c38f541..f919c70f52 100644 --- a/test/parallel/test-stream-drop-take.js +++ b/test/parallel/test-stream-drop-take.js @@ -1,117 +1,142 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { - Readable, -} = require('../../lib/ours/index'); -const { deepStrictEqual, rejects, throws } = require('assert'); + error() {} +} +const common = require('../common') -const { from } = Readable; +const { Readable } = require('../../lib/ours/index') -const fromAsync = (...args) => from(...args).map(async (x) => x); +const { deepStrictEqual, rejects, throws } = require('assert') -const naturals = () => from(async function*() { - let i = 1; - while (true) { - yield i++; - } -}()); +const { from } = Readable + +const fromAsync = (...args) => from(...args).map(async (x) => x) + +const naturals = () => + from( + (async function* () { + let i = 1 + + while (true) { + yield i++ + } + })() + ) { // Synchronous streams - (async () => { - deepStrictEqual(await from([1, 2, 3]).drop(2).toArray(), [3]); - deepStrictEqual(await from([1, 2, 3]).take(1).toArray(), [1]); - deepStrictEqual(await from([]).drop(2).toArray(), []); - deepStrictEqual(await from([]).take(1).toArray(), []); - deepStrictEqual(await from([1, 2, 
3]).drop(1).take(1).toArray(), [2]); - deepStrictEqual(await from([1, 2]).drop(0).toArray(), [1, 2]); - deepStrictEqual(await from([1, 2]).take(0).toArray(), []); - })().then(common.mustCall()); - // Asynchronous streams - (async () => { - deepStrictEqual(await fromAsync([1, 2, 3]).drop(2).toArray(), [3]); - deepStrictEqual(await fromAsync([1, 2, 3]).take(1).toArray(), [1]); - deepStrictEqual(await fromAsync([]).drop(2).toArray(), []); - deepStrictEqual(await fromAsync([]).take(1).toArray(), []); - deepStrictEqual(await fromAsync([1, 2, 3]).drop(1).take(1).toArray(), [2]); - deepStrictEqual(await fromAsync([1, 2]).drop(0).toArray(), [1, 2]); - deepStrictEqual(await fromAsync([1, 2]).take(0).toArray(), []); - })().then(common.mustCall()); - // Infinite streams + ;(async () => { + deepStrictEqual(await from([1, 2, 3]).drop(2).toArray(), [3]) + deepStrictEqual(await from([1, 2, 3]).take(1).toArray(), [1]) + deepStrictEqual(await from([]).drop(2).toArray(), []) + deepStrictEqual(await from([]).take(1).toArray(), []) + deepStrictEqual(await from([1, 2, 3]).drop(1).take(1).toArray(), [2]) + deepStrictEqual(await from([1, 2]).drop(0).toArray(), [1, 2]) + deepStrictEqual(await from([1, 2]).take(0).toArray(), []) + })().then(common.mustCall()) // Asynchronous streams + ;(async () => { + deepStrictEqual(await fromAsync([1, 2, 3]).drop(2).toArray(), [3]) + deepStrictEqual(await fromAsync([1, 2, 3]).take(1).toArray(), [1]) + deepStrictEqual(await fromAsync([]).drop(2).toArray(), []) + deepStrictEqual(await fromAsync([]).take(1).toArray(), []) + deepStrictEqual(await fromAsync([1, 2, 3]).drop(1).take(1).toArray(), [2]) + deepStrictEqual(await fromAsync([1, 2]).drop(0).toArray(), [1, 2]) + deepStrictEqual(await fromAsync([1, 2]).take(0).toArray(), []) + })().then(common.mustCall()) // Infinite streams // Asynchronous streams - (async () => { - deepStrictEqual(await naturals().take(1).toArray(), [1]); - deepStrictEqual(await naturals().drop(1).take(1).toArray(), [2]); - const next10 = [11, 12, 13, 14, 15, 16, 17, 18, 19, 20]; - deepStrictEqual(await naturals().drop(10).take(10).toArray(), next10); - deepStrictEqual(await naturals().take(5).take(1).toArray(), [1]); - })().then(common.mustCall()); + ;(async () => { + deepStrictEqual(await naturals().take(1).toArray(), [1]) + deepStrictEqual(await naturals().drop(1).take(1).toArray(), [2]) + const next10 = [11, 12, 13, 14, 15, 16, 17, 18, 19, 20] + deepStrictEqual(await naturals().drop(10).take(10).toArray(), next10) + deepStrictEqual(await naturals().take(5).take(1).toArray(), [1]) + })().then(common.mustCall()) } - { // Coercion - (async () => { + ;(async () => { // The spec made me do this ^^ - deepStrictEqual(await naturals().take('cat').toArray(), []); - deepStrictEqual(await naturals().take('2').toArray(), [1, 2]); - deepStrictEqual(await naturals().take(true).toArray(), [1]); - })().then(common.mustCall()); + deepStrictEqual(await naturals().take('cat').toArray(), []) + deepStrictEqual(await naturals().take('2').toArray(), [1, 2]) + deepStrictEqual(await naturals().take(true).toArray(), [1]) + })().then(common.mustCall()) } - { // Support for AbortSignal - const ac = new AbortController(); + const ac = new AbortController() rejects( - Readable.from([1, 2, 3]).take(1, { signal: ac.signal }).toArray(), { - name: 'AbortError', - }).then(common.mustCall()); + Readable.from([1, 2, 3]) + .take(1, { + signal: ac.signal + }) + .toArray(), + { + name: 'AbortError' + } + ).then(common.mustCall()) rejects( - Readable.from([1, 2, 3]).drop(1, { signal: ac.signal 
}).toArray(), { - name: 'AbortError', - }).then(common.mustCall()); - ac.abort(); + Readable.from([1, 2, 3]) + .drop(1, { + signal: ac.signal + }) + .toArray(), + { + name: 'AbortError' + } + ).then(common.mustCall()) + ac.abort() } - { // Support for AbortSignal, already aborted - const signal = AbortSignal.abort(); + const signal = AbortSignal.abort() rejects( - Readable.from([1, 2, 3]).take(1, { signal }).toArray(), { - name: 'AbortError', - }).then(common.mustCall()); + Readable.from([1, 2, 3]) + .take(1, { + signal + }) + .toArray(), + { + name: 'AbortError' + } + ).then(common.mustCall()) } - { // Error cases - const invalidArgs = [ - -1, - -Infinity, - -40, - ]; + const invalidArgs = [-1, -Infinity, -40] for (const example of invalidArgs) { - throws(() => from([]).take(example).toArray(), /ERR_OUT_OF_RANGE/); + throws(() => from([]).take(example).toArray(), /ERR_OUT_OF_RANGE/) } - throws(() => Readable.from([1]).drop(1, 1), /ERR_INVALID_ARG_TYPE/); - throws(() => Readable.from([1]).drop(1, { signal: true }), /ERR_INVALID_ARG_TYPE/); - - throws(() => Readable.from([1]).take(1, 1), /ERR_INVALID_ARG_TYPE/); - throws(() => Readable.from([1]).take(1, { signal: true }), /ERR_INVALID_ARG_TYPE/); + throws(() => Readable.from([1]).drop(1, 1), /ERR_INVALID_ARG_TYPE/) + throws( + () => + Readable.from([1]).drop(1, { + signal: true + }), + /ERR_INVALID_ARG_TYPE/ + ) + throws(() => Readable.from([1]).take(1, 1), /ERR_INVALID_ARG_TYPE/) + throws( + () => + Readable.from([1]).take(1, { + signal: true + }), + /ERR_INVALID_ARG_TYPE/ + ) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-duplex-destroy.js b/test/parallel/test-stream-duplex-destroy.js index 6b6b5e9a42..67dd7c80dc 100644 --- a/test/parallel/test-stream-duplex-destroy.js +++ b/test/parallel/test-stream-duplex-destroy.js @@ -1,272 +1,293 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Duplex } = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') -{ - const duplex = new Duplex({ - write(chunk, enc, cb) { cb(); }, - read() {} - }); +const { Duplex } = require('../../lib/ours/index') - duplex.resume(); +const assert = require('assert') - duplex.on('end', common.mustNotCall()); - duplex.on('finish', common.mustNotCall()); - duplex.on('close', common.mustCall()); +{ + const duplex = new Duplex({ + write(chunk, enc, cb) { + cb() + }, - duplex.destroy(); - assert.strictEqual(duplex.destroyed, true); + read() {} + }) + duplex.resume() + duplex.on('end', common.mustNotCall()) + duplex.on('finish', common.mustNotCall()) + duplex.on('close', common.mustCall()) + duplex.destroy() + assert.strictEqual(duplex.destroyed, true) } - { const duplex = new Duplex({ - write(chunk, enc, cb) { cb(); }, - read() {} - }); - duplex.resume(); - - const expected = new Error('kaboom'); + write(chunk, enc, cb) { + cb() + }, - duplex.on('end', common.mustNotCall()); - duplex.on('finish', common.mustNotCall()); - duplex.on('error', 
common.mustCall((err) => { - assert.strictEqual(err, expected); - })); - - duplex.destroy(expected); - assert.strictEqual(duplex.destroyed, true); + read() {} + }) + duplex.resume() + const expected = new Error('kaboom') + duplex.on('end', common.mustNotCall()) + duplex.on('finish', common.mustNotCall()) + duplex.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, expected) + }) + ) + duplex.destroy(expected) + assert.strictEqual(duplex.destroyed, true) } - { const duplex = new Duplex({ - write(chunk, enc, cb) { cb(); }, - read() {} - }); + write(chunk, enc, cb) { + cb() + }, - duplex._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, expected); - cb(err); - }); - - const expected = new Error('kaboom'); - - duplex.on('finish', common.mustNotCall('no finish event')); - duplex.on('error', common.mustCall((err) => { - assert.strictEqual(err, expected); - })); - - duplex.destroy(expected); - assert.strictEqual(duplex.destroyed, true); + read() {} + }) + duplex._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, expected) + cb(err) + }) + const expected = new Error('kaboom') + duplex.on('finish', common.mustNotCall('no finish event')) + duplex.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, expected) + }) + ) + duplex.destroy(expected) + assert.strictEqual(duplex.destroyed, true) } - { - const expected = new Error('kaboom'); + const expected = new Error('kaboom') const duplex = new Duplex({ - write(chunk, enc, cb) { cb(); }, - read() {}, - destroy: common.mustCall(function(err, cb) { - assert.strictEqual(err, expected); - cb(); - }) - }); - duplex.resume(); - - duplex.on('end', common.mustNotCall('no end event')); - duplex.on('finish', common.mustNotCall('no finish event')); + write(chunk, enc, cb) { + cb() + }, - // Error is swallowed by the custom _destroy - duplex.on('error', common.mustNotCall('no error event')); - duplex.on('close', common.mustCall()); + read() {}, - duplex.destroy(expected); - assert.strictEqual(duplex.destroyed, true); + destroy: common.mustCall(function (err, cb) { + assert.strictEqual(err, expected) + cb() + }) + }) + duplex.resume() + duplex.on('end', common.mustNotCall('no end event')) + duplex.on('finish', common.mustNotCall('no finish event')) // Error is swallowed by the custom _destroy + + duplex.on('error', common.mustNotCall('no error event')) + duplex.on('close', common.mustCall()) + duplex.destroy(expected) + assert.strictEqual(duplex.destroyed, true) } - { const duplex = new Duplex({ - write(chunk, enc, cb) { cb(); }, - read() {} - }); - - duplex._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, null); - cb(); - }); + write(chunk, enc, cb) { + cb() + }, - duplex.destroy(); - assert.strictEqual(duplex.destroyed, true); + read() {} + }) + duplex._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, null) + cb() + }) + duplex.destroy() + assert.strictEqual(duplex.destroyed, true) } - { const duplex = new Duplex({ - write(chunk, enc, cb) { cb(); }, - read() {} - }); - duplex.resume(); + write(chunk, enc, cb) { + cb() + }, - duplex._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, null); + read() {} + }) + duplex.resume() + duplex._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, null) process.nextTick(() => { - this.push(null); - this.end(); - cb(); - }); - }); - - const fail = common.mustNotCall('no finish or end event'); - - duplex.on('finish', fail); - duplex.on('end', fail); - - 
duplex.destroy(); - - duplex.removeListener('end', fail); - duplex.removeListener('finish', fail); - duplex.on('end', common.mustNotCall()); - duplex.on('finish', common.mustNotCall()); - assert.strictEqual(duplex.destroyed, true); + this.push(null) + this.end() + cb() + }) + }) + const fail = common.mustNotCall('no finish or end event') + duplex.on('finish', fail) + duplex.on('end', fail) + duplex.destroy() + duplex.removeListener('end', fail) + duplex.removeListener('finish', fail) + duplex.on('end', common.mustNotCall()) + duplex.on('finish', common.mustNotCall()) + assert.strictEqual(duplex.destroyed, true) } - { const duplex = new Duplex({ - write(chunk, enc, cb) { cb(); }, - read() {} - }); - - const expected = new Error('kaboom'); - - duplex._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, null); - cb(expected); - }); - - duplex.on('finish', common.mustNotCall('no finish event')); - duplex.on('end', common.mustNotCall('no end event')); - duplex.on('error', common.mustCall((err) => { - assert.strictEqual(err, expected); - })); + write(chunk, enc, cb) { + cb() + }, - duplex.destroy(); - assert.strictEqual(duplex.destroyed, true); + read() {} + }) + const expected = new Error('kaboom') + duplex._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, null) + cb(expected) + }) + duplex.on('finish', common.mustNotCall('no finish event')) + duplex.on('end', common.mustNotCall('no end event')) + duplex.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, expected) + }) + ) + duplex.destroy() + assert.strictEqual(duplex.destroyed, true) } - { const duplex = new Duplex({ - write(chunk, enc, cb) { cb(); }, - read() {}, - allowHalfOpen: true - }); - duplex.resume(); + write(chunk, enc, cb) { + cb() + }, - duplex.on('finish', common.mustNotCall()); - duplex.on('end', common.mustNotCall()); + read() {}, - duplex.destroy(); - assert.strictEqual(duplex.destroyed, true); + allowHalfOpen: true + }) + duplex.resume() + duplex.on('finish', common.mustNotCall()) + duplex.on('end', common.mustNotCall()) + duplex.destroy() + assert.strictEqual(duplex.destroyed, true) } - { const duplex = new Duplex({ - write(chunk, enc, cb) { cb(); }, - read() {}, - }); + write(chunk, enc, cb) { + cb() + }, - duplex.destroyed = true; - assert.strictEqual(duplex.destroyed, true); + read() {} + }) + duplex.destroyed = true + assert.strictEqual(duplex.destroyed, true) // The internal destroy() mechanism should not be triggered - // The internal destroy() mechanism should not be triggered - duplex.on('finish', common.mustNotCall()); - duplex.on('end', common.mustNotCall()); - duplex.destroy(); + duplex.on('finish', common.mustNotCall()) + duplex.on('end', common.mustNotCall()) + duplex.destroy() } - { function MyDuplex() { - assert.strictEqual(this.destroyed, false); - this.destroyed = false; - Duplex.call(this); + assert.strictEqual(this.destroyed, false) + this.destroyed = false + Duplex.call(this) } - Object.setPrototypeOf(MyDuplex.prototype, Duplex.prototype); - Object.setPrototypeOf(MyDuplex, Duplex); - - new MyDuplex(); + Object.setPrototypeOf(MyDuplex.prototype, Duplex.prototype) + Object.setPrototypeOf(MyDuplex, Duplex) + new MyDuplex() } - { const duplex = new Duplex({ writable: false, autoDestroy: true, - write(chunk, enc, cb) { cb(); }, - read() {}, - }); - duplex.push(null); - duplex.resume(); - duplex.on('close', common.mustCall()); -} + write(chunk, enc, cb) { + cb() + }, + + read() {} + }) + duplex.push(null) + duplex.resume() + duplex.on('close', 
common.mustCall()) +} { const duplex = new Duplex({ readable: false, autoDestroy: true, - write(chunk, enc, cb) { cb(); }, - read() {}, - }); - duplex.end(); - duplex.on('close', common.mustCall()); -} + write(chunk, enc, cb) { + cb() + }, + + read() {} + }) + duplex.end() + duplex.on('close', common.mustCall()) +} { const duplex = new Duplex({ allowHalfOpen: false, autoDestroy: true, - write(chunk, enc, cb) { cb(); }, - read() {}, - }); - duplex.push(null); - duplex.resume(); - const orgEnd = duplex.end; - duplex.end = common.mustNotCall(); + + write(chunk, enc, cb) { + cb() + }, + + read() {} + }) + duplex.push(null) + duplex.resume() + const orgEnd = duplex.end + duplex.end = common.mustNotCall() duplex.on('end', () => { // Ensure end() is called in next tick to allow // any pending writes to be invoked first. process.nextTick(() => { - duplex.end = common.mustCall(orgEnd); - }); - }); - duplex.on('close', common.mustCall()); + duplex.end = common.mustCall(orgEnd) + }) + }) + duplex.on('close', common.mustCall()) } { // Check abort signal - const controller = new AbortController(); - const { signal } = controller; + const controller = new AbortController() + const { signal } = controller const duplex = new Duplex({ - write(chunk, enc, cb) { cb(); }, + write(chunk, enc, cb) { + cb() + }, + read() {}, - signal, - }); - let count = 0; - duplex.on('error', common.mustCall((e) => { - assert.strictEqual(count++, 0); // Ensure not called twice - assert.strictEqual(e.name, 'AbortError'); - })); - duplex.on('close', common.mustCall()); - controller.abort(); + + signal + }) + let count = 0 + duplex.on( + 'error', + common.mustCall((e) => { + assert.strictEqual(count++, 0) // Ensure not called twice + + assert.strictEqual(e.name, 'AbortError') + }) + ) + duplex.on('close', common.mustCall()) + controller.abort() } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-duplex-end.js b/test/parallel/test-stream-duplex-end.js index 5c10daaa59..743caed878 100644 --- a/test/parallel/test-stream-duplex-end.js +++ b/test/parallel/test-stream-duplex-end.js @@ -1,56 +1,60 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const Duplex = require('../../lib/ours/index').Duplex; + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const Duplex = require('../../lib/ours/index').Duplex { const stream = new Duplex({ read() {} - }); - assert.strictEqual(stream.allowHalfOpen, true); - stream.on('finish', common.mustNotCall()); - assert.strictEqual(stream.listenerCount('end'), 0); - stream.resume(); - stream.push(null); + }) + assert.strictEqual(stream.allowHalfOpen, true) + stream.on('finish', common.mustNotCall()) + assert.strictEqual(stream.listenerCount('end'), 0) + stream.resume() + stream.push(null) } - { const stream = new Duplex({ read() {}, + allowHalfOpen: false - }); - assert.strictEqual(stream.allowHalfOpen, false); - stream.on('finish', common.mustCall()); - 
assert.strictEqual(stream.listenerCount('end'), 0); - stream.resume(); - stream.push(null); + }) + assert.strictEqual(stream.allowHalfOpen, false) + stream.on('finish', common.mustCall()) + assert.strictEqual(stream.listenerCount('end'), 0) + stream.resume() + stream.push(null) } - { const stream = new Duplex({ read() {}, + allowHalfOpen: false - }); - assert.strictEqual(stream.allowHalfOpen, false); - stream._writableState.ended = true; - stream.on('finish', common.mustNotCall()); - assert.strictEqual(stream.listenerCount('end'), 0); - stream.resume(); - stream.push(null); + }) + assert.strictEqual(stream.allowHalfOpen, false) + stream._writableState.ended = true + stream.on('finish', common.mustNotCall()) + assert.strictEqual(stream.listenerCount('end'), 0) + stream.resume() + stream.push(null) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-duplex-from.js b/test/parallel/test-stream-duplex-from.js index b63965f43d..8eff213a46 100644 --- a/test/parallel/test-stream-duplex-from.js +++ b/test/parallel/test-stream-duplex-from.js @@ -1,295 +1,350 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const { Duplex, Readable, Writable, pipeline } = require('../../lib/ours/index'); -const { Blob } = require('buffer'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Duplex, Readable, Writable, pipeline } = require('../../lib/ours/index') + +let { Blob } = require('buffer') + +if (typeof Blob === 'undefined') { + Blob = require('blob-polyfill').Blob +} { const d = Duplex.from({ readable: new Readable({ read() { - this.push('asd'); - this.push(null); + this.push('asd') + this.push(null) } }) - }); - assert.strictEqual(d.readable, true); - assert.strictEqual(d.writable, false); - d.once('readable', common.mustCall(function() { - assert.strictEqual(d.read().toString(), 'asd'); - })); - d.once('end', common.mustCall(function() { - assert.strictEqual(d.readable, false); - })); + }) + assert.strictEqual(d.readable, true) + assert.strictEqual(d.writable, false) + d.once( + 'readable', + common.mustCall(function () { + assert.strictEqual(d.read().toString(), 'asd') + }) + ) + d.once( + 'end', + common.mustCall(function () { + assert.strictEqual(d.readable, false) + }) + ) } - { - const d = Duplex.from(new Readable({ - read() { - this.push('asd'); - this.push(null); - } - })); - assert.strictEqual(d.readable, true); - assert.strictEqual(d.writable, false); - d.once('readable', common.mustCall(function() { - assert.strictEqual(d.read().toString(), 'asd'); - })); - d.once('end', common.mustCall(function() { - assert.strictEqual(d.readable, false); - })); + const d = Duplex.from( + new Readable({ + read() { + this.push('asd') + this.push(null) + } + }) + ) + assert.strictEqual(d.readable, true) + assert.strictEqual(d.writable, false) + d.once( + 'readable', + common.mustCall(function () { + assert.strictEqual(d.read().toString(), 'asd') + }) + ) + d.once( + 'end', 
+ common.mustCall(function () { + assert.strictEqual(d.readable, false) + }) + ) } - { - let ret = ''; - const d = Duplex.from(new Writable({ - write(chunk, encoding, callback) { - ret += chunk; - callback(); - } - })); - assert.strictEqual(d.readable, false); - assert.strictEqual(d.writable, true); - d.end('asd'); - d.on('finish', common.mustCall(function() { - assert.strictEqual(d.writable, false); - assert.strictEqual(ret, 'asd'); - })); + let ret = '' + const d = Duplex.from( + new Writable({ + write(chunk, encoding, callback) { + ret += chunk + callback() + } + }) + ) + assert.strictEqual(d.readable, false) + assert.strictEqual(d.writable, true) + d.end('asd') + d.on( + 'finish', + common.mustCall(function () { + assert.strictEqual(d.writable, false) + assert.strictEqual(ret, 'asd') + }) + ) } - { - let ret = ''; + let ret = '' const d = Duplex.from({ writable: new Writable({ write(chunk, encoding, callback) { - ret += chunk; - callback(); + ret += chunk + callback() } }) - }); - assert.strictEqual(d.readable, false); - assert.strictEqual(d.writable, true); - d.end('asd'); - d.on('finish', common.mustCall(function() { - assert.strictEqual(d.writable, false); - assert.strictEqual(ret, 'asd'); - })); + }) + assert.strictEqual(d.readable, false) + assert.strictEqual(d.writable, true) + d.end('asd') + d.on( + 'finish', + common.mustCall(function () { + assert.strictEqual(d.writable, false) + assert.strictEqual(ret, 'asd') + }) + ) } - { - let ret = ''; + let ret = '' const d = Duplex.from({ readable: new Readable({ read() { - this.push('asd'); - this.push(null); + this.push('asd') + this.push(null) } }), writable: new Writable({ write(chunk, encoding, callback) { - ret += chunk; - callback(); + ret += chunk + callback() } }) - }); - assert.strictEqual(d.readable, true); - assert.strictEqual(d.writable, true); - d.once('readable', common.mustCall(function() { - assert.strictEqual(d.read().toString(), 'asd'); - })); - d.once('end', common.mustCall(function() { - assert.strictEqual(d.readable, false); - })); - d.end('asd'); - d.once('finish', common.mustCall(function() { - assert.strictEqual(d.writable, false); - assert.strictEqual(ret, 'asd'); - })); + }) + assert.strictEqual(d.readable, true) + assert.strictEqual(d.writable, true) + d.once( + 'readable', + common.mustCall(function () { + assert.strictEqual(d.read().toString(), 'asd') + }) + ) + d.once( + 'end', + common.mustCall(function () { + assert.strictEqual(d.readable, false) + }) + ) + d.end('asd') + d.once( + 'finish', + common.mustCall(function () { + assert.strictEqual(d.writable, false) + assert.strictEqual(ret, 'asd') + }) + ) } - { - const d = Duplex.from(Promise.resolve('asd')); - assert.strictEqual(d.readable, true); - assert.strictEqual(d.writable, false); - d.once('readable', common.mustCall(function() { - assert.strictEqual(d.read().toString(), 'asd'); - })); - d.once('end', common.mustCall(function() { - assert.strictEqual(d.readable, false); - })); + const d = Duplex.from(Promise.resolve('asd')) + assert.strictEqual(d.readable, true) + assert.strictEqual(d.writable, false) + d.once( + 'readable', + common.mustCall(function () { + assert.strictEqual(d.read().toString(), 'asd') + }) + ) + d.once( + 'end', + common.mustCall(function () { + assert.strictEqual(d.readable, false) + }) + ) } - { // https://github.com/nodejs/node/issues/40497 pipeline( ['abc\ndef\nghi'], - Duplex.from(async function * (source) { - let rest = ''; + Duplex.from(async function* (source) { + let rest = '' + for await (const chunk of source) { - 
const lines = (rest + chunk.toString()).split('\n'); - rest = lines.pop(); + const lines = (rest + chunk.toString()).split('\n') + rest = lines.pop() + for (const line of lines) { - yield line; + yield line } } - yield rest; + + yield rest }), - async function * (source) { // eslint-disable-line require-yield - let ret = ''; + async function* (source) { + // eslint-disable-line require-yield + let ret = '' + for await (const x of source) { - ret += x; + ret += x } - assert.strictEqual(ret, 'abcdefghi'); + + assert.strictEqual(ret, 'abcdefghi') }, - common.mustCall(() => {}), - ); -} + common.mustCall(() => {}) + ) +} // Ensure that isDuplexNodeStream was called -// Ensure that isDuplexNodeStream was called { - const duplex = new Duplex(); - assert.strictEqual(Duplex.from(duplex), duplex); -} + const duplex = new Duplex() + assert.strictEqual(Duplex.from(duplex), duplex) +} // Ensure that Duplex.from works for blobs -// Ensure that Duplex.from works for blobs { - const blob = new Blob(['blob']); - const expectedByteLength = blob.size; - const duplex = Duplex.from(blob); - duplex.on('data', common.mustCall((arrayBuffer) => { - assert.strictEqual(arrayBuffer.byteLength, expectedByteLength); - })); -} + const blob = new Blob(['blob']) + const expectedByteLength = blob.size + const duplex = Duplex.from(blob) + duplex.on( + 'data', + common.mustCall((arrayBuffer) => { + assert.strictEqual(arrayBuffer.byteLength, expectedByteLength) + }) + ) +} // Ensure that given a promise rejection it emits an error -// Ensure that given a promise rejection it emits an error { - const myErrorMessage = 'myCustomError'; - Duplex.from(Promise.reject(myErrorMessage)) - .on('error', common.mustCall((error) => { - assert.strictEqual(error, myErrorMessage); - })); -} + const myErrorMessage = 'myCustomError' + Duplex.from(Promise.reject(myErrorMessage)).on( + 'error', + common.mustCall((error) => { + assert.strictEqual(error, myErrorMessage) + }) + ) +} // Ensure that given a promise rejection on an async function it emits an error -// Ensure that given a promise rejection on an async function it emits an error { - const myErrorMessage = 'myCustomError'; + const myErrorMessage = 'myCustomError' + async function asyncFn() { - return Promise.reject(myErrorMessage); + return Promise.reject(myErrorMessage) } - Duplex.from(asyncFn) - .on('error', common.mustCall((error) => { - assert.strictEqual(error, myErrorMessage); - })); -} + Duplex.from(asyncFn).on( + 'error', + common.mustCall((error) => { + assert.strictEqual(error, myErrorMessage) + }) + ) +} // Ensure that Duplex.from throws an Invalid return value when function is void -// Ensure that Duplex.from throws an Invalid return value when function is void { assert.throws(() => Duplex.from(() => {}), { - code: 'ERR_INVALID_RETURN_VALUE', - }); -} + code: 'ERR_INVALID_RETURN_VALUE' + }) +} // Ensure data if a sub object has a readable stream it's duplexified -// Ensure data if a sub object has a readable stream it's duplexified { - const msg = Buffer.from('hello'); + const msg = Buffer.from('hello') const duplex = Duplex.from({ readable: Readable({ read() { - this.push(msg); - this.push(null); + this.push(msg) + this.push(null) } }) - }).on('data', common.mustCall((data) => { - assert.strictEqual(data, msg); - })); - - assert.strictEqual(duplex.writable, false); -} + }).on( + 'data', + common.mustCall((data) => { + assert.strictEqual(data, msg) + }) + ) + assert.strictEqual(duplex.writable, false) +} // Ensure data if a sub object has a writable stream it's duplexified 
-// Ensure data if a sub object has a writable stream it's duplexified { - const msg = Buffer.from('hello'); + const msg = Buffer.from('hello') const duplex = Duplex.from({ writable: Writable({ write: common.mustCall((data) => { - assert.strictEqual(data, msg); + assert.strictEqual(data, msg) }) }) - }); - - duplex.write(msg); - assert.strictEqual(duplex.readable, false); -} + }) + duplex.write(msg) + assert.strictEqual(duplex.readable, false) +} // Ensure data if a sub object has a writable and readable stream it's duplexified -// Ensure data if a sub object has a writable and readable stream it's duplexified { - const msg = Buffer.from('hello'); - + const msg = Buffer.from('hello') const duplex = Duplex.from({ readable: Readable({ read() { - this.push(msg); - this.push(null); + this.push(msg) + this.push(null) } }), writable: Writable({ write: common.mustCall((data) => { - assert.strictEqual(data, msg); + assert.strictEqual(data, msg) }) }) - }); - - duplex.pipe(duplex) - .on('data', common.mustCall((data) => { - assert.strictEqual(data, msg); - assert.strictEqual(duplex.readable, true); - assert.strictEqual(duplex.writable, true); - })) - .on('end', common.mustCall()); -} + }) + duplex + .pipe(duplex) + .on( + 'data', + common.mustCall((data) => { + assert.strictEqual(data, msg) + assert.strictEqual(duplex.readable, true) + assert.strictEqual(duplex.writable, true) + }) + ) + .on('end', common.mustCall()) +} // Ensure that given readable stream that throws an error it calls destroy -// Ensure that given readable stream that throws an error it calls destroy { - const myErrorMessage = 'error!'; - const duplex = Duplex.from(Readable({ - read() { - throw new Error(myErrorMessage); - } - })); - duplex.on('error', common.mustCall((msg) => { - assert.strictEqual(msg.message, myErrorMessage); - })); -} + const myErrorMessage = 'error!' + const duplex = Duplex.from( + Readable({ + read() { + throw new Error(myErrorMessage) + } + }) + ) + duplex.on( + 'error', + common.mustCall((msg) => { + assert.strictEqual(msg.message, myErrorMessage) + }) + ) +} // Ensure that given writable stream that throws an error it calls destroy -// Ensure that given writable stream that throws an error it calls destroy { - const myErrorMessage = 'error!'; - const duplex = Duplex.from(Writable({ - write(chunk, enc, cb) { - cb(myErrorMessage); - } - })); - - duplex.on('error', common.mustCall((msg) => { - assert.strictEqual(msg, myErrorMessage); - })); - - duplex.write('test'); + const myErrorMessage = 'error!' 
+ const duplex = Duplex.from( + Writable({ + write(chunk, enc, cb) { + cb(myErrorMessage) + } + }) + ) + duplex.on( + 'error', + common.mustCall((msg) => { + assert.strictEqual(msg, myErrorMessage) + }) + ) + duplex.write('test') } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-duplex-props.js b/test/parallel/test-stream-duplex-props.js index 4680d355fd..ed2528204a 100644 --- a/test/parallel/test-stream-duplex-props.js +++ b/test/parallel/test-stream-duplex-props.js @@ -1,46 +1,47 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -require('../common'); -const assert = require('assert'); -const { Duplex } = require('../../lib/ours/index'); + error() {} +} +require('../common') + +const assert = require('assert') + +const { Duplex } = require('../../lib/ours/index') { const d = new Duplex({ objectMode: true, highWaterMark: 100 - }); - - assert.strictEqual(d.writableObjectMode, true); - assert.strictEqual(d.writableHighWaterMark, 100); - assert.strictEqual(d.readableObjectMode, true); - assert.strictEqual(d.readableHighWaterMark, 100); + }) + assert.strictEqual(d.writableObjectMode, true) + assert.strictEqual(d.writableHighWaterMark, 100) + assert.strictEqual(d.readableObjectMode, true) + assert.strictEqual(d.readableHighWaterMark, 100) } - { const d = new Duplex({ readableObjectMode: false, readableHighWaterMark: 10, writableObjectMode: true, writableHighWaterMark: 100 - }); - - assert.strictEqual(d.writableObjectMode, true); - assert.strictEqual(d.writableHighWaterMark, 100); - assert.strictEqual(d.readableObjectMode, false); - assert.strictEqual(d.readableHighWaterMark, 10); + }) + assert.strictEqual(d.writableObjectMode, true) + assert.strictEqual(d.writableHighWaterMark, 100) + assert.strictEqual(d.readableObjectMode, false) + assert.strictEqual(d.readableHighWaterMark, 10) } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-duplex-readable-end.js b/test/parallel/test-stream-duplex-readable-end.js index 60012c4ba7..1fd1e8966a 100644 --- a/test/parallel/test-stream-duplex-readable-end.js +++ b/test/parallel/test-stream-duplex-readable-end.js @@ -1,44 +1,46 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -// https://github.com/nodejs/node/issues/35926 -const common = require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -let loops = 5; + error() {} +} // https://github.com/nodejs/node/issues/35926 +const common = require('../common') +const assert = require('assert') + +const stream = 
require('../../lib/ours/index') + +let loops = 5 const src = new stream.Readable({ read() { - if (loops--) - this.push(Buffer.alloc(20000)); + if (loops--) this.push(Buffer.alloc(20000)) } -}); - +}) const dst = new stream.Transform({ transform(chunk, output, fn) { - this.push(null); - fn(); + this.push(null) + fn() } -}); - -src.pipe(dst); - -dst.on('data', () => { }); -dst.on('end', common.mustCall(() => { - assert.strictEqual(loops, 3); - assert.ok(src.isPaused()); -})); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) +src.pipe(dst) +dst.on('data', () => {}) +dst.on( + 'end', + common.mustCall(() => { + assert.strictEqual(loops, 3) + assert.ok(src.isPaused()) + }) +) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-duplex-readable-writable.js b/test/parallel/test-stream-duplex-readable-writable.js index 78655e043a..ba752d9298 100644 --- a/test/parallel/test-stream-duplex-readable-writable.js +++ b/test/parallel/test-stream-duplex-readable-writable.js @@ -1,61 +1,71 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Duplex } = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const { Duplex } = require('../../lib/ours/index') + +const assert = require('assert') { const duplex = new Duplex({ readable: false - }); - assert.strictEqual(duplex.readable, false); - duplex.push('asd'); - duplex.on('error', common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_PUSH_AFTER_EOF'); - })); - duplex.on('data', common.mustNotCall()); - duplex.on('end', common.mustNotCall()); + }) + assert.strictEqual(duplex.readable, false) + duplex.push('asd') + duplex.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PUSH_AFTER_EOF') + }) + ) + duplex.on('data', common.mustNotCall()) + duplex.on('end', common.mustNotCall()) } - { const duplex = new Duplex({ writable: false, write: common.mustNotCall() - }); - assert.strictEqual(duplex.writable, false); - duplex.write('asd'); - duplex.on('error', common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); - })); - duplex.on('finish', common.mustNotCall()); + }) + assert.strictEqual(duplex.writable, false) + duplex.write('asd') + duplex.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END') + }) + ) + duplex.on('finish', common.mustNotCall()) } - { const duplex = new Duplex({ readable: false - }); - assert.strictEqual(duplex.readable, false); - duplex.on('data', common.mustNotCall()); - duplex.on('end', common.mustNotCall()); + }) + assert.strictEqual(duplex.readable, false) + duplex.on('data', common.mustNotCall()) + duplex.on('end', common.mustNotCall()) + async function run() { for await (const chunk of duplex) { - assert(false, chunk); + assert(false, chunk) } } - run().then(common.mustCall()); + + run().then(common.mustCall()) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { 
- tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-duplex-writable-finished.js b/test/parallel/test-stream-duplex-writable-finished.js index 715af20acd..e84abe9cfb 100644 --- a/test/parallel/test-stream-duplex-writable-finished.js +++ b/test/parallel/test-stream-duplex-writable-finished.js @@ -1,45 +1,52 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Duplex } = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const { Duplex } = require('../../lib/ours/index') + +const assert = require('assert') // basic -// basic { // Find it on Duplex.prototype - assert(Reflect.has(Duplex.prototype, 'writableFinished')); -} + assert(Reflect.has(Duplex.prototype, 'writableFinished')) +} // event -// event { - const duplex = new Duplex(); + const duplex = new Duplex() duplex._write = (chunk, encoding, cb) => { // The state finished should start in false. - assert.strictEqual(duplex.writableFinished, false); - cb(); - }; - - duplex.on('finish', common.mustCall(() => { - assert.strictEqual(duplex.writableFinished, true); - })); - - duplex.end('testing finished state', common.mustCall(() => { - assert.strictEqual(duplex.writableFinished, true); - })); + assert.strictEqual(duplex.writableFinished, false) + cb() + } + + duplex.on( + 'finish', + common.mustCall(() => { + assert.strictEqual(duplex.writableFinished, true) + }) + ) + duplex.end( + 'testing finished state', + common.mustCall(() => { + assert.strictEqual(duplex.writableFinished, true) + }) + ) } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-duplex.js b/test/parallel/test-stream-duplex.js index fc5b5eee31..0c44a8cfce 100644 --- a/test/parallel/test-stream-duplex.js +++ b/test/parallel/test-stream-duplex.js @@ -18,53 +18,59 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
+'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -require('../common'); -const assert = require('assert'); -const Duplex = require('../../lib/ours/index').Duplex; + error() {} +} +require('../common') -const stream = new Duplex({ objectMode: true }); +const assert = require('assert') -assert(Duplex() instanceof Duplex); -assert(stream._readableState.objectMode); -assert(stream._writableState.objectMode); -assert(stream.allowHalfOpen); -assert.strictEqual(stream.listenerCount('end'), 0); +const Duplex = require('../../lib/ours/index').Duplex -let written; -let read; +const stream = new Duplex({ + objectMode: true +}) +assert(Duplex() instanceof Duplex) +assert(stream._readableState.objectMode) +assert(stream._writableState.objectMode) +assert(stream.allowHalfOpen) +assert.strictEqual(stream.listenerCount('end'), 0) +let written +let read stream._write = (obj, _, cb) => { - written = obj; - cb(); -}; + written = obj + cb() +} -stream._read = () => {}; +stream._read = () => {} stream.on('data', (obj) => { - read = obj; -}); - -stream.push({ val: 1 }); -stream.end({ val: 2 }); - + read = obj +}) +stream.push({ + val: 1 +}) +stream.end({ + val: 2 +}) process.on('exit', () => { - assert.strictEqual(read.val, 1); - assert.strictEqual(written.val, 2); -}); + assert.strictEqual(read.val, 1) + assert.strictEqual(written.val, 2) +}) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-end-of-streams.js b/test/parallel/test-stream-end-of-streams.js index b4207da37a..f731a4d0e8 100644 --- a/test/parallel/test-stream-end-of-streams.js +++ b/test/parallel/test-stream-end-of-streams.js @@ -1,35 +1,40 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); +const silentConsole = { + log() {}, -const { Duplex, finished } = require('../../lib/ours/index'); + error() {} +} +require('../common') + +const assert = require('assert') + +const { Duplex, finished } = require('../../lib/ours/index') assert.throws( () => { // Passing empty object to mock invalid stream // should throw error - finished({}, () => {}); + finished({}, () => {}) }, - { code: 'ERR_INVALID_ARG_TYPE' } -); - -const streamObj = new Duplex(); -streamObj.end(); -// Below code should not throw any errors as the + { + code: 'ERR_INVALID_ARG_TYPE' + } +) +const streamObj = new Duplex() +streamObj.end() // Below code should not throw any errors as the // streamObj is `Stream` -finished(streamObj, () => {}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + +finished(streamObj, () => {}) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git 
a/test/parallel/test-stream-end-paused.js b/test/parallel/test-stream-end-paused.js index b9ec00a39b..f1478d685b 100644 --- a/test/parallel/test-stream-end-paused.js +++ b/test/parallel/test-stream-end-paused.js @@ -18,48 +18,52 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); + error() {} +} +const common = require('../common') -// Make sure we don't miss the end event for paused 0-length streams +const assert = require('assert') // Make sure we don't miss the end event for paused 0-length streams -const Readable = require('../../lib/ours/index').Readable; -const stream = new Readable(); -let calledRead = false; -stream._read = function() { - assert(!calledRead); - calledRead = true; - this.push(null); -}; +const Readable = require('../../lib/ours/index').Readable -stream.on('data', function() { - throw new Error('should not ever get data'); -}); -stream.pause(); +const stream = new Readable() +let calledRead = false -setTimeout(common.mustCall(function() { - stream.on('end', common.mustCall()); - stream.resume(); -}), 1); +stream._read = function () { + assert(!calledRead) + calledRead = true + this.push(null) +} -process.on('exit', function() { - assert(calledRead); - silentConsole.log('ok'); -}); +stream.on('data', function () { + throw new Error('should not ever get data') +}) +stream.pause() +setTimeout( + common.mustCall(function () { + stream.on('end', common.mustCall()) + stream.resume() + }), + 1 +) +process.on('exit', function () { + assert(calledRead) + silentConsole.log('ok') +}) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-error-once.js b/test/parallel/test-stream-error-once.js index f69bd88388..435f9b0b9d 100644 --- a/test/parallel/test-stream-error-once.js +++ b/test/parallel/test-stream-error-once.js @@ -1,34 +1,37 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const { Writable, Readable } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -{ - const writable = new Writable(); - writable.on('error', common.mustCall()); - writable.end(); - writable.write('h'); - writable.write('h'); + error() {} } +const common = require('../common') + +const { Writable, Readable } = require('../../lib/ours/index') { - const readable = new Readable(); - readable.on('error', common.mustCall()); - readable.push(null); - readable.push('h'); - readable.push('h'); + const writable = new Writable() + writable.on('error', common.mustCall()) + writable.end() + writable.write('h') + writable.write('h') +} +{ + const readable = new Readable() + readable.on('error', common.mustCall()) + readable.push(null) + readable.push('h') + readable.push('h') } +/* 
replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-events-prepend.js b/test/parallel/test-stream-events-prepend.js index 4996de08a9..861887c7db 100644 --- a/test/parallel/test-stream-events-prepend.js +++ b/test/parallel/test-stream-events-prepend.js @@ -1,41 +1,44 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const stream = require('../../lib/ours/index'); +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const stream = require('../../lib/ours/index') class Writable extends stream.Writable { constructor() { - super(); - this.prependListener = undefined; + super() + this.prependListener = undefined } _write(chunk, end, cb) { - cb(); + cb() } } class Readable extends stream.Readable { _read() { - this.push(null); + this.push(null) } } -const w = new Writable(); -w.on('pipe', common.mustCall()); - -const r = new Readable(); -r.pipe(w); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +const w = new Writable() +w.on('pipe', common.mustCall()) +const r = new Readable() +r.pipe(w) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-filter.js b/test/parallel/test-stream-filter.js index aad1bcd051..5ed6fb1046 100644 --- a/test/parallel/test-stream-filter.js +++ b/test/parallel/test-stream-filter.js @@ -1,199 +1,220 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { - Readable, -} = require('../../lib/ours/index'); -const assert = require('assert'); -const { once } = require('events'); + error() {} +} +const common = require('../common') - const st = require('timers').setTimeout; +const { Readable } = require('../../lib/ours/index') - function setTimeout(ms) { - return new Promise(resolve => { - st(resolve, ms); - }); - } - +const assert = require('assert') + +const { once } = require('events') + +const st = require('timers').setTimeout + +function setTimeout(ms) { + return new Promise((resolve) => { + st(resolve, ms) + }) +} { // Filter works on synchronous streams with a synchronous predicate - const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => x < 3); - const result = [1, 2]; - (async () => { + const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => x < 3) + const result = [1, 2] + ;(async () => { for await (const item of stream) { - assert.strictEqual(item, result.shift()); + assert.strictEqual(item, result.shift()) } - })().then(common.mustCall()); + })().then(common.mustCall()) } - { // Filter works on synchronous streams with an asynchronous predicate const stream = Readable.from([1, 2, 3, 4, 
5]).filter(async (x) => { - await Promise.resolve(); - return x > 3; - }); - const result = [4, 5]; - (async () => { + await Promise.resolve() + return x > 3 + }) + const result = [4, 5] + ;(async () => { for await (const item of stream) { - assert.strictEqual(item, result.shift()); + assert.strictEqual(item, result.shift()) } - })().then(common.mustCall()); + })().then(common.mustCall()) } - { // Map works on asynchronous streams with a asynchronous mapper - const stream = Readable.from([1, 2, 3, 4, 5]).map(async (x) => { - await Promise.resolve(); - return x + x; - }).filter((x) => x > 5); - const result = [6, 8, 10]; - (async () => { + const stream = Readable.from([1, 2, 3, 4, 5]) + .map(async (x) => { + await Promise.resolve() + return x + x + }) + .filter((x) => x > 5) + const result = [6, 8, 10] + ;(async () => { for await (const item of stream) { - assert.strictEqual(item, result.shift()); + assert.strictEqual(item, result.shift()) } - })().then(common.mustCall()); + })().then(common.mustCall()) } - { // Filter works on an infinite stream - const stream = Readable.from(async function* () { - while (true) yield 1; - }()).filter(common.mustCall(async (x) => { - return x < 3; - }, 5)); - (async () => { - let i = 1; + const stream = Readable.from( + (async function* () { + while (true) yield 1 + })() + ).filter( + common.mustCall(async (x) => { + return x < 3 + }, 5) + ) + ;(async () => { + let i = 1 + for await (const item of stream) { - assert.strictEqual(item, 1); - if (++i === 5) break; + assert.strictEqual(item, 1) + if (++i === 5) break } - })().then(common.mustCall()); + })().then(common.mustCall()) } - { // Filter works on constructor created streams - let i = 0; + let i = 0 const stream = new Readable({ read() { if (i === 10) { - this.push(null); - return; + this.push(null) + return } - this.push(Uint8Array.from([i])); - i++; + + this.push(Uint8Array.from([i])) + i++ }, - highWaterMark: 0, - }).filter(common.mustCall(async ([x]) => { - return x !== 5; - }, 10)); - (async () => { - const result = (await stream.toArray()).map((x) => x[0]); - const expected = [...Array(10).keys()].filter((x) => x !== 5); - assert.deepStrictEqual(result, expected); - })().then(common.mustCall()); -} + highWaterMark: 0 + }).filter( + common.mustCall(async ([x]) => { + return x !== 5 + }, 10) + ) + ;(async () => { + const result = (await stream.toArray()).map((x) => x[0]) + const expected = [...Array(10).keys()].filter((x) => x !== 5) + assert.deepStrictEqual(result, expected) + })().then(common.mustCall()) +} { // Throwing an error during `filter` (sync) const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => { if (x === 3) { - throw new Error('boom'); + throw new Error('boom') } - return true; - }); - assert.rejects( - stream.map((x) => x + x).toArray(), - /boom/, - ).then(common.mustCall()); -} + return true + }) + assert.rejects(stream.map((x) => x + x).toArray(), /boom/).then(common.mustCall()) +} { // Throwing an error during `filter` (async) const stream = Readable.from([1, 2, 3, 4, 5]).filter(async (x) => { if (x === 3) { - throw new Error('boom'); + throw new Error('boom') } - return true; - }); - assert.rejects( - stream.filter(() => true).toArray(), - /boom/, - ).then(common.mustCall()); -} + return true + }) + assert.rejects(stream.filter(() => true).toArray(), /boom/).then(common.mustCall()) +} { // Concurrency + AbortSignal - const ac = new AbortController(); - let calls = 0; - const stream = Readable.from([1, 2, 3, 4]).filter(async (_, { signal }) => { - calls++; - await 
once(signal, 'abort'); - }, { signal: ac.signal, concurrency: 2 }); - // pump - assert.rejects(async () => { - for await (const item of stream) { - // nope - silentConsole.log(item); + const ac = new AbortController() + let calls = 0 + const stream = Readable.from([1, 2, 3, 4]).filter( + async (_, { signal }) => { + calls++ + await once(signal, 'abort') + }, + { + signal: ac.signal, + concurrency: 2 } - }, { - name: 'AbortError', - }).then(common.mustCall()); - + ) // pump + + assert + .rejects( + async () => { + for await (const item of stream) { + // nope + silentConsole.log(item) + } + }, + { + name: 'AbortError' + } + ) + .then(common.mustCall()) setImmediate(() => { - ac.abort(); - assert.strictEqual(calls, 2); - }); + ac.abort() + assert.strictEqual(calls, 2) + }) } - { // Concurrency result order - const stream = Readable.from([1, 2]).filter(async (item, { signal }) => { - await setTimeout(10 - item, { signal }); - return true; - }, { concurrency: 2 }); + const stream = Readable.from([1, 2]).filter( + async (item, { signal }) => { + await setTimeout(10 - item, { + signal + }) + return true + }, + { + concurrency: 2 + } + ) + ;(async () => { + const expected = [1, 2] - (async () => { - const expected = [1, 2]; for await (const item of stream) { - assert.strictEqual(item, expected.shift()); + assert.strictEqual(item, expected.shift()) } - })().then(common.mustCall()); + })().then(common.mustCall()) } - { // Error cases - assert.throws(() => Readable.from([1]).filter(1), /ERR_INVALID_ARG_TYPE/); - assert.throws(() => Readable.from([1]).filter((x) => x, { - concurrency: 'Foo' - }), /ERR_OUT_OF_RANGE/); - assert.throws(() => Readable.from([1]).filter((x) => x, 1), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).filter(1), /ERR_INVALID_ARG_TYPE/) + assert.throws( + () => + Readable.from([1]).filter((x) => x, { + concurrency: 'Foo' + }), + /ERR_OUT_OF_RANGE/ + ) + assert.throws(() => Readable.from([1]).filter((x) => x, 1), /ERR_INVALID_ARG_TYPE/) } { // Test result is a Readable - const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => true); - assert.strictEqual(stream.readable, true); + const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => true) + assert.strictEqual(stream.readable, true) } { - const stream = Readable.from([1, 2, 3, 4, 5]); + const stream = Readable.from([1, 2, 3, 4, 5]) Object.defineProperty(stream, 'map', { - value: common.mustNotCall(() => {}), - }); - // Check that map isn't getting called. - stream.filter(() => true); -} + value: common.mustNotCall(() => {}) + }) // Check that map isn't getting called. 
- /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + stream.filter(() => true) +} +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-finished.js b/test/parallel/test-stream-finished.js index 77b55d24c6..5f659f667e 100644 --- a/test/parallel/test-stream-finished.js +++ b/test/parallel/test-stream-finished.js @@ -1,682 +1,762 @@ +'use strict' - 'use strict' - - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; - -const common = require('../common'); -const { - Writable, - Readable, - Transform, - finished, - Duplex, - PassThrough, - Stream, -} = require('../../lib/ours/index'); -const assert = require('assert'); -const EE = require('events'); -const fs = require('fs'); -const { promisify } = require('util'); -const http = require('http'); +const tap = require('tap') + +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const { Writable, Readable, Transform, finished, Duplex, PassThrough, Stream } = require('../../lib/ours/index') + +const assert = require('assert') + +const EE = require('events') + +const fs = require('fs') + +const { promisify } = require('util') + +const http = require('http') { const rs = new Readable({ read() {} - }); - - finished(rs, common.mustSucceed()); - - rs.push(null); - rs.resume(); + }) + finished(rs, common.mustSucceed()) + rs.push(null) + rs.resume() } - { const ws = new Writable({ write(data, enc, cb) { - cb(); + cb() } - }); - - finished(ws, common.mustSucceed()); - - ws.end(); + }) + finished(ws, common.mustSucceed()) + ws.end() } - { const tr = new Transform({ transform(data, enc, cb) { - cb(); + cb() } - }); - - let finish = false; - let ended = false; - + }) + let finish = false + let ended = false tr.on('end', () => { - ended = true; - }); - + ended = true + }) tr.on('finish', () => { - finish = true; - }); - - finished(tr, common.mustSucceed(() => { - assert(finish); - assert(ended); - })); - - tr.end(); - tr.resume(); + finish = true + }) + finished( + tr, + common.mustSucceed(() => { + assert(finish) + assert(ended) + }) + ) + tr.end() + tr.resume() } - { - const rs = fs.createReadStream(__filename); - - rs.resume(); - finished(rs, common.mustCall()); + const rs = fs.createReadStream(__filename) + rs.resume() + finished(rs, common.mustCall()) } - { - const finishedPromise = promisify(finished); + const finishedPromise = promisify(finished) async function run() { - const rs = fs.createReadStream(__filename); - const done = common.mustCall(); - - let ended = false; - rs.resume(); + const rs = fs.createReadStream(__filename) + const done = common.mustCall() + let ended = false + rs.resume() rs.on('end', () => { - ended = true; - }); - await finishedPromise(rs); - assert(ended); - done(); + ended = true + }) + await finishedPromise(rs) + assert(ended) + done() } - run(); + run() } - { // Check pre-cancelled - const signal = new EventTarget(); - signal.aborted = true; - - const rs = Readable.from((function* () {})()); - finished(rs, { signal }, common.mustCall((err) => { - assert.strictEqual(err.name, 'AbortError'); - })); + const signal = new EventTarget() + signal.aborted = true + const rs = Readable.from((function* () {})()) + finished( 
+ rs, + { + signal + }, + common.mustCall((err) => { + assert.strictEqual(err.name, 'AbortError') + }) + ) } - { // Check cancelled before the stream ends sync. - const ac = new AbortController(); - const { signal } = ac; - - const rs = Readable.from((function* () {})()); - finished(rs, { signal }, common.mustCall((err) => { - assert.strictEqual(err.name, 'AbortError'); - })); - - ac.abort(); + const ac = new AbortController() + const { signal } = ac + const rs = Readable.from((function* () {})()) + finished( + rs, + { + signal + }, + common.mustCall((err) => { + assert.strictEqual(err.name, 'AbortError') + }) + ) + ac.abort() } - { // Check cancelled before the stream ends async. - const ac = new AbortController(); - const { signal } = ac; - - const rs = Readable.from((function* () {})()); - setTimeout(() => ac.abort(), 1); - finished(rs, { signal }, common.mustCall((err) => { - assert.strictEqual(err.name, 'AbortError'); - })); + const ac = new AbortController() + const { signal } = ac + const rs = Readable.from((function* () {})()) + setTimeout(() => ac.abort(), 1) + finished( + rs, + { + signal + }, + common.mustCall((err) => { + assert.strictEqual(err.name, 'AbortError') + }) + ) } - { // Check cancelled after doesn't throw. - const ac = new AbortController(); - const { signal } = ac; - - const rs = Readable.from((function* () { - yield 5; - setImmediate(() => ac.abort()); - })()); - rs.resume(); - finished(rs, { signal }, common.mustSucceed()); + const ac = new AbortController() + const { signal } = ac + const rs = Readable.from( + (function* () { + yield 5 + setImmediate(() => ac.abort()) + })() + ) + rs.resume() + finished( + rs, + { + signal + }, + common.mustSucceed() + ) } - { // Promisified abort works - const finishedPromise = promisify(finished); + const finishedPromise = promisify(finished) + async function run() { - const ac = new AbortController(); - const { signal } = ac; - const rs = Readable.from((function* () {})()); - setImmediate(() => ac.abort()); - await finishedPromise(rs, { signal }); + const ac = new AbortController() + const { signal } = ac + const rs = Readable.from((function* () {})()) + setImmediate(() => ac.abort()) + await finishedPromise(rs, { + signal + }) } - assert.rejects(run, { name: 'AbortError' }).then(common.mustCall()); + assert + .rejects(run, { + name: 'AbortError' + }) + .then(common.mustCall()) } - { // Promisified pre-aborted works - const finishedPromise = promisify(finished); + const finishedPromise = promisify(finished) + async function run() { - const signal = new EventTarget(); - signal.aborted = true; - const rs = Readable.from((function* () {})()); - await finishedPromise(rs, { signal }); + const signal = new EventTarget() + signal.aborted = true + const rs = Readable.from((function* () {})()) + await finishedPromise(rs, { + signal + }) } - assert.rejects(run, { name: 'AbortError' }).then(common.mustCall()); + assert + .rejects(run, { + name: 'AbortError' + }) + .then(common.mustCall()) } - - { - const rs = fs.createReadStream('file-does-not-exist'); - - finished(rs, common.expectsError({ - code: 'ENOENT' - })); + const rs = fs.createReadStream('file-does-not-exist') + finished( + rs, + common.expectsError({ + code: 'ENOENT' + }) + ) } - { - const rs = new Readable(); - - finished(rs, common.mustSucceed()); + const rs = new Readable() + finished(rs, common.mustSucceed()) + rs.push(null) + rs.emit('close') // Should not trigger an error - rs.push(null); - rs.emit('close'); // Should not trigger an error - rs.resume(); + rs.resume() } - 
{ - const rs = new Readable(); - - finished(rs, common.mustCall((err) => { - assert(err, 'premature close error'); - })); + const rs = new Readable() + finished( + rs, + common.mustCall((err) => { + assert(err, 'premature close error') + }) + ) + rs.emit('close') // Should trigger error - rs.emit('close'); // Should trigger error - rs.push(null); - rs.resume(); -} + rs.push(null) + rs.resume() +} // Test faulty input values and options. -// Test faulty input values and options. { const rs = new Readable({ read() {} - }); + }) + assert.throws(() => finished(rs, 'foo'), { + code: 'ERR_INVALID_ARG_TYPE', + message: /callback/ + }) + assert.throws(() => finished(rs, 'foo', () => {}), { + code: 'ERR_INVALID_ARG_TYPE', + message: /options/ + }) + assert.throws(() => finished(rs, {}, 'foo'), { + code: 'ERR_INVALID_ARG_TYPE', + message: /callback/ + }) + finished(rs, null, common.mustCall()) + rs.push(null) + rs.resume() +} // Test that calling returned function removes listeners - assert.throws( - () => finished(rs, 'foo'), - { - code: 'ERR_INVALID_ARG_TYPE', - message: /callback/ - } - ); - assert.throws( - () => finished(rs, 'foo', () => {}), - { - code: 'ERR_INVALID_ARG_TYPE', - message: /options/ - } - ); - assert.throws( - () => finished(rs, {}, 'foo'), - { - code: 'ERR_INVALID_ARG_TYPE', - message: /callback/ - } - ); - - finished(rs, null, common.mustCall()); - - rs.push(null); - rs.resume(); -} - -// Test that calling returned function removes listeners { const ws = new Writable({ write(data, env, cb) { - cb(); + cb() } - }); - const removeListener = finished(ws, common.mustNotCall()); - removeListener(); - ws.end(); + }) + const removeListener = finished(ws, common.mustNotCall()) + removeListener() + ws.end() } - { - const rs = new Readable(); - const removeListeners = finished(rs, common.mustNotCall()); - removeListeners(); - - rs.emit('close'); - rs.push(null); - rs.resume(); + const rs = new Readable() + const removeListeners = finished(rs, common.mustNotCall()) + removeListeners() + rs.emit('close') + rs.push(null) + rs.resume() } - { - const streamLike = new EE(); - streamLike.readableEnded = true; - streamLike.readable = true; + const streamLike = new EE() + streamLike.readableEnded = true + streamLike.readable = true assert.throws( () => { - finished(streamLike, () => {}); + finished(streamLike, () => {}) }, - { code: 'ERR_INVALID_ARG_TYPE' } - ); - streamLike.emit('close'); -} - -{ - const writable = new Writable({ write() {} }); - writable.writable = false; - writable.destroy(); - finished(writable, common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); - })); -} - -{ - const readable = new Readable(); - readable.readable = false; - readable.destroy(); - finished(readable, common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); - })); + { + code: 'ERR_INVALID_ARG_TYPE' + } + ) + streamLike.emit('close') +} +{ + const writable = new Writable({ + write() {} + }) + writable.writable = false + writable.destroy() + finished( + writable, + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE') + }) + ) +} +{ + const readable = new Readable() + readable.readable = false + readable.destroy() + finished( + readable, + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE') + }) + ) } - { const w = new Writable({ write(chunk, encoding, callback) { - setImmediate(callback); + setImmediate(callback) } - }); - finished(w, common.mustCall((err) => { - 
assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); - })); - w.end('asd'); - w.destroy(); + }) + finished( + w, + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE') + }) + ) + w.end('asd') + w.destroy() } function testClosed(factory) { { // If already destroyed but finished is cancelled in same tick // don't invoke the callback, - - const s = factory(); - s.destroy(); - const dispose = finished(s, common.mustNotCall()); - dispose(); + const s = factory() + s.destroy() + const dispose = finished(s, common.mustNotCall()) + dispose() } - { // If already destroyed invoked callback. - - const s = factory(); - s.destroy(); - finished(s, common.mustCall()); + const s = factory() + s.destroy() + finished(s, common.mustCall()) } - { // Don't invoke until destroy has completed. - - let destroyed = false; + let destroyed = false const s = factory({ destroy(err, cb) { setImmediate(() => { - destroyed = true; - cb(); - }); + destroyed = true + cb() + }) } - }); - s.destroy(); - finished(s, common.mustCall(() => { - assert.strictEqual(destroyed, true); - })); + }) + s.destroy() + finished( + s, + common.mustCall(() => { + assert.strictEqual(destroyed, true) + }) + ) } - { // Invoke callback even if close is inhibited. - const s = factory({ emitClose: false, + destroy(err, cb) { - cb(); - finished(s, common.mustCall()); + cb() + finished(s, common.mustCall()) } - }); - s.destroy(); + }) + s.destroy() } - { // Invoke with deep async. - const s = factory({ destroy(err, cb) { setImmediate(() => { - cb(); + cb() setImmediate(() => { - finished(s, common.mustCall()); - }); - }); + finished(s, common.mustCall()) + }) + }) } - }); - s.destroy(); + }) + s.destroy() } } -testClosed((opts) => new Readable({ ...opts })); -testClosed((opts) => new Writable({ write() {}, ...opts })); +testClosed((opts) => new Readable({ ...opts })) +testClosed( + (opts) => + new Writable({ + write() {}, + ...opts + }) +) { const w = new Writable({ write(chunk, encoding, cb) { - cb(); + cb() }, + autoDestroy: false - }); - w.end('asd'); + }) + w.end('asd') process.nextTick(() => { - finished(w, common.mustCall()); - }); + finished(w, common.mustCall()) + }) } - { const w = new Writable({ write(chunk, encoding, cb) { - cb(new Error()); + cb(new Error()) }, + autoDestroy: false - }); - w.write('asd'); - w.on('error', common.mustCall(() => { - finished(w, common.mustCall()); - })); + }) + w.write('asd') + w.on( + 'error', + common.mustCall(() => { + finished(w, common.mustCall()) + }) + ) } - { const r = new Readable({ autoDestroy: false - }); - r.push(null); - r.resume(); - r.on('end', common.mustCall(() => { - finished(r, common.mustCall()); - })); -} - -{ - const rs = fs.createReadStream(__filename, { autoClose: false }); - rs.resume(); - rs.on('close', common.mustNotCall()); - rs.on('end', common.mustCall(() => { - finished(rs, common.mustCall()); - })); -} - -{ - const d = new EE(); - d._writableState = {}; - d._writableState.finished = true; - finished(d, { readable: false, writable: true }, common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); - })); - d._writableState.errored = true; - d.emit('close'); -} - -{ - const r = new Readable(); - finished(r, common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); - })); - r.push('asd'); - r.push(null); - r.destroy(); + }) + r.push(null) + r.resume() + r.on( + 'end', + common.mustCall(() => { + finished(r, common.mustCall()) + }) + ) +} +{ + const rs = 
fs.createReadStream(__filename, { + autoClose: false + }) + rs.resume() + rs.on('close', common.mustNotCall()) + rs.on( + 'end', + common.mustCall(() => { + finished(rs, common.mustCall()) + }) + ) +} +{ + const d = new EE() + d._writableState = {} + d._writableState.finished = true + finished( + d, + { + readable: false, + writable: true + }, + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE') + }) + ) + d._writableState.errored = true + d.emit('close') +} +{ + const r = new Readable() + finished( + r, + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE') + }) + ) + r.push('asd') + r.push(null) + r.destroy() } - { const d = new Duplex({ - final(cb) { }, // Never close writable side for test purpose + final(cb) {}, + + // Never close writable side for test purpose read() { - this.push(null); + this.push(null) } - }); - - d.on('end', common.mustCall()); - - finished(d, { readable: true, writable: false }, common.mustCall()); - - d.end(); - d.resume(); + }) + d.on('end', common.mustCall()) + finished( + d, + { + readable: true, + writable: false + }, + common.mustCall() + ) + d.end() + d.resume() } - { const d = new Duplex({ - final(cb) { }, // Never close writable side for test purpose + final(cb) {}, + + // Never close writable side for test purpose read() { - this.push(null); + this.push(null) } - }); - - d.on('end', common.mustCall()); - - d.end(); - finished(d, { readable: true, writable: false }, common.mustCall()); - - d.resume(); + }) + d.on('end', common.mustCall()) + d.end() + finished( + d, + { + readable: true, + writable: false + }, + common.mustCall() + ) + d.resume() } - { // Test for compat for e.g. fd-slicer which implements // non standard destroy behavior which might not emit // 'close'. 
- const r = new Readable(); - finished(r, common.mustCall()); - r.resume(); - r.push('asd'); - r.destroyed = true; - r.push(null); + const r = new Readable() + finished(r, common.mustCall()) + r.resume() + r.push('asd') + r.destroyed = true + r.push(null) } - { // Regression https://github.com/nodejs/node/issues/33130 - const response = new PassThrough(); + const response = new PassThrough() class HelloWorld extends Duplex { constructor(response) { super({ autoDestroy: false - }); - - this.response = response; - this.readMore = false; - + }) + this.response = response + this.readMore = false response.once('end', () => { - this.push(null); - }); - + this.push(null) + }) response.on('readable', () => { if (this.readMore) { - this._read(); + this._read() } - }); + }) } _read() { - const { response } = this; - - this.readMore = true; + const { response } = this + this.readMore = true if (response.readableLength) { - this.readMore = false; + this.readMore = false } - let data; + let data + while ((data = response.read()) !== null) { - this.push(data); + this.push(data) } } } - const instance = new HelloWorld(response); - instance.setEncoding('utf8'); - instance.end(); - - (async () => { - await EE.once(instance, 'finish'); - + const instance = new HelloWorld(response) + instance.setEncoding('utf8') + instance.end() + ;(async () => { + await EE.once(instance, 'finish') setImmediate(() => { - response.write('chunk 1'); - response.write('chunk 2'); - response.write('chunk 3'); - response.end(); - }); + response.write('chunk 1') + response.write('chunk 2') + response.write('chunk 3') + response.end() + }) + let res = '' - let res = ''; for await (const data of instance) { - res += data; + res += data } - assert.strictEqual(res, 'chunk 1chunk 2chunk 3'); - })().then(common.mustCall()); -} - -{ - const p = new PassThrough(); - p.end(); - finished(p, common.mustNotCall()); -} - -{ - const p = new PassThrough(); - p.end(); - p.on('finish', common.mustCall(() => { - finished(p, common.mustNotCall()); - })); -} - -{ - const server = http.createServer(common.mustCall((req, res) => { - res.on('finish', common.mustCall(() => { - finished(res, common.mustCall(() => { - server.close(); - })); - })); - res.end(); - })) - .listen(0, function() { - http.request({ - method: 'GET', - port: this.address().port - }).end() - .on('response', common.mustCall()); - }); -} - -{ - const server = http.createServer(common.mustCall((req, res) => { - req.on('close', common.mustCall(() => { - finished(req, common.mustCall(() => { - server.close(); - })); - })); - req.destroy(); - })).listen(0, function() { - http.request({ - method: 'GET', - port: this.address().port - }).end().on('error', common.mustCall()); - }); + assert.strictEqual(res, 'chunk 1chunk 2chunk 3') + })().then(common.mustCall()) +} +{ + const p = new PassThrough() + p.end() + finished(p, common.mustNotCall()) +} +{ + const p = new PassThrough() + p.end() + p.on( + 'finish', + common.mustCall(() => { + finished(p, common.mustNotCall()) + }) + ) +} +{ + const server = http + .createServer( + common.mustCall((req, res) => { + res.on( + 'finish', + common.mustCall(() => { + finished( + res, + common.mustCall(() => { + server.close() + }) + ) + }) + ) + res.end() + }) + ) + .listen(0, function () { + http + .request({ + method: 'GET', + port: this.address().port + }) + .end() + .on('response', common.mustCall()) + }) +} +{ + const server = http + .createServer( + common.mustCall((req, res) => { + req.on( + 'close', + common.mustCall(() => { + finished( + req, + 
common.mustCall(() => { + server.close() + }) + ) + }) + ) + req.destroy() + }) + ) + .listen(0, function () { + http + .request({ + method: 'GET', + port: this.address().port + }) + .end() + .on('error', common.mustCall()) + }) } - { const w = new Writable({ write(chunk, encoding, callback) { - process.nextTick(callback); + process.nextTick(callback) } - }); - w.aborted = false; - w.end(); - let closed = false; + }) + w.aborted = false + w.end() + let closed = false w.on('finish', () => { - assert.strictEqual(closed, false); - w.emit('aborted'); - }); - w.on('close', common.mustCall(() => { - closed = true; - })); - - finished(w, common.mustCall(() => { - assert.strictEqual(closed, true); - })); + assert.strictEqual(closed, false) + w.emit('aborted') + }) + w.on( + 'close', + common.mustCall(() => { + closed = true + }) + ) + finished( + w, + common.mustCall(() => { + assert.strictEqual(closed, true) + }) + ) +} +{ + const w = new Writable() + + const _err = new Error() + + w.destroy(_err) + assert.strictEqual(w.errored, _err) + finished( + w, + common.mustCall((err) => { + assert.strictEqual(_err, err) + assert.strictEqual(w.closed, true) + finished( + w, + common.mustCall((err) => { + assert.strictEqual(_err, err) + }) + ) + }) + ) +} +{ + const w = new Writable() + w.destroy() + assert.strictEqual(w.errored, null) + finished( + w, + common.mustCall((err) => { + assert.strictEqual(w.closed, true) + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE') + finished( + w, + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE') + }) + ) + }) + ) } - -{ - const w = new Writable(); - const _err = new Error(); - w.destroy(_err); - assert.strictEqual(w.errored, _err); - finished(w, common.mustCall((err) => { - assert.strictEqual(_err, err); - assert.strictEqual(w.closed, true); - finished(w, common.mustCall((err) => { - assert.strictEqual(_err, err); - })); - })); -} - -{ - const w = new Writable(); - w.destroy(); - assert.strictEqual(w.errored, null); - finished(w, common.mustCall((err) => { - assert.strictEqual(w.closed, true); - assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); - finished(w, common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); - })); - })); -} - { // Legacy Streams do not inherit from Readable or Writable. // We cannot really assume anything about them, so we cannot close them // automatically. 
- const s = new Stream(); - finished(s, common.mustNotCall()); -} - -{ - const server = http.createServer(common.mustCall(function(req, res) { - fs.createReadStream(__filename).pipe(res); - finished(res, common.mustCall(function(err) { - assert.strictEqual(err, undefined); - })); - })).listen(0, function() { - http.request( - { method: 'GET', port: this.address().port }, - common.mustCall(function(res) { - res.resume(); - server.close(); + const s = new Stream() + finished(s, common.mustNotCall()) +} +{ + const server = http + .createServer( + common.mustCall(function (req, res) { + fs.createReadStream(__filename).pipe(res) + finished( + res, + common.mustCall(function (err) { + assert.strictEqual(err, undefined) + }) + ) }) - ).end(); - }); -} - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + ) + .listen(0, function () { + http + .request( + { + method: 'GET', + port: this.address().port + }, + common.mustCall(function (res) { + res.resume() + server.close() + }) + ) + .end() + }) +} +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-flatMap.js b/test/parallel/test-stream-flatMap.js index 07d569d5fa..d653682276 100644 --- a/test/parallel/test-stream-flatMap.js +++ b/test/parallel/test-stream-flatMap.js @@ -1,154 +1,192 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const fixtures = require('../common/fixtures'); -const { - Readable, -} = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') - const st = require('timers').setTimeout; +const fixtures = require('../common/fixtures') - function setTimeout(ms) { - return new Promise(resolve => { - st(resolve, ms); - }); - } - -const { createReadStream } = require('fs'); +const { Readable } = require('../../lib/ours/index') + +const assert = require('assert') + +const st = require('timers').setTimeout + +function setTimeout(ms) { + return new Promise((resolve) => { + st(resolve, ms) + }) +} + +const { createReadStream } = require('fs') function oneTo5() { - return Readable.from([1, 2, 3, 4, 5]); + return Readable.from([1, 2, 3, 4, 5]) } { // flatMap works on synchronous streams with a synchronous mapper - (async () => { + ;(async () => { assert.deepStrictEqual( - await oneTo5().flatMap((x) => [x + x]).toArray(), + await oneTo5() + .flatMap((x) => [x + x]) + .toArray(), [2, 4, 6, 8, 10] - ); + ) assert.deepStrictEqual( - await oneTo5().flatMap(() => []).toArray(), + await oneTo5() + .flatMap(() => []) + .toArray(), [] - ); + ) assert.deepStrictEqual( - await oneTo5().flatMap((x) => [x, x]).toArray(), + await oneTo5() + .flatMap((x) => [x, x]) + .toArray(), [1, 1, 2, 2, 3, 3, 4, 4, 5, 5] - ); - })().then(common.mustCall()); + ) + })().then(common.mustCall()) } - - { // flatMap works on sync/async streams with an asynchronous mapper - (async () => { - assert.deepStrictEqual( - await oneTo5().flatMap(async (x) => [x, x]).toArray(), - [1, 1, 2, 2, 3, 3, 4, 4, 5, 5] - ); - const asyncOneTo5 = oneTo5().map(async (x) => x); + ;(async () => { assert.deepStrictEqual( - 
await asyncOneTo5.flatMap(async (x) => [x, x]).toArray(), + await oneTo5() + .flatMap(async (x) => [x, x]) + .toArray(), [1, 1, 2, 2, 3, 3, 4, 4, 5, 5] - ); - })().then(common.mustCall()); + ) + const asyncOneTo5 = oneTo5().map(async (x) => x) + assert.deepStrictEqual(await asyncOneTo5.flatMap(async (x) => [x, x]).toArray(), [1, 1, 2, 2, 3, 3, 4, 4, 5, 5]) + })().then(common.mustCall()) } { // flatMap works on a stream where mapping returns a stream - (async () => { - const result = await oneTo5().flatMap(async (x) => { - return Readable.from([x, x]); - }).toArray(); - assert.deepStrictEqual(result, [1, 1, 2, 2, 3, 3, 4, 4, 5, 5]); - })().then(common.mustCall()); - // flatMap works on an objectMode stream where mappign returns a stream - (async () => { - const result = await oneTo5().flatMap(() => { - return createReadStream(fixtures.path('x.txt')); - }).toArray(); - // The resultant stream is in object mode so toArray shouldn't flatten - assert.strictEqual(result.length, 5); + ;(async () => { + const result = await oneTo5() + .flatMap(async (x) => { + return Readable.from([x, x]) + }) + .toArray() + assert.deepStrictEqual(result, [1, 1, 2, 2, 3, 3, 4, 4, 5, 5]) + })().then(common.mustCall()) // flatMap works on an objectMode stream where mappign returns a stream + ;(async () => { + const result = await oneTo5() + .flatMap(() => { + return createReadStream(fixtures.path('x.txt')) + }) + .toArray() // The resultant stream is in object mode so toArray shouldn't flatten + + assert.strictEqual(result.length, 5) assert.deepStrictEqual( Buffer.concat(result).toString(), (process.platform === 'win32' ? 'xyz\r\n' : 'xyz\n').repeat(5) - ); - - })().then(common.mustCall()); - + ) + })().then(common.mustCall()) } - { // Concurrency + AbortSignal - const ac = new AbortController(); - const stream = oneTo5().flatMap(common.mustNotCall(async (_, { signal }) => { - await setTimeout(100, { signal }); - }), { signal: ac.signal, concurrency: 2 }); - // pump - assert.rejects(async () => { - for await (const item of stream) { - // nope - silentConsole.log(item); + const ac = new AbortController() + const stream = oneTo5().flatMap( + common.mustNotCall(async (_, { signal }) => { + await setTimeout(100, { + signal + }) + }), + { + signal: ac.signal, + concurrency: 2 } - }, { - name: 'AbortError', - }).then(common.mustCall()); - + ) // pump + + assert + .rejects( + async () => { + for await (const item of stream) { + // nope + silentConsole.log(item) + } + }, + { + name: 'AbortError' + } + ) + .then(common.mustCall()) queueMicrotask(() => { - ac.abort(); - }); + ac.abort() + }) } - { // Already aborted AbortSignal - const stream = oneTo5().flatMap(common.mustNotCall(async (_, { signal }) => { - await setTimeout(100, { signal }); - }), { signal: AbortSignal.abort() }); - // pump - assert.rejects(async () => { - for await (const item of stream) { - // nope - silentConsole.log(item); + const stream = oneTo5().flatMap( + common.mustNotCall(async (_, { signal }) => { + await setTimeout(100, { + signal + }) + }), + { + signal: AbortSignal.abort() } - }, { - name: 'AbortError', - }).then(common.mustCall()); + ) // pump + + assert + .rejects( + async () => { + for await (const item of stream) { + // nope + silentConsole.log(item) + } + }, + { + name: 'AbortError' + } + ) + .then(common.mustCall()) } - { // Error cases - assert.throws(() => Readable.from([1]).flatMap(1), /ERR_INVALID_ARG_TYPE/); - assert.throws(() => Readable.from([1]).flatMap((x) => x, { - concurrency: 'Foo' - }), /ERR_OUT_OF_RANGE/); - assert.throws(() 
=> Readable.from([1]).flatMap((x) => x, 1), /ERR_INVALID_ARG_TYPE/); - assert.throws(() => Readable.from([1]).flatMap((x) => x, { signal: true }), /ERR_INVALID_ARG_TYPE/); + assert.throws(() => Readable.from([1]).flatMap(1), /ERR_INVALID_ARG_TYPE/) + assert.throws( + () => + Readable.from([1]).flatMap((x) => x, { + concurrency: 'Foo' + }), + /ERR_OUT_OF_RANGE/ + ) + assert.throws(() => Readable.from([1]).flatMap((x) => x, 1), /ERR_INVALID_ARG_TYPE/) + assert.throws( + () => + Readable.from([1]).flatMap((x) => x, { + signal: true + }), + /ERR_INVALID_ARG_TYPE/ + ) } { // Test result is a Readable - const stream = oneTo5().flatMap((x) => x); - assert.strictEqual(stream.readable, true); + const stream = oneTo5().flatMap((x) => x) + assert.strictEqual(stream.readable, true) } { - const stream = oneTo5(); + const stream = oneTo5() Object.defineProperty(stream, 'map', { - value: common.mustNotCall(() => {}), - }); - // Check that map isn't getting called. - stream.flatMap(() => true); -} + value: common.mustNotCall(() => {}) + }) // Check that map isn't getting called. - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + stream.flatMap(() => true) +} +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-forEach.js b/test/parallel/test-stream-forEach.js index 39e5e34efc..083864f06f 100644 --- a/test/parallel/test-stream-forEach.js +++ b/test/parallel/test-stream-forEach.js @@ -1,154 +1,193 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { - Readable, -} = require('../../lib/ours/index'); -const assert = require('assert'); -const { once } = require('events'); + error() {} +} +const common = require('../common') + +const { Readable } = require('../../lib/ours/index') + +const assert = require('assert') + +const { once } = require('events') { // forEach works on synchronous streams with a synchronous predicate - const stream = Readable.from([1, 2, 3]); - const result = [1, 2, 3]; - (async () => { - await stream.forEach((value) => assert.strictEqual(value, result.shift())); - })().then(common.mustCall()); + const stream = Readable.from([1, 2, 3]) + const result = [1, 2, 3] + ;(async () => { + await stream.forEach((value) => assert.strictEqual(value, result.shift())) + })().then(common.mustCall()) } - { // forEach works an asynchronous streams const stream = Readable.from([1, 2, 3]).filter(async (x) => { - await Promise.resolve(); - return true; - }); - const result = [1, 2, 3]; - (async () => { - await stream.forEach((value) => assert.strictEqual(value, result.shift())); - })().then(common.mustCall()); + await Promise.resolve() + return true + }) + const result = [1, 2, 3] + ;(async () => { + await stream.forEach((value) => assert.strictEqual(value, result.shift())) + })().then(common.mustCall()) } - { // forEach works on asynchronous streams with a asynchronous forEach fn const stream = Readable.from([1, 2, 3]).filter(async (x) => { - await Promise.resolve(); - return true; - }); - const result = [1, 2, 3]; - (async () => { + await Promise.resolve() + return true + }) + const result = 
[1, 2, 3] + ;(async () => { await stream.forEach(async (value) => { - await Promise.resolve(); - assert.strictEqual(value, result.shift()); - }); - })().then(common.mustCall()); + await Promise.resolve() + assert.strictEqual(value, result.shift()) + }) + })().then(common.mustCall()) } - { // forEach works on an infinite stream - const ac = new AbortController(); - const { signal } = ac; - const stream = Readable.from(async function* () { - while (true) yield 1; - }(), { signal }); - let i = 0; - assert.rejects(stream.forEach(common.mustCall((x) => { - i++; - if (i === 10) ac.abort(); - assert.strictEqual(x, 1); - }, 10)), { name: 'AbortError' }).then(common.mustCall()); + const ac = new AbortController() + const { signal } = ac + const stream = Readable.from( + (async function* () { + while (true) yield 1 + })(), + { + signal + } + ) + let i = 0 + assert + .rejects( + stream.forEach( + common.mustCall((x) => { + i++ + if (i === 10) ac.abort() + assert.strictEqual(x, 1) + }, 10) + ), + { + name: 'AbortError' + } + ) + .then(common.mustCall()) } - { // Emitting an error during `forEach` - const stream = Readable.from([1, 2, 3, 4, 5]); - assert.rejects(stream.forEach(async (x) => { - if (x === 3) { - stream.emit('error', new Error('boom')); - } - }), /boom/).then(common.mustCall()); + const stream = Readable.from([1, 2, 3, 4, 5]) + assert + .rejects( + stream.forEach(async (x) => { + if (x === 3) { + stream.emit('error', new Error('boom')) + } + }), + /boom/ + ) + .then(common.mustCall()) } - { // Throwing an error during `forEach` (sync) - const stream = Readable.from([1, 2, 3, 4, 5]); - assert.rejects(stream.forEach((x) => { - if (x === 3) { - throw new Error('boom'); - } - }), /boom/).then(common.mustCall()); + const stream = Readable.from([1, 2, 3, 4, 5]) + assert + .rejects( + stream.forEach((x) => { + if (x === 3) { + throw new Error('boom') + } + }), + /boom/ + ) + .then(common.mustCall()) } - { // Throwing an error during `forEach` (async) - const stream = Readable.from([1, 2, 3, 4, 5]); - assert.rejects(stream.forEach(async (x) => { - if (x === 3) { - return Promise.reject(new Error('boom')); - } - }), /boom/).then(common.mustCall()); + const stream = Readable.from([1, 2, 3, 4, 5]) + assert + .rejects( + stream.forEach(async (x) => { + if (x === 3) { + return Promise.reject(new Error('boom')) + } + }), + /boom/ + ) + .then(common.mustCall()) } - { // Concurrency + AbortSignal - const ac = new AbortController(); - let calls = 0; - const forEachPromise = - Readable.from([1, 2, 3, 4]).forEach(async (_, { signal }) => { - calls++; - await once(signal, 'abort'); - }, { signal: ac.signal, concurrency: 2 }); - // pump - assert.rejects(async () => { - await forEachPromise; - }, { - name: 'AbortError', - }).then(common.mustCall()); + const ac = new AbortController() + let calls = 0 + const forEachPromise = Readable.from([1, 2, 3, 4]).forEach( + async (_, { signal }) => { + calls++ + await once(signal, 'abort') + }, + { + signal: ac.signal, + concurrency: 2 + } + ) // pump + assert + .rejects( + async () => { + await forEachPromise + }, + { + name: 'AbortError' + } + ) + .then(common.mustCall()) setImmediate(() => { - ac.abort(); - assert.strictEqual(calls, 2); - }); + ac.abort() + assert.strictEqual(calls, 2) + }) } - { // Error cases - assert.rejects(async () => { - await Readable.from([1]).forEach(1); - }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); - assert.rejects(async () => { - await Readable.from([1]).forEach((x) => x, { - concurrency: 'Foo' - }); - }, 
/ERR_OUT_OF_RANGE/).then(common.mustCall()); - assert.rejects(async () => { - await Readable.from([1]).forEach((x) => x, 1); - }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); + assert + .rejects(async () => { + await Readable.from([1]).forEach(1) + }, /ERR_INVALID_ARG_TYPE/) + .then(common.mustCall()) + assert + .rejects(async () => { + await Readable.from([1]).forEach((x) => x, { + concurrency: 'Foo' + }) + }, /ERR_OUT_OF_RANGE/) + .then(common.mustCall()) + assert + .rejects(async () => { + await Readable.from([1]).forEach((x) => x, 1) + }, /ERR_INVALID_ARG_TYPE/) + .then(common.mustCall()) } { // Test result is a Promise - const stream = Readable.from([1, 2, 3, 4, 5]).forEach((_) => true); - assert.strictEqual(typeof stream.then, 'function'); + const stream = Readable.from([1, 2, 3, 4, 5]).forEach((_) => true) + assert.strictEqual(typeof stream.then, 'function') } { - const stream = Readable.from([1, 2, 3, 4, 5]); + const stream = Readable.from([1, 2, 3, 4, 5]) Object.defineProperty(stream, 'map', { - value: common.mustNotCall(() => {}), - }); - // Check that map isn't getting called. - stream.forEach(() => true); + value: common.mustNotCall(() => {}) + }) // Check that map isn't getting called. + + stream.forEach(() => true) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-inheritance.js b/test/parallel/test-stream-inheritance.js index 3507a337c4..730d9a6328 100644 --- a/test/parallel/test-stream-inheritance.js +++ b/test/parallel/test-stream-inheritance.js @@ -1,78 +1,76 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); -const { Readable, Writable, Duplex, Transform } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -const readable = new Readable({ read() {} }); -const writable = new Writable({ write() {} }); -const duplex = new Duplex({ read() {}, write() {} }); -const transform = new Transform({ transform() {} }); - -assert.ok(readable instanceof Readable); -assert.ok(!(writable instanceof Readable)); -assert.ok(duplex instanceof Readable); -assert.ok(transform instanceof Readable); + error() {} +} +require('../common') -assert.ok(!(readable instanceof Writable)); -assert.ok(writable instanceof Writable); -assert.ok(duplex instanceof Writable); -assert.ok(transform instanceof Writable); +const assert = require('assert') -assert.ok(!(readable instanceof Duplex)); -assert.ok(!(writable instanceof Duplex)); -assert.ok(duplex instanceof Duplex); -assert.ok(transform instanceof Duplex); +const { Readable, Writable, Duplex, Transform } = require('../../lib/ours/index') -assert.ok(!(readable instanceof Transform)); -assert.ok(!(writable instanceof Transform)); -assert.ok(!(duplex instanceof Transform)); -assert.ok(transform instanceof Transform); +const readable = new Readable({ + read() {} +}) +const writable = new Writable({ + write() {} +}) +const duplex = new Duplex({ + read() {}, -assert.ok(!(null instanceof Writable)); -assert.ok(!(undefined instanceof Writable)); + write() {} +}) +const transform = new 
Transform({ + transform() {} +}) +assert.ok(readable instanceof Readable) +assert.ok(!(writable instanceof Readable)) +assert.ok(duplex instanceof Readable) +assert.ok(transform instanceof Readable) +assert.ok(!(readable instanceof Writable)) +assert.ok(writable instanceof Writable) +assert.ok(duplex instanceof Writable) +assert.ok(transform instanceof Writable) +assert.ok(!(readable instanceof Duplex)) +assert.ok(!(writable instanceof Duplex)) +assert.ok(duplex instanceof Duplex) +assert.ok(transform instanceof Duplex) +assert.ok(!(readable instanceof Transform)) +assert.ok(!(writable instanceof Transform)) +assert.ok(!(duplex instanceof Transform)) +assert.ok(transform instanceof Transform) +assert.ok(!(null instanceof Writable)) +assert.ok(!(undefined instanceof Writable)) // Simple inheritance check for `Writable` works fine in a subclass constructor. -// Simple inheritance check for `Writable` works fine in a subclass constructor. function CustomWritable() { - assert.ok( - this instanceof CustomWritable, - `${this} does not inherit from CustomWritable` - ); - assert.ok( - this instanceof Writable, - `${this} does not inherit from Writable` - ); + assert.ok(this instanceof CustomWritable, `${this} does not inherit from CustomWritable`) + assert.ok(this instanceof Writable, `${this} does not inherit from Writable`) } -Object.setPrototypeOf(CustomWritable, Writable); -Object.setPrototypeOf(CustomWritable.prototype, Writable.prototype); - -new CustomWritable(); - -assert.throws( - CustomWritable, - { - code: 'ERR_ASSERTION', - constructor: assert.AssertionError, - message: 'undefined does not inherit from CustomWritable' - } -); +Object.setPrototypeOf(CustomWritable, Writable) +Object.setPrototypeOf(CustomWritable.prototype, Writable.prototype) +new CustomWritable() +assert.throws(CustomWritable, { + code: 'ERR_ASSERTION', + constructor: assert.AssertionError, + message: 'undefined does not inherit from CustomWritable' +}) class OtherCustomWritable extends Writable {} -assert(!(new OtherCustomWritable() instanceof CustomWritable)); -assert(!(new CustomWritable() instanceof OtherCustomWritable)); +assert(!(new OtherCustomWritable() instanceof CustomWritable)) +assert(!(new CustomWritable() instanceof OtherCustomWritable)) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-ispaused.js b/test/parallel/test-stream-ispaused.js index 582f52b447..7ec86d1257 100644 --- a/test/parallel/test-stream-ispaused.js +++ b/test/parallel/test-stream-ispaused.js @@ -18,42 +18,41 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); + error() {} +} +require('../common') -const readable = new stream.Readable(); +const assert = require('assert') -// _read is a noop, here. 
-readable._read = Function(); +const stream = require('../../lib/ours/index') -// Default state of a stream is not "paused" -assert.ok(!readable.isPaused()); +const readable = new stream.Readable() // _read is a noop, here. -// Make the stream start flowing... -readable.on('data', Function()); +readable._read = Function() // Default state of a stream is not "paused" -// still not paused. -assert.ok(!readable.isPaused()); +assert.ok(!readable.isPaused()) // Make the stream start flowing... -readable.pause(); -assert.ok(readable.isPaused()); -readable.resume(); -assert.ok(!readable.isPaused()); +readable.on('data', Function()) // still not paused. - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +assert.ok(!readable.isPaused()) +readable.pause() +assert.ok(readable.isPaused()) +readable.resume() +assert.ok(!readable.isPaused()) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-iterator-helpers-test262-tests.mjs b/test/parallel/test-stream-iterator-helpers-test262-tests.mjs index 9f09abeab6..8231f80ced 100644 --- a/test/parallel/test-stream-iterator-helpers-test262-tests.mjs +++ b/test/parallel/test-stream-iterator-helpers-test262-tests.mjs @@ -1,7 +1,7 @@ -import { mustCall } from '../common/index.mjs'; -import { Readable }from '../../lib/ours/index.js'; -import assert from 'assert'; -import tap from 'tap'; +import { mustCall } from '../common/index.mjs' +import { Readable } from '../../lib/ours/index.js' +import assert from 'assert' +import tap from 'tap' // These tests are manually ported from the draft PR for the test262 test suite // Authored by Rick Waldron in https://github.com/tc39/test262/pull/2818/files @@ -46,134 +46,131 @@ import tap from 'tap'; // * Ecma International Standards hereafter means Ecma International Standards // as well as Ecma Technical Reports - // Note all the tests that check AsyncIterator's prototype itself and things // that happen before stream conversion were not ported. 
{ // asIndexedPairs/is-function - assert.strictEqual(typeof Readable.prototype.asIndexedPairs, 'function'); + assert.strictEqual(typeof Readable.prototype.asIndexedPairs, 'function') // asIndexedPairs/indexed-pairs.js - const iterator = Readable.from([0, 1]); - const indexedPairs = iterator.asIndexedPairs(); + const iterator = Readable.from([0, 1]) + const indexedPairs = iterator.asIndexedPairs() for await (const [i, v] of indexedPairs) { - assert.strictEqual(i, v); + assert.strictEqual(i, v) } // asIndexedPairs/length.js - assert.strictEqual(Readable.prototype.asIndexedPairs.length, 0); + assert.strictEqual(Readable.prototype.asIndexedPairs.length, 0) // asIndexedPairs/name.js - assert.strictEqual(Readable.prototype.asIndexedPairs.name, 'asIndexedPairs'); - const descriptor = Object.getOwnPropertyDescriptor( - Readable.prototype, - 'asIndexedPairs' - ); - assert.strictEqual(descriptor.enumerable, false); - assert.strictEqual(descriptor.configurable, true); - assert.strictEqual(descriptor.writable, true); + assert.strictEqual(Readable.prototype.asIndexedPairs.name, 'asIndexedPairs') + const descriptor = Object.getOwnPropertyDescriptor(Readable.prototype, 'asIndexedPairs') + assert.strictEqual(descriptor.enumerable, false) + assert.strictEqual(descriptor.configurable, true) + assert.strictEqual(descriptor.writable, true) } { // drop/length - assert.strictEqual(Readable.prototype.drop.length, 1); - const descriptor = Object.getOwnPropertyDescriptor( - Readable.prototype, - 'drop' - ); - assert.strictEqual(descriptor.enumerable, false); - assert.strictEqual(descriptor.configurable, true); - assert.strictEqual(descriptor.writable, true); + assert.strictEqual(Readable.prototype.drop.length, 1) + const descriptor = Object.getOwnPropertyDescriptor(Readable.prototype, 'drop') + assert.strictEqual(descriptor.enumerable, false) + assert.strictEqual(descriptor.configurable, true) + assert.strictEqual(descriptor.writable, true) // drop/limit-equals-total - const iterator = Readable.from([1, 2]).drop(2); - const result = await iterator[Symbol.asyncIterator]().next(); - assert.deepStrictEqual(result, { done: true, value: undefined }); + const iterator = Readable.from([1, 2]).drop(2) + const result = await iterator[Symbol.asyncIterator]().next() + assert.deepStrictEqual(result, { done: true, value: undefined }) // drop/limit-greater-than-total.js - const iterator2 = Readable.from([1, 2]).drop(3); - const result2 = await iterator2[Symbol.asyncIterator]().next(); - assert.deepStrictEqual(result2, { done: true, value: undefined }); + const iterator2 = Readable.from([1, 2]).drop(3) + const result2 = await iterator2[Symbol.asyncIterator]().next() + assert.deepStrictEqual(result2, { done: true, value: undefined }) // drop/limit-less-than-total.js - const iterator3 = Readable.from([1, 2]).drop(1); - const result3 = await iterator3[Symbol.asyncIterator]().next(); - assert.deepStrictEqual(result3, { done: false, value: 2 }); + const iterator3 = Readable.from([1, 2]).drop(1) + const result3 = await iterator3[Symbol.asyncIterator]().next() + assert.deepStrictEqual(result3, { done: false, value: 2 }) // drop/limit-rangeerror - assert.throws(() => Readable.from([1]).drop(-1), RangeError); + assert.throws(() => Readable.from([1]).drop(-1), RangeError) assert.throws(() => { Readable.from([1]).drop({ valueOf() { - throw new Error('boom'); + throw new Error('boom') } - }); - }, /boom/); + }) + }, /boom/) // drop/limit-tointeger - const two = await Readable.from([1, 2]).drop({ valueOf: () => 1 }).toArray(); - 
assert.deepStrictEqual(two, [2]); + const two = await Readable.from([1, 2]) + .drop({ valueOf: () => 1 }) + .toArray() + assert.deepStrictEqual(two, [2]) // drop/name - assert.strictEqual(Readable.prototype.drop.name, 'drop'); + assert.strictEqual(Readable.prototype.drop.name, 'drop') // drop/non-constructible - assert.throws(() => new Readable.prototype.drop(1), TypeError); + assert.throws(() => new Readable.prototype.drop(1), TypeError) // drop/proto - const proto = Object.getPrototypeOf(Readable.prototype.drop); - assert.strictEqual(proto, Function.prototype); + const proto = Object.getPrototypeOf(Readable.prototype.drop) + assert.strictEqual(proto, Function.prototype) } { // every/abrupt-iterator-close - const stream = Readable.from([1, 2, 3]); - const e = new Error(); - await assert.rejects(stream.every(mustCall(() => { - throw e; - }, 1)), e); + const stream = Readable.from([1, 2, 3]) + const e = new Error() + await assert.rejects( + stream.every( + mustCall(() => { + throw e + }, 1) + ), + e + ) } { // every/callable-fn - await assert.rejects(Readable.from([1, 2]).every({}), TypeError); + await assert.rejects(Readable.from([1, 2]).every({}), TypeError) } { // every/callable - Readable.prototype.every.call(Readable.from([]), () => {}); + Readable.prototype.every.call(Readable.from([]), () => {}) // eslint-disable-next-line array-callback-return - Readable.from([]).every(() => {}); + Readable.from([]).every(() => {}) assert.throws(() => { - const r = Readable.from([]); - new r.every(() => {}); - }, TypeError); + const r = Readable.from([]) + new r.every(() => {}) + }, TypeError) } { // every/false - const iterator = Readable.from([1, 2, 3]); - const result = await iterator.every((v) => v === 1); - assert.strictEqual(result, false); + const iterator = Readable.from([1, 2, 3]) + const result = await iterator.every((v) => v === 1) + assert.strictEqual(result, false) } { // every/every - const iterator = Readable.from([1, 2, 3]); - const result = await iterator.every((v) => true); - assert.strictEqual(result, true); + const iterator = Readable.from([1, 2, 3]) + const result = await iterator.every((v) => true) + assert.strictEqual(result, true) } { // every/is-function - assert.strictEqual(typeof Readable.prototype.every, 'function'); + assert.strictEqual(typeof Readable.prototype.every, 'function') } { // every/length - assert.strictEqual(Readable.prototype.every.length, 1); + assert.strictEqual(Readable.prototype.every.length, 1) // every/name - assert.strictEqual(Readable.prototype.every.name, 'every'); + assert.strictEqual(Readable.prototype.every.name, 'every') // every/propdesc - const descriptor = Object.getOwnPropertyDescriptor( - Readable.prototype, - 'every' - ); - assert.strictEqual(descriptor.enumerable, false); - assert.strictEqual(descriptor.configurable, true); - assert.strictEqual(descriptor.writable, true); + const descriptor = Object.getOwnPropertyDescriptor(Readable.prototype, 'every') + assert.strictEqual(descriptor.enumerable, false) + assert.strictEqual(descriptor.configurable, true) + assert.strictEqual(descriptor.writable, true) } - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git 
a/test/parallel/test-stream-objectmode-undefined.js b/test/parallel/test-stream-objectmode-undefined.js index 2b4ac5e1c4..9dac7a3973 100644 --- a/test/parallel/test-stream-objectmode-undefined.js +++ b/test/parallel/test-stream-objectmode-undefined.js @@ -1,59 +1,64 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { Readable, Writable, Transform } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Readable, Writable, Transform } = require('../../lib/ours/index') { const stream = new Readable({ objectMode: true, read: common.mustCall(() => { - stream.push(undefined); - stream.push(null); + stream.push(undefined) + stream.push(null) }) - }); - - stream.on('data', common.mustCall((chunk) => { - assert.strictEqual(chunk, undefined); - })); + }) + stream.on( + 'data', + common.mustCall((chunk) => { + assert.strictEqual(chunk, undefined) + }) + ) } - { const stream = new Writable({ objectMode: true, write: common.mustCall((chunk) => { - assert.strictEqual(chunk, undefined); + assert.strictEqual(chunk, undefined) }) - }); - - stream.write(undefined); + }) + stream.write(undefined) } - { const stream = new Transform({ objectMode: true, transform: common.mustCall((chunk) => { - stream.push(chunk); + stream.push(chunk) }) - }); - - stream.on('data', common.mustCall((chunk) => { - assert.strictEqual(chunk, undefined); - })); - - stream.write(undefined); + }) + stream.on( + 'data', + common.mustCall((chunk) => { + assert.strictEqual(chunk, undefined) + }) + ) + stream.write(undefined) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-once-readable-pipe.js b/test/parallel/test-stream-once-readable-pipe.js index 10a7824c75..1f80f3455c 100644 --- a/test/parallel/test-stream-once-readable-pipe.js +++ b/test/parallel/test-stream-once-readable-pipe.js @@ -1,76 +1,76 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const { Readable, Writable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') -// This test ensures that if have 'readable' listener +const { Readable, Writable } = require('../../lib/ours/index') // This test ensures that if have 'readable' listener // on Readable instance it will not disrupt the pipe. 
{ - let receivedData = ''; + let receivedData = '' const w = new Writable({ write: (chunk, env, callback) => { - receivedData += chunk; - callback(); - }, - }); - - const data = ['foo', 'bar', 'baz']; + receivedData += chunk + callback() + } + }) + const data = ['foo', 'bar', 'baz'] const r = new Readable({ - read: () => {}, - }); - - r.once('readable', common.mustCall()); - - r.pipe(w); - r.push(data[0]); - r.push(data[1]); - r.push(data[2]); - r.push(null); - - w.on('finish', common.mustCall(() => { - assert.strictEqual(receivedData, data.join('')); - })); + read: () => {} + }) + r.once('readable', common.mustCall()) + r.pipe(w) + r.push(data[0]) + r.push(data[1]) + r.push(data[2]) + r.push(null) + w.on( + 'finish', + common.mustCall(() => { + assert.strictEqual(receivedData, data.join('')) + }) + ) } - { - let receivedData = ''; + let receivedData = '' const w = new Writable({ write: (chunk, env, callback) => { - receivedData += chunk; - callback(); - }, - }); - - const data = ['foo', 'bar', 'baz']; + receivedData += chunk + callback() + } + }) + const data = ['foo', 'bar', 'baz'] const r = new Readable({ - read: () => {}, - }); - - r.pipe(w); - r.push(data[0]); - r.push(data[1]); - r.push(data[2]); - r.push(null); - r.once('readable', common.mustCall()); - - w.on('finish', common.mustCall(() => { - assert.strictEqual(receivedData, data.join('')); - })); + read: () => {} + }) + r.pipe(w) + r.push(data[0]) + r.push(data[1]) + r.push(data[2]) + r.push(null) + r.once('readable', common.mustCall()) + w.on( + 'finish', + common.mustCall(() => { + assert.strictEqual(receivedData, data.join('')) + }) + ) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-passthrough-drain.js b/test/parallel/test-stream-passthrough-drain.js index e288bccc73..6b1813c257 100644 --- a/test/parallel/test-stream-passthrough-drain.js +++ b/test/parallel/test-stream-passthrough-drain.js @@ -1,23 +1,29 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const { PassThrough } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -const pt = new PassThrough({ highWaterMark: 0 }); -pt.on('drain', common.mustCall()); -pt.write('hello'); -pt.read(); + error() {} +} +const common = require('../common') - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +const { PassThrough } = require('../../lib/ours/index') + +const pt = new PassThrough({ + highWaterMark: 0 +}) +pt.on('drain', common.mustCall()) +pt.write('hello') +pt.read() +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-after-end.js b/test/parallel/test-stream-pipe-after-end.js index 1370dcdc93..0825d5a0e4 100644 --- a/test/parallel/test-stream-pipe-after-end.js +++ 
b/test/parallel/test-stream-pipe-after-end.js @@ -18,67 +18,70 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { Readable, Writable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Readable, Writable } = require('../../lib/ours/index') class TestReadable extends Readable { constructor(opt) { - super(opt); - this._ended = false; + super(opt) + this._ended = false } _read() { - if (this._ended) - this.emit('error', new Error('_read called twice')); - this._ended = true; - this.push(null); + if (this._ended) this.emit('error', new Error('_read called twice')) + this._ended = true + this.push(null) } } class TestWritable extends Writable { constructor(opt) { - super(opt); - this._written = []; + super(opt) + this._written = [] } _write(chunk, encoding, cb) { - this._written.push(chunk); - cb(); - } -} + this._written.push(chunk) -// This one should not emit 'end' until we read() from it later. -const ender = new TestReadable(); + cb() + } +} // This one should not emit 'end' until we read() from it later. -// What happens when you pipe() a Readable that's already ended? -const piper = new TestReadable(); -// pushes EOF null, and length=0, so this will trigger 'end' -piper.read(); +const ender = new TestReadable() // What happens when you pipe() a Readable that's already ended? 
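The scenario set up here (piping a Readable whose EOF has already been triggered) can be sketched on its own. The snippet below is illustrative only: it uses Node's built-in 'stream' module instead of '../../lib/ours/index', and console output in place of tap and common.mustCall.

'use strict'
const { Readable, Writable } = require('stream')
const assert = require('assert')

// A source that signals EOF on its first _read().
const src = new Readable({ read() { this.push(null) } })
assert.strictEqual(src.read(), null) // Triggers EOF; 'end' fires on a later tick.

const dst = new Writable({
  write(chunk, encoding, callback) { callback() }
})
dst.on('finish', () => console.log('finish: piping an already-ended readable still finishes the writable'))

// Pipe only after 'end' has already been emitted on the source.
setImmediate(() => src.pipe(dst))
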
-setTimeout(common.mustCall(function() { - ender.on('end', common.mustCall()); - const c = ender.read(); - assert.strictEqual(c, null); +const piper = new TestReadable() // pushes EOF null, and length=0, so this will trigger 'end' - const w = new TestWritable(); - w.on('finish', common.mustCall()); - piper.pipe(w); -}), 1); +piper.read() +setTimeout( + common.mustCall(function () { + ender.on('end', common.mustCall()) + const c = ender.read() + assert.strictEqual(c, null) + const w = new TestWritable() + w.on('finish', common.mustCall()) + piper.pipe(w) + }), + 1 +) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-await-drain-manual-resume.js b/test/parallel/test-stream-pipe-await-drain-manual-resume.js index 4f0f2820a1..9546d06a70 100644 --- a/test/parallel/test-stream-pipe-await-drain-manual-resume.js +++ b/test/parallel/test-stream-pipe-await-drain-manual-resume.js @@ -1,90 +1,101 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const stream = require('../../lib/ours/index'); -const assert = require('assert'); +const silentConsole = { + log() {}, -// A consumer stream with a very low highWaterMark, which starts in a state + error() {} +} +const common = require('../common') + +const stream = require('../../lib/ours/index') + +const assert = require('assert') // A consumer stream with a very low highWaterMark, which starts in a state // where it buffers the chunk it receives rather than indicating that they // have been consumed. + const writable = new stream.Writable({ highWaterMark: 5 -}); - -let isCurrentlyBufferingWrites = true; -const queue = []; +}) +let isCurrentlyBufferingWrites = true +const queue = [] writable._write = (chunk, encoding, cb) => { if (isCurrentlyBufferingWrites) - queue.push({ chunk, cb }); - else - cb(); -}; + queue.push({ + chunk, + cb + }) + else cb() +} const readable = new stream.Readable({ read() {} -}); +}) +readable.pipe(writable) +readable.once( + 'pause', + common.mustCall(() => { + assert.strictEqual( + readable._readableState.awaitDrainWriters, + writable, + 'Expected awaitDrainWriters to be a Writable but instead got ' + `${readable._readableState.awaitDrainWriters}` + ) // First pause, resume manually. The next write() to writable will still + // return false, because chunks are still being buffered, so it will increase + // the awaitDrain counter again. -readable.pipe(writable); + process.nextTick( + common.mustCall(() => { + readable.resume() + }) + ) + readable.once( + 'pause', + common.mustCall(() => { + assert.strictEqual( + readable._readableState.awaitDrainWriters, + writable, + '.resume() should not reset the awaitDrainWriters, but instead got ' + + `${readable._readableState.awaitDrainWriters}` + ) // Second pause, handle all chunks from now on. Once all callbacks that + // are currently queued up are handled, the awaitDrain drain counter should + // fall back to 0 and all chunks that are pending on the readable side + // should be flushed. 
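The pause/drain/resume cycle these assertions walk through can also be observed from the outside. A rough self-contained sketch, again against Node's core 'stream' module rather than this package, with arbitrary buffer sizes and delays:

'use strict'
const { Readable, Writable } = require('stream')

// A slow sink with a tiny buffer: write() returns false almost immediately,
// so pipe() has to pause the source and wait for 'drain' before sending more.
const slow = new Writable({
  highWaterMark: 1,
  write(chunk, encoding, callback) {
    setTimeout(callback, 5) // Complete each write a little later, like a slow consumer.
  }
})

const src = new Readable({ read() {} })
src.once('pause', () => {
  console.log('source paused: the sink is backed up')
  src.once('resume', () => console.log("source resumed after the sink emitted 'drain'"))
})
slow.on('finish', () => console.log('all chunks were written'))

src.pipe(slow)
src.push(Buffer.alloc(64)) // Fills the sink and causes the first pause.
src.push(Buffer.alloc(64))
src.push(null)
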
-readable.once('pause', common.mustCall(() => { - assert.strictEqual( - readable._readableState.awaitDrainWriters, - writable, - 'Expected awaitDrainWriters to be a Writable but instead got ' + - `${readable._readableState.awaitDrainWriters}` - ); - // First pause, resume manually. The next write() to writable will still - // return false, because chunks are still being buffered, so it will increase - // the awaitDrain counter again. + isCurrentlyBufferingWrites = false - process.nextTick(common.mustCall(() => { - readable.resume(); - })); + for (const queued of queue) queued.cb() + }) + ) + }) +) +readable.push(Buffer.alloc(100)) // Fill the writable HWM, first 'pause'. - readable.once('pause', common.mustCall(() => { - assert.strictEqual( - readable._readableState.awaitDrainWriters, - writable, - '.resume() should not reset the awaitDrainWriters, but instead got ' + - `${readable._readableState.awaitDrainWriters}` - ); - // Second pause, handle all chunks from now on. Once all callbacks that - // are currently queued up are handled, the awaitDrain drain counter should - // fall back to 0 and all chunks that are pending on the readable side - // should be flushed. - isCurrentlyBufferingWrites = false; - for (const queued of queue) - queued.cb(); - })); -})); +readable.push(Buffer.alloc(100)) // Second 'pause'. -readable.push(Buffer.alloc(100)); // Fill the writable HWM, first 'pause'. -readable.push(Buffer.alloc(100)); // Second 'pause'. -readable.push(Buffer.alloc(100)); // Should get through to the writable. -readable.push(null); +readable.push(Buffer.alloc(100)) // Should get through to the writable. -writable.on('finish', common.mustCall(() => { - assert.strictEqual( - readable._readableState.awaitDrainWriters, - null, - `awaitDrainWriters should be reset to null +readable.push(null) +writable.on( + 'finish', + common.mustCall(() => { + assert.strictEqual( + readable._readableState.awaitDrainWriters, + null, + `awaitDrainWriters should be reset to null after all chunks are written but instead got ${readable._readableState.awaitDrainWriters}` - ); - // Everything okay, all chunks were written. -})); + ) // Everything okay, all chunks were written. 
+ }) +) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-await-drain-push-while-write.js b/test/parallel/test-stream-pipe-await-drain-push-while-write.js index b98d427f3f..85898e2bdd 100644 --- a/test/parallel/test-stream-pipe-await-drain-push-while-write.js +++ b/test/parallel/test-stream-pipe-await-drain-push-while-write.js @@ -1,51 +1,54 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const stream = require('../../lib/ours/index'); -const assert = require('assert'); +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const stream = require('../../lib/ours/index') + +const assert = require('assert') const writable = new stream.Writable({ - write: common.mustCall(function(chunk, encoding, cb) { - assert.strictEqual( - readable._readableState.awaitDrainWriters, - null, - ); - - if (chunk.length === 32 * 1024) { // first chunk - readable.push(Buffer.alloc(34 * 1024)); // above hwm + write: common.mustCall(function (chunk, encoding, cb) { + assert.strictEqual(readable._readableState.awaitDrainWriters, null) + + if (chunk.length === 32 * 1024) { + // first chunk + readable.push(Buffer.alloc(34 * 1024)) // above hwm // We should check if awaitDrain counter is increased in the next // tick, because awaitDrain is incremented after this method finished + process.nextTick(() => { - assert.strictEqual(readable._readableState.awaitDrainWriters, writable); - }); + assert.strictEqual(readable._readableState.awaitDrainWriters, writable) + }) } - process.nextTick(cb); + process.nextTick(cb) }, 3) -}); +}) // A readable stream which produces two buffers. + +const bufs = [Buffer.alloc(32 * 1024), Buffer.alloc(33 * 1024)] // above hwm -// A readable stream which produces two buffers. 
-const bufs = [Buffer.alloc(32 * 1024), Buffer.alloc(33 * 1024)]; // above hwm const readable = new stream.Readable({ - read: function() { + read: function () { while (bufs.length > 0) { - this.push(bufs.shift()); + this.push(bufs.shift()) } } -}); - -readable.pipe(writable); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) +readable.pipe(writable) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-await-drain.js b/test/parallel/test-stream-pipe-await-drain.js index c6dfa9e0ae..48945d621d 100644 --- a/test/parallel/test-stream-pipe-await-drain.js +++ b/test/parallel/test-stream-pipe-await-drain.js @@ -1,82 +1,75 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const stream = require('../../lib/ours/index'); -const assert = require('assert'); +const silentConsole = { + log() {}, -// This is very similar to test-stream-pipe-cleanup-pause.js. + error() {} +} +const common = require('../common') -const reader = new stream.Readable(); -const writer1 = new stream.Writable(); -const writer2 = new stream.Writable(); -const writer3 = new stream.Writable(); +const stream = require('../../lib/ours/index') -// 560000 is chosen here because it is larger than the (default) highWaterMark +const assert = require('assert') // This is very similar to test-stream-pipe-cleanup-pause.js. + +const reader = new stream.Readable() +const writer1 = new stream.Writable() +const writer2 = new stream.Writable() +const writer3 = new stream.Writable() // 560000 is chosen here because it is larger than the (default) highWaterMark // and will cause `.write()` to return false // See: https://github.com/nodejs/node/issues/5820 -const buffer = Buffer.allocUnsafe(560000); -reader._read = () => {}; +const buffer = Buffer.allocUnsafe(560000) -writer1._write = common.mustCall(function(chunk, encoding, cb) { - this.emit('chunk-received'); - process.nextTick(cb); -}, 1); +reader._read = () => {} +writer1._write = common.mustCall(function (chunk, encoding, cb) { + this.emit('chunk-received') + process.nextTick(cb) +}, 1) writer1.once('chunk-received', () => { assert.strictEqual( reader._readableState.awaitDrainWriters.size, 0, - 'awaitDrain initial value should be 0, actual is ' + - reader._readableState.awaitDrainWriters.size - ); + 'awaitDrain initial value should be 0, actual is ' + reader._readableState.awaitDrainWriters.size + ) setImmediate(() => { // This one should *not* get through to writer1 because writer2 is not // "done" processing. - reader.push(buffer); - }); -}); + reader.push(buffer) + }) +}) // A "slow" consumer: -// A "slow" consumer: writer2._write = common.mustCall((chunk, encoding, cb) => { assert.strictEqual( reader._readableState.awaitDrainWriters.size, 1, - 'awaitDrain should be 1 after first push, actual is ' + - reader._readableState.awaitDrainWriters.size - ); - // Not calling cb here to "simulate" slow stream. + 'awaitDrain should be 1 after first push, actual is ' + reader._readableState.awaitDrainWriters.size + ) // Not calling cb here to "simulate" slow stream. 
// This should be called exactly once, since the first .write() call // will return false. -}, 1); - +}, 1) writer3._write = common.mustCall((chunk, encoding, cb) => { assert.strictEqual( reader._readableState.awaitDrainWriters.size, 2, - 'awaitDrain should be 2 after second push, actual is ' + - reader._readableState.awaitDrainWriters.size - ); - // Not calling cb here to "simulate" slow stream. + 'awaitDrain should be 2 after second push, actual is ' + reader._readableState.awaitDrainWriters.size + ) // Not calling cb here to "simulate" slow stream. // This should be called exactly once, since the first .write() call // will return false. -}, 1); - -reader.pipe(writer1); -reader.pipe(writer2); -reader.pipe(writer3); -reader.push(buffer); +}, 1) +reader.pipe(writer1) +reader.pipe(writer2) +reader.pipe(writer3) +reader.push(buffer) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-cleanup-pause.js b/test/parallel/test-stream-pipe-cleanup-pause.js index fc411bc399..c924500798 100644 --- a/test/parallel/test-stream-pipe-cleanup-pause.js +++ b/test/parallel/test-stream-pipe-cleanup-pause.js @@ -1,52 +1,53 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const stream = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -const reader = new stream.Readable(); -const writer1 = new stream.Writable(); -const writer2 = new stream.Writable(); + error() {} +} +const common = require('../common') -// 560000 is chosen here because it is larger than the (default) highWaterMark +const stream = require('../../lib/ours/index') + +const reader = new stream.Readable() +const writer1 = new stream.Writable() +const writer2 = new stream.Writable() // 560000 is chosen here because it is larger than the (default) highWaterMark // and will cause `.write()` to return false // See: https://github.com/nodejs/node/issues/2323 -const buffer = Buffer.allocUnsafe(560000); - -reader._read = () => {}; - -writer1._write = common.mustCall(function(chunk, encoding, cb) { - this.emit('chunk-received'); - cb(); -}, 1); -writer1.once('chunk-received', function() { - reader.unpipe(writer1); - reader.pipe(writer2); - reader.push(buffer); - setImmediate(function() { - reader.push(buffer); - setImmediate(function() { - reader.push(buffer); - }); - }); -}); - -writer2._write = common.mustCall(function(chunk, encoding, cb) { - cb(); -}, 3); - -reader.pipe(writer1); -reader.push(buffer); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + +const buffer = Buffer.allocUnsafe(560000) + +reader._read = () => {} + +writer1._write = common.mustCall(function (chunk, encoding, cb) { + this.emit('chunk-received') + cb() +}, 1) +writer1.once('chunk-received', function () { + reader.unpipe(writer1) + reader.pipe(writer2) + reader.push(buffer) + setImmediate(function () { + reader.push(buffer) + setImmediate(function () { + reader.push(buffer) + 
}) + }) +}) +writer2._write = common.mustCall(function (chunk, encoding, cb) { + cb() +}, 3) +reader.pipe(writer1) +reader.push(buffer) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-cleanup.js b/test/parallel/test-stream-pipe-cleanup.js index 3fb54ef8c3..7fa1422758 100644 --- a/test/parallel/test-stream-pipe-cleanup.js +++ b/test/parallel/test-stream-pipe-cleanup.js @@ -18,123 +18,134 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -// This test asserts that Stream.prototype.pipe does not leave listeners + error() {} +} // This test asserts that Stream.prototype.pipe does not leave listeners // hanging on the source or dest. -require('../common'); -const stream = require('../../lib/ours/index'); -const assert = require('assert'); + +require('../common') + +const stream = require('../../lib/ours/index') + +const assert = require('assert') function Writable() { - this.writable = true; - this.endCalls = 0; - stream.Stream.call(this); + this.writable = true + this.endCalls = 0 + stream.Stream.call(this) +} + +Object.setPrototypeOf(Writable.prototype, stream.Stream.prototype) +Object.setPrototypeOf(Writable, stream.Stream) + +Writable.prototype.end = function () { + this.endCalls++ } -Object.setPrototypeOf(Writable.prototype, stream.Stream.prototype); -Object.setPrototypeOf(Writable, stream.Stream); -Writable.prototype.end = function() { - this.endCalls++; -}; -Writable.prototype.destroy = function() { - this.endCalls++; -}; +Writable.prototype.destroy = function () { + this.endCalls++ +} function Readable() { - this.readable = true; - stream.Stream.call(this); + this.readable = true + stream.Stream.call(this) } -Object.setPrototypeOf(Readable.prototype, stream.Stream.prototype); -Object.setPrototypeOf(Readable, stream.Stream); + +Object.setPrototypeOf(Readable.prototype, stream.Stream.prototype) +Object.setPrototypeOf(Readable, stream.Stream) function Duplex() { - this.readable = true; - Writable.call(this); + this.readable = true + Writable.call(this) } -Object.setPrototypeOf(Duplex.prototype, Writable.prototype); -Object.setPrototypeOf(Duplex, Writable); -let i = 0; -const limit = 100; +Object.setPrototypeOf(Duplex.prototype, Writable.prototype) +Object.setPrototypeOf(Duplex, Writable) +let i = 0 +const limit = 100 +let w = new Writable() +let r -let w = new Writable(); +for (i = 0; i < limit; i++) { + r = new Readable() + r.pipe(w) + r.emit('end') +} -let r; +assert.strictEqual(r.listeners('end').length, 0) +assert.strictEqual(w.endCalls, limit) +w.endCalls = 0 for (i = 0; i < limit; i++) { - r = new Readable(); - r.pipe(w); - r.emit('end'); + r = new Readable() + r.pipe(w) + r.emit('close') } -assert.strictEqual(r.listeners('end').length, 0); -assert.strictEqual(w.endCalls, limit); -w.endCalls = 0; +assert.strictEqual(r.listeners('close').length, 0) +assert.strictEqual(w.endCalls, limit) +w.endCalls = 0 +r = new Readable() for (i = 0; i < limit; i++) { - r = new Readable(); - r.pipe(w); - r.emit('close'); + w = new Writable() + r.pipe(w) + w.emit('close') } 
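The invariant these loops assert against the legacy Stream constructor (pipe() must not leave listeners hanging once a stream is done) also holds for the modern classes. A rough sketch using Node's built-in 'stream' module and plain assertions, purely for illustration:

'use strict'
const { Readable, Writable } = require('stream')
const assert = require('assert')

const src = Readable.from(['a', 'b', 'c'])
const dst = new Writable({ write(chunk, encoding, callback) { callback() } })

src.pipe(dst)

dst.on('finish', () => setImmediate(() => {
  // Once the pipe has run to completion it should have removed its own
  // listeners from both ends instead of leaving them hanging.
  assert.strictEqual(src.listenerCount('data'), 0)
  assert.strictEqual(dst.listenerCount('drain'), 0)
  console.log('pipe cleaned up after itself')
}))
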
-assert.strictEqual(r.listeners('close').length, 0); -assert.strictEqual(w.endCalls, limit); -w.endCalls = 0; +assert.strictEqual(w.listeners('close').length, 0) +r = new Readable() +w = new Writable() +const d = new Duplex() +r.pipe(d) // pipeline A -r = new Readable(); +d.pipe(w) // pipeline B -for (i = 0; i < limit; i++) { - w = new Writable(); - r.pipe(w); - w.emit('close'); -} -assert.strictEqual(w.listeners('close').length, 0); - -r = new Readable(); -w = new Writable(); -const d = new Duplex(); -r.pipe(d); // pipeline A -d.pipe(w); // pipeline B -assert.strictEqual(r.listeners('end').length, 2); // A.onend, A.cleanup -assert.strictEqual(r.listeners('close').length, 2); // A.onclose, A.cleanup -assert.strictEqual(d.listeners('end').length, 2); // B.onend, B.cleanup +assert.strictEqual(r.listeners('end').length, 2) // A.onend, A.cleanup + +assert.strictEqual(r.listeners('close').length, 2) // A.onclose, A.cleanup + +assert.strictEqual(d.listeners('end').length, 2) // B.onend, B.cleanup // A.cleanup, B.onclose, B.cleanup -assert.strictEqual(d.listeners('close').length, 3); -assert.strictEqual(w.listeners('end').length, 0); -assert.strictEqual(w.listeners('close').length, 1); // B.cleanup - -r.emit('end'); -assert.strictEqual(d.endCalls, 1); -assert.strictEqual(w.endCalls, 0); -assert.strictEqual(r.listeners('end').length, 0); -assert.strictEqual(r.listeners('close').length, 0); -assert.strictEqual(d.listeners('end').length, 2); // B.onend, B.cleanup -assert.strictEqual(d.listeners('close').length, 2); // B.onclose, B.cleanup -assert.strictEqual(w.listeners('end').length, 0); -assert.strictEqual(w.listeners('close').length, 1); // B.cleanup - -d.emit('end'); -assert.strictEqual(d.endCalls, 1); -assert.strictEqual(w.endCalls, 1); -assert.strictEqual(r.listeners('end').length, 0); -assert.strictEqual(r.listeners('close').length, 0); -assert.strictEqual(d.listeners('end').length, 0); -assert.strictEqual(d.listeners('close').length, 0); -assert.strictEqual(w.listeners('end').length, 0); -assert.strictEqual(w.listeners('close').length, 0); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + +assert.strictEqual(d.listeners('close').length, 3) +assert.strictEqual(w.listeners('end').length, 0) +assert.strictEqual(w.listeners('close').length, 1) // B.cleanup + +r.emit('end') +assert.strictEqual(d.endCalls, 1) +assert.strictEqual(w.endCalls, 0) +assert.strictEqual(r.listeners('end').length, 0) +assert.strictEqual(r.listeners('close').length, 0) +assert.strictEqual(d.listeners('end').length, 2) // B.onend, B.cleanup + +assert.strictEqual(d.listeners('close').length, 2) // B.onclose, B.cleanup + +assert.strictEqual(w.listeners('end').length, 0) +assert.strictEqual(w.listeners('close').length, 1) // B.cleanup + +d.emit('end') +assert.strictEqual(d.endCalls, 1) +assert.strictEqual(w.endCalls, 1) +assert.strictEqual(r.listeners('end').length, 0) +assert.strictEqual(r.listeners('close').length, 0) +assert.strictEqual(d.listeners('end').length, 0) +assert.strictEqual(d.listeners('close').length, 0) +assert.strictEqual(w.listeners('end').length, 0) +assert.strictEqual(w.listeners('close').length, 0) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git 
a/test/parallel/test-stream-pipe-error-handling.js b/test/parallel/test-stream-pipe-error-handling.js index 200d561524..3c7b1a2a1a 100644 --- a/test/parallel/test-stream-pipe-error-handling.js +++ b/test/parallel/test-stream-pipe-error-handling.js @@ -18,122 +18,123 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { Stream, PassThrough } = require('../../lib/ours/index'); - -{ - const source = new Stream(); - const dest = new Stream(); + error() {} +} +const common = require('../common') - source.pipe(dest); +const assert = require('assert') - let gotErr = null; - source.on('error', function(err) { - gotErr = err; - }); +const { Stream, PassThrough } = require('../../lib/ours/index') - const err = new Error('This stream turned into bacon.'); - source.emit('error', err); - assert.strictEqual(gotErr, err); +{ + const source = new Stream() + const dest = new Stream() + source.pipe(dest) + let gotErr = null + source.on('error', function (err) { + gotErr = err + }) + const err = new Error('This stream turned into bacon.') + source.emit('error', err) + assert.strictEqual(gotErr, err) } - { - const source = new Stream(); - const dest = new Stream(); + const source = new Stream() + const dest = new Stream() + source.pipe(dest) + const err = new Error('This stream turned into bacon.') + let gotErr = null - source.pipe(dest); - - const err = new Error('This stream turned into bacon.'); - - let gotErr = null; try { - source.emit('error', err); + source.emit('error', err) } catch (e) { - gotErr = e; + gotErr = e } - assert.strictEqual(gotErr, err); + assert.strictEqual(gotErr, err) } - { - const R = Stream.Readable; - const W = Stream.Writable; - - const r = new R({ autoDestroy: false }); - const w = new W({ autoDestroy: false }); - let removed = false; - - r._read = common.mustCall(function() { - setTimeout(common.mustCall(function() { - assert(removed); - assert.throws(function() { - w.emit('error', new Error('fail')); - }, /^Error: fail$/); - }), 1); - }); - - w.on('error', myOnError); - r.pipe(w); - w.removeListener('error', myOnError); - removed = true; + const R = Stream.Readable + const W = Stream.Writable + const r = new R({ + autoDestroy: false + }) + const w = new W({ + autoDestroy: false + }) + let removed = false + r._read = common.mustCall(function () { + setTimeout( + common.mustCall(function () { + assert(removed) + assert.throws(function () { + w.emit('error', new Error('fail')) + }, /^Error: fail$/) + }), + 1 + ) + }) + w.on('error', myOnError) + r.pipe(w) + w.removeListener('error', myOnError) + removed = true function myOnError() { - throw new Error('this should not happen'); + throw new Error('this should not happen') } } - { - const R = Stream.Readable; - const W = Stream.Writable; - - const r = new R(); - const w = new W(); - let removed = false; - - r._read = common.mustCall(function() { - setTimeout(common.mustCall(function() { - assert(removed); - w.emit('error', new Error('fail')); - }), 1); - }); - - w.on('error', common.mustCall()); - w._write = () => {}; - - r.pipe(w); - // Removing some OTHER random listener should not do anything - w.removeListener('error', () => {}); - 
removed = true; + const R = Stream.Readable + const W = Stream.Writable + const r = new R() + const w = new W() + let removed = false + r._read = common.mustCall(function () { + setTimeout( + common.mustCall(function () { + assert(removed) + w.emit('error', new Error('fail')) + }), + 1 + ) + }) + w.on('error', common.mustCall()) + + w._write = () => {} + + r.pipe(w) // Removing some OTHER random listener should not do anything + + w.removeListener('error', () => {}) + removed = true } - { - const _err = new Error('this should be handled'); - const destination = new PassThrough(); - destination.once('error', common.mustCall((err) => { - assert.strictEqual(err, _err); - })); - - const stream = new Stream(); - stream - .pipe(destination); - - destination.destroy(_err); + const _err = new Error('this should be handled') + + const destination = new PassThrough() + destination.once( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, _err) + }) + ) + const stream = new Stream() + stream.pipe(destination) + destination.destroy(_err) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-error-unhandled.js b/test/parallel/test-stream-pipe-error-unhandled.js index 1fc1381c9a..557ed6b39b 100644 --- a/test/parallel/test-stream-pipe-error-unhandled.js +++ b/test/parallel/test-stream-pipe-error-unhandled.js @@ -1,36 +1,43 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { Readable, Writable } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -process.on('uncaughtException', common.mustCall((err) => { - assert.strictEqual(err.message, 'asd'); -})); + error() {} +} +const common = require('../common') +const assert = require('assert') + +const { Readable, Writable } = require('../../lib/ours/index') + +process.on( + 'uncaughtException', + common.mustCall((err) => { + assert.strictEqual(err.message, 'asd') + }) +) const r = new Readable({ read() { - this.push('asd'); + this.push('asd') } -}); +}) const w = new Writable({ autoDestroy: true, + write() {} -}); - -r.pipe(w); -w.destroy(new Error('asd')); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) +r.pipe(w) +w.destroy(new Error('asd')) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-event.js b/test/parallel/test-stream-pipe-event.js index f78cb1dfca..e997c9df4c 100644 --- a/test/parallel/test-stream-pipe-event.js +++ b/test/parallel/test-stream-pipe-event.js @@ -18,49 +18,51 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
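Before the next hunk, a short sketch of the rule the two error-handling tests above depend on: pipe() installs no 'error' handler on the source, so a source error stays on the source. This uses Node's core 'stream' module and a plain assertion; all names are made up for the example.

'use strict'
const { PassThrough, Writable } = require('stream')
const assert = require('assert')

const source = new PassThrough()
const dest = new Writable({ write(chunk, encoding, callback) { callback() } })
source.pipe(dest)

// Without this listener the emit below would throw, because pipe() does not
// install any error handling on the source for you.
let seen = null
source.on('error', (err) => { seen = err })

const boom = new Error('boom')
source.emit('error', boom)
assert.strictEqual(seen, boom)
console.log('the error was delivered to the source handler only')
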
+'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const stream = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +require('../common') + +const stream = require('../../lib/ours/index') + +const assert = require('assert') function Writable() { - this.writable = true; - stream.Stream.call(this); + this.writable = true + stream.Stream.call(this) } -Object.setPrototypeOf(Writable.prototype, stream.Stream.prototype); -Object.setPrototypeOf(Writable, stream.Stream); + +Object.setPrototypeOf(Writable.prototype, stream.Stream.prototype) +Object.setPrototypeOf(Writable, stream.Stream) function Readable() { - this.readable = true; - stream.Stream.call(this); + this.readable = true + stream.Stream.call(this) } -Object.setPrototypeOf(Readable.prototype, stream.Stream.prototype); -Object.setPrototypeOf(Readable, stream.Stream); - -let passed = false; - -const w = new Writable(); -w.on('pipe', function(src) { - passed = true; -}); - -const r = new Readable(); -r.pipe(w); -assert.ok(passed); +Object.setPrototypeOf(Readable.prototype, stream.Stream.prototype) +Object.setPrototypeOf(Readable, stream.Stream) +let passed = false +const w = new Writable() +w.on('pipe', function (src) { + passed = true +}) +const r = new Readable() +r.pipe(w) +assert.ok(passed) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-flow-after-unpipe.js b/test/parallel/test-stream-pipe-flow-after-unpipe.js index d72de00eb7..d598f48214 100644 --- a/test/parallel/test-stream-pipe-flow-after-unpipe.js +++ b/test/parallel/test-stream-pipe-flow-after-unpipe.js @@ -1,44 +1,45 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const { Readable, Writable } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -// Tests that calling .unpipe() un-blocks a stream that is paused because + error() {} +} +const common = require('../common') + +const { Readable, Writable } = require('../../lib/ours/index') // Tests that calling .unpipe() un-blocks a stream that is paused because // it is waiting on the writable side to finish a write(). const rs = new Readable({ highWaterMark: 1, // That this gets called at least 20 times is the real test here. read: common.mustCallAtLeast(() => rs.push('foo'), 20) -}); - +}) const ws = new Writable({ highWaterMark: 1, write: common.mustCall(() => { // Ignore the callback, this write() simply never finishes. - setImmediate(() => rs.unpipe(ws)); + setImmediate(() => rs.unpipe(ws)) }) -}); - -let chunks = 0; -rs.on('data', common.mustCallAtLeast(() => { - chunks++; - if (chunks >= 20) - rs.pause(); // Finish this test. 
-})); - -rs.pipe(ws); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) +let chunks = 0 +rs.on( + 'data', + common.mustCallAtLeast(() => { + chunks++ + if (chunks >= 20) rs.pause() // Finish this test. + }) +) +rs.pipe(ws) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-flow.js b/test/parallel/test-stream-pipe-flow.js index 72932b8546..b38571a712 100644 --- a/test/parallel/test-stream-pipe-flow.js +++ b/test/parallel/test-stream-pipe-flow.js @@ -1,105 +1,113 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { Readable, Writable, PassThrough } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -{ - let ticks = 17; + error() {} +} +const common = require('../common') + +const assert = require('assert') +const { Readable, Writable, PassThrough } = require('../../lib/ours/index') + +{ + let ticks = 17 const rs = new Readable({ objectMode: true, read: () => { - if (ticks-- > 0) - return process.nextTick(() => rs.push({})); - rs.push({}); - rs.push(null); + if (ticks-- > 0) return process.nextTick(() => rs.push({})) + rs.push({}) + rs.push(null) } - }); - + }) const ws = new Writable({ highWaterMark: 0, objectMode: true, write: (data, end, cb) => setImmediate(cb) - }); - - rs.on('end', common.mustCall()); - ws.on('finish', common.mustCall()); - rs.pipe(ws); + }) + rs.on('end', common.mustCall()) + ws.on('finish', common.mustCall()) + rs.pipe(ws) } - { - let missing = 8; - + let missing = 8 const rs = new Readable({ objectMode: true, read: () => { - if (missing--) rs.push({}); - else rs.push(null); + if (missing--) rs.push({}) + else rs.push(null) } - }); - + }) const pt = rs - .pipe(new PassThrough({ objectMode: true, highWaterMark: 2 })) - .pipe(new PassThrough({ objectMode: true, highWaterMark: 2 })); - + .pipe( + new PassThrough({ + objectMode: true, + highWaterMark: 2 + }) + ) + .pipe( + new PassThrough({ + objectMode: true, + highWaterMark: 2 + }) + ) pt.on('end', () => { - wrapper.push(null); - }); - + wrapper.push(null) + }) const wrapper = new Readable({ objectMode: true, read: () => { process.nextTick(() => { - let data = pt.read(); + let data = pt.read() + if (data === null) { pt.once('readable', () => { - data = pt.read(); - if (data !== null) wrapper.push(data); - }); + data = pt.read() + if (data !== null) wrapper.push(data) + }) } else { - wrapper.push(data); + wrapper.push(data) } - }); + }) } - }); - - wrapper.resume(); - wrapper.on('end', common.mustCall()); + }) + wrapper.resume() + wrapper.on('end', common.mustCall()) } - { // Only register drain if there is backpressure. 
- const rs = new Readable({ read() {} }); - - const pt = rs - .pipe(new PassThrough({ objectMode: true, highWaterMark: 2 })); - assert.strictEqual(pt.listenerCount('drain'), 0); + const rs = new Readable({ + read() {} + }) + const pt = rs.pipe( + new PassThrough({ + objectMode: true, + highWaterMark: 2 + }) + ) + assert.strictEqual(pt.listenerCount('drain'), 0) pt.on('finish', () => { - assert.strictEqual(pt.listenerCount('drain'), 0); - }); - - rs.push('asd'); - assert.strictEqual(pt.listenerCount('drain'), 0); - + assert.strictEqual(pt.listenerCount('drain'), 0) + }) + rs.push('asd') + assert.strictEqual(pt.listenerCount('drain'), 0) process.nextTick(() => { - rs.push('asd'); - assert.strictEqual(pt.listenerCount('drain'), 0); - rs.push(null); - assert.strictEqual(pt.listenerCount('drain'), 0); - }); + rs.push('asd') + assert.strictEqual(pt.listenerCount('drain'), 0) + rs.push(null) + assert.strictEqual(pt.listenerCount('drain'), 0) + }) } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-manual-resume.js b/test/parallel/test-stream-pipe-manual-resume.js index 8abbb6ddd0..c95c9d98d1 100644 --- a/test/parallel/test-stream-pipe-manual-resume.js +++ b/test/parallel/test-stream-pipe-manual-resume.js @@ -1,50 +1,51 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const stream = require('../../lib/ours/index'); +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const stream = require('../../lib/ours/index') function test(throwCodeInbetween) { // Check that a pipe does not stall if .read() is called unexpectedly // (i.e. the stream is not resumed by the pipe). 
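The comment above can be illustrated in a few lines: even if the consumer calls read() on the source while a pipe is active, the pipe still drains and finishes. A sketch with Node's built-in 'stream' module and arbitrary counts, not the suite's helpers:

'use strict'
const { Readable, Writable } = require('stream')

let remaining = 100
const src = new Readable({
  objectMode: true,
  read() { this.push(remaining-- > 0 ? { remaining } : null) }
})
const dst = new Writable({
  objectMode: true,
  write(obj, encoding, callback) { setImmediate(callback) }
})

dst.on('finish', () => console.log('pipe finished despite the manual read()'))
src.pipe(dst)
setImmediate(() => src.read()) // An "unexpected" read in the middle of the pipe.
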
- - const n = 1000; - let counter = n; + const n = 1000 + let counter = n const rs = stream.Readable({ objectMode: true, read: common.mustCallAtLeast(() => { if (--counter >= 0) - rs.push({ counter }); - else - rs.push(null); + rs.push({ + counter + }) + else rs.push(null) }, n) - }); - + }) const ws = stream.Writable({ objectMode: true, write: common.mustCall((data, enc, cb) => { - setImmediate(cb); + setImmediate(cb) }, n) - }); - - setImmediate(() => throwCodeInbetween(rs, ws)); - - rs.pipe(ws); + }) + setImmediate(() => throwCodeInbetween(rs, ws)) + rs.pipe(ws) } -test((rs) => rs.read()); -test((rs) => rs.resume()); -test(() => 0); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +test((rs) => rs.read()) +test((rs) => rs.resume()) +test(() => 0) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-multiple-pipes.js b/test/parallel/test-stream-pipe-multiple-pipes.js index 55f7b7203d..2163ea8f38 100644 --- a/test/parallel/test-stream-pipe-multiple-pipes.js +++ b/test/parallel/test-stream-pipe-multiple-pipes.js @@ -1,66 +1,69 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const stream = require('../../lib/ours/index'); -const assert = require('assert'); +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const stream = require('../../lib/ours/index') + +const assert = require('assert') const readable = new stream.Readable({ read: () => {} -}); - -const writables = []; +}) +const writables = [] for (let i = 0; i < 5; i++) { const target = new stream.Writable({ write: common.mustCall((chunk, encoding, callback) => { - target.output.push(chunk); - callback(); + target.output.push(chunk) + callback() }, 1) - }); - target.output = []; - - target.on('pipe', common.mustCall()); - readable.pipe(target); - - - writables.push(target); + }) + target.output = [] + target.on('pipe', common.mustCall()) + readable.pipe(target) + writables.push(target) } -const input = Buffer.from([1, 2, 3, 4, 5]); - -readable.push(input); - -// The pipe() calls will postpone emission of the 'resume' event using nextTick, +const input = Buffer.from([1, 2, 3, 4, 5]) +readable.push(input) // The pipe() calls will postpone emission of the 'resume' event using nextTick, // so no data will be available to the writable streams until then. -process.nextTick(common.mustCall(() => { - for (const target of writables) { - assert.deepStrictEqual(target.output, [input]); - - target.on('unpipe', common.mustCall()); - readable.unpipe(target); - } - readable.push('something else'); // This does not get through. - readable.push(null); - readable.resume(); // Make sure the 'end' event gets emitted. -})); +process.nextTick( + common.mustCall(() => { + for (const target of writables) { + assert.deepStrictEqual(target.output, [input]) + target.on('unpipe', common.mustCall()) + readable.unpipe(target) + } -readable.on('end', common.mustCall(() => { - for (const target of writables) { - assert.deepStrictEqual(target.output, [input]); - } -})); + readable.push('something else') // This does not get through. 
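The unpipe behaviour asserted here (a chunk pushed after unpipe() never reaches the detached destination) in a standalone sketch, with Node's built-in 'stream' module and a plain assertion in place of the suite's helpers:

'use strict'
const { Readable, Writable } = require('stream')
const assert = require('assert')

const src = new Readable({ read() {} })
const received = []
const dst = new Writable({
  write(chunk, encoding, callback) { received.push(String(chunk)); callback() }
})

src.pipe(dst)
src.push('first')

process.nextTick(() => {
  src.unpipe(dst)
  src.push('second') // After unpipe() this no longer reaches dst.
  src.push(null)
  src.on('end', () => {
    assert.deepStrictEqual(received, ['first'])
    console.log('only the chunk sent before unpipe() arrived')
  })
  src.resume() // Let the source drain so that 'end' is emitted.
})
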
- /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); + readable.push(null) + readable.resume() // Make sure the 'end' event gets emitted. + }) +) +readable.on( + 'end', + common.mustCall(() => { + for (const target of writables) { + assert.deepStrictEqual(target.output, [input]) } - }); - /* replacement end */ + }) +) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-needDrain.js b/test/parallel/test-stream-pipe-needDrain.js index 1da99efba5..09f51bba73 100644 --- a/test/parallel/test-stream-pipe-needDrain.js +++ b/test/parallel/test-stream-pipe-needDrain.js @@ -1,46 +1,47 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const { Readable, Writable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Readable, Writable } = require('../../lib/ours/index') // Pipe should pause temporarily if writable needs drain. -// Pipe should pause temporarily if writable needs drain. { const w = new Writable({ write(buf, encoding, callback) { - process.nextTick(callback); + process.nextTick(callback) }, + highWaterMark: 1 - }); + }) while (w.write('asd')); - assert.strictEqual(w.writableNeedDrain, true); - + assert.strictEqual(w.writableNeedDrain, true) const r = new Readable({ read() { - this.push('asd'); - this.push(null); + this.push('asd') + this.push(null) } - }); - - r.on('pause', common.mustCall(2)); - r.on('end', common.mustCall()); - - r.pipe(w); + }) + r.on('pause', common.mustCall(2)) + r.on('end', common.mustCall()) + r.pipe(w) } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-same-destination-twice.js b/test/parallel/test-stream-pipe-same-destination-twice.js index 4283c69a77..9d69e27011 100644 --- a/test/parallel/test-stream-pipe-same-destination-twice.js +++ b/test/parallel/test-stream-pipe-same-destination-twice.js @@ -1,93 +1,81 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); +const silentConsole = { + log() {}, -// Regression test for https://github.com/nodejs/node/issues/12718. + error() {} +} +const common = require('../common') // Regression test for https://github.com/nodejs/node/issues/12718. // Tests that piping a source stream twice to the same destination stream // works, and that a subsequent unpipe() call only removes the pipe *once*. 
-const assert = require('assert'); -const { PassThrough, Writable } = require('../../lib/ours/index'); + +const assert = require('assert') + +const { PassThrough, Writable } = require('../../lib/ours/index') { - const passThrough = new PassThrough(); + const passThrough = new PassThrough() const dest = new Writable({ write: common.mustCall((chunk, encoding, cb) => { - assert.strictEqual(`${chunk}`, 'foobar'); - cb(); + assert.strictEqual(`${chunk}`, 'foobar') + cb() }) - }); - - passThrough.pipe(dest); - passThrough.pipe(dest); - - assert.strictEqual(passThrough._events.data.length, 2); - assert.strictEqual(passThrough._readableState.pipes.length, 2); - assert.strictEqual(passThrough._readableState.pipes[0], dest); - assert.strictEqual(passThrough._readableState.pipes[1], dest); - - passThrough.unpipe(dest); - - assert.strictEqual(passThrough._events.data.length, 1); - assert.strictEqual(passThrough._readableState.pipes.length, 1); - assert.deepStrictEqual(passThrough._readableState.pipes, [dest]); - - passThrough.write('foobar'); - passThrough.pipe(dest); + }) + passThrough.pipe(dest) + passThrough.pipe(dest) + assert.strictEqual(passThrough._events.data.length, 2) + assert.strictEqual(passThrough._readableState.pipes.length, 2) + assert.strictEqual(passThrough._readableState.pipes[0], dest) + assert.strictEqual(passThrough._readableState.pipes[1], dest) + passThrough.unpipe(dest) + assert.strictEqual(passThrough._events.data.length, 1) + assert.strictEqual(passThrough._readableState.pipes.length, 1) + assert.deepStrictEqual(passThrough._readableState.pipes, [dest]) + passThrough.write('foobar') + passThrough.pipe(dest) } - { - const passThrough = new PassThrough(); + const passThrough = new PassThrough() const dest = new Writable({ write: common.mustCall((chunk, encoding, cb) => { - assert.strictEqual(`${chunk}`, 'foobar'); - cb(); + assert.strictEqual(`${chunk}`, 'foobar') + cb() }, 2) - }); - - passThrough.pipe(dest); - passThrough.pipe(dest); - - assert.strictEqual(passThrough._events.data.length, 2); - assert.strictEqual(passThrough._readableState.pipes.length, 2); - assert.strictEqual(passThrough._readableState.pipes[0], dest); - assert.strictEqual(passThrough._readableState.pipes[1], dest); - - passThrough.write('foobar'); + }) + passThrough.pipe(dest) + passThrough.pipe(dest) + assert.strictEqual(passThrough._events.data.length, 2) + assert.strictEqual(passThrough._readableState.pipes.length, 2) + assert.strictEqual(passThrough._readableState.pipes[0], dest) + assert.strictEqual(passThrough._readableState.pipes[1], dest) + passThrough.write('foobar') } - { - const passThrough = new PassThrough(); + const passThrough = new PassThrough() const dest = new Writable({ write: common.mustNotCall() - }); - - passThrough.pipe(dest); - passThrough.pipe(dest); - - assert.strictEqual(passThrough._events.data.length, 2); - assert.strictEqual(passThrough._readableState.pipes.length, 2); - assert.strictEqual(passThrough._readableState.pipes[0], dest); - assert.strictEqual(passThrough._readableState.pipes[1], dest); - - passThrough.unpipe(dest); - passThrough.unpipe(dest); - - assert.strictEqual(passThrough._events.data, undefined); - assert.strictEqual(passThrough._readableState.pipes.length, 0); - - passThrough.write('foobar'); + }) + passThrough.pipe(dest) + passThrough.pipe(dest) + assert.strictEqual(passThrough._events.data.length, 2) + assert.strictEqual(passThrough._readableState.pipes.length, 2) + assert.strictEqual(passThrough._readableState.pipes[0], dest) + 
assert.strictEqual(passThrough._readableState.pipes[1], dest) + passThrough.unpipe(dest) + passThrough.unpipe(dest) + assert.strictEqual(passThrough._events.data, undefined) + assert.strictEqual(passThrough._readableState.pipes.length, 0) + passThrough.write('foobar') } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-unpipe-streams.js b/test/parallel/test-stream-pipe-unpipe-streams.js index bf011ff81b..759bacf50e 100644 --- a/test/parallel/test-stream-pipe-unpipe-streams.js +++ b/test/parallel/test-stream-pipe-unpipe-streams.js @@ -1,111 +1,113 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); - -const { Readable, Writable } = require('../../lib/ours/index'); - -const source = Readable({ read: () => {} }); -const dest1 = Writable({ write: () => {} }); -const dest2 = Writable({ write: () => {} }); - -source.pipe(dest1); -source.pipe(dest2); - -dest1.on('unpipe', common.mustCall()); -dest2.on('unpipe', common.mustCall()); - -assert.strictEqual(source._readableState.pipes[0], dest1); -assert.strictEqual(source._readableState.pipes[1], dest2); -assert.strictEqual(source._readableState.pipes.length, 2); - -// Should be able to unpipe them in the reverse order that they were piped. - -source.unpipe(dest2); - -assert.deepStrictEqual(source._readableState.pipes, [dest1]); -assert.notStrictEqual(source._readableState.pipes, dest2); - -dest2.on('unpipe', common.mustNotCall()); -source.unpipe(dest2); - -source.unpipe(dest1); - -assert.strictEqual(source._readableState.pipes.length, 0); +const silentConsole = { + log() {}, + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Readable, Writable } = require('../../lib/ours/index') + +const source = Readable({ + read: () => {} +}) +const dest1 = Writable({ + write: () => {} +}) +const dest2 = Writable({ + write: () => {} +}) +source.pipe(dest1) +source.pipe(dest2) +dest1.on('unpipe', common.mustCall()) +dest2.on('unpipe', common.mustCall()) +assert.strictEqual(source._readableState.pipes[0], dest1) +assert.strictEqual(source._readableState.pipes[1], dest2) +assert.strictEqual(source._readableState.pipes.length, 2) // Should be able to unpipe them in the reverse order that they were piped. + +source.unpipe(dest2) +assert.deepStrictEqual(source._readableState.pipes, [dest1]) +assert.notStrictEqual(source._readableState.pipes, dest2) +dest2.on('unpipe', common.mustNotCall()) +source.unpipe(dest2) +source.unpipe(dest1) +assert.strictEqual(source._readableState.pipes.length, 0) { // Test `cleanup()` if we unpipe all streams. 
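The cleanup check that begins here can be sketched in isolation: argument-less unpipe() detaches every destination at once and drops the pipe's own listeners. A rough illustration against Node's built-in 'stream' module, with plain assertions instead of common.mustCall:

'use strict'
const { Readable, Writable } = require('stream')
const assert = require('assert')

const src = new Readable({ read() {} })
const makeSink = () => new Writable({ write(chunk, encoding, callback) { callback() } })
const a = makeSink()
const b = makeSink()

let unpiped = 0
a.on('unpipe', () => unpiped++)
b.on('unpipe', () => unpiped++)

src.pipe(a)
src.pipe(b)
src.unpipe() // No argument: every destination is detached.

assert.strictEqual(unpiped, 2)
assert.strictEqual(src.listenerCount('data'), 0) // The pipe's own listeners are gone too.
console.log('both destinations detached and cleaned up')
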
- const source = Readable({ read: () => {} }); - const dest1 = Writable({ write: () => {} }); - const dest2 = Writable({ write: () => {} }); - - let destCount = 0; - const srcCheckEventNames = ['end', 'data']; - const destCheckEventNames = ['close', 'finish', 'drain', 'error', 'unpipe']; - + const source = Readable({ + read: () => {} + }) + const dest1 = Writable({ + write: () => {} + }) + const dest2 = Writable({ + write: () => {} + }) + let destCount = 0 + const srcCheckEventNames = ['end', 'data'] + const destCheckEventNames = ['close', 'finish', 'drain', 'error', 'unpipe'] const checkSrcCleanup = common.mustCall(() => { - assert.strictEqual(source._readableState.pipes.length, 0); - assert.strictEqual(source._readableState.flowing, false); - + assert.strictEqual(source._readableState.pipes.length, 0) + assert.strictEqual(source._readableState.flowing, false) srcCheckEventNames.forEach((eventName) => { - assert.strictEqual( - source.listenerCount(eventName), 0, - `source's '${eventName}' event listeners not removed` - ); - }); - }); + assert.strictEqual(source.listenerCount(eventName), 0, `source's '${eventName}' event listeners not removed`) + }) + }) function checkDestCleanup(dest) { - const currentDestId = ++destCount; - source.pipe(dest); - + const currentDestId = ++destCount + source.pipe(dest) const unpipeChecker = common.mustCall(() => { assert.deepStrictEqual( - dest.listeners('unpipe'), [unpipeChecker], - `destination{${currentDestId}} should have a 'unpipe' event ` + - 'listener which is `unpipeChecker`' - ); - dest.removeListener('unpipe', unpipeChecker); + dest.listeners('unpipe'), + [unpipeChecker], + `destination{${currentDestId}} should have a 'unpipe' event ` + 'listener which is `unpipeChecker`' + ) + dest.removeListener('unpipe', unpipeChecker) destCheckEventNames.forEach((eventName) => { assert.strictEqual( - dest.listenerCount(eventName), 0, - `destination{${currentDestId}}'s '${eventName}' event ` + - 'listeners not removed' - ); - }); - - if (--destCount === 0) - checkSrcCleanup(); - }); - - dest.on('unpipe', unpipeChecker); + dest.listenerCount(eventName), + 0, + `destination{${currentDestId}}'s '${eventName}' event ` + 'listeners not removed' + ) + }) + if (--destCount === 0) checkSrcCleanup() + }) + dest.on('unpipe', unpipeChecker) } - checkDestCleanup(dest1); - checkDestCleanup(dest2); - source.unpipe(); + checkDestCleanup(dest1) + checkDestCleanup(dest2) + source.unpipe() } - { - const src = Readable({ read: () => {} }); - const dst = Writable({ write: () => {} }); - src.pipe(dst); - src.on('resume', common.mustCall(() => { - src.on('pause', common.mustCall()); - src.unpipe(dst); - })); + const src = Readable({ + read: () => {} + }) + const dst = Writable({ + write: () => {} + }) + src.pipe(dst) + src.on( + 'resume', + common.mustCall(() => { + src.on('pause', common.mustCall()) + src.unpipe(dst) + }) + ) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipe-without-listenerCount.js b/test/parallel/test-stream-pipe-without-listenerCount.js index 91c8beed6e..2db82dcb27 100644 --- a/test/parallel/test-stream-pipe-without-listenerCount.js +++ 
b/test/parallel/test-stream-pipe-without-listenerCount.js @@ -1,32 +1,34 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const stream = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -const r = new stream.Stream(); -r.listenerCount = undefined; + error() {} +} +const common = require('../common') -const w = new stream.Stream(); -w.listenerCount = undefined; +const stream = require('../../lib/ours/index') -w.on('pipe', function() { - r.emit('error', new Error('Readable Error')); - w.emit('error', new Error('Writable Error')); -}); -r.on('error', common.mustCall()); -w.on('error', common.mustCall()); -r.pipe(w); +const r = new stream.Stream() +r.listenerCount = undefined +const w = new stream.Stream() +w.listenerCount = undefined +w.on('pipe', function () { + r.emit('error', new Error('Readable Error')) + w.emit('error', new Error('Writable Error')) +}) +r.on('error', common.mustCall()) +w.on('error', common.mustCall()) +r.pipe(w) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipeline-async-iterator.js b/test/parallel/test-stream-pipeline-async-iterator.js index 49bda68161..e45e01e658 100644 --- a/test/parallel/test-stream-pipeline-async-iterator.js +++ b/test/parallel/test-stream-pipeline-async-iterator.js @@ -1,46 +1,45 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Readable, PassThrough, pipeline } = require('../../lib/ours/index'); -const assert = require('assert'); - -const _err = new Error('kaboom'); + error() {} +} +const common = require('../common') -async function run() { - const source = new Readable({ - read() { - } - }); - source.push('hello'); - source.push('world'); +const { Readable, PassThrough, pipeline } = require('../../lib/ours/index') - setImmediate(() => { source.destroy(_err); }); +const assert = require('assert') - const iterator = pipeline( - source, - new PassThrough(), - () => {}); +const _err = new Error('kaboom') - iterator.setEncoding('utf8'); +async function run() { + const source = new Readable({ + read() {} + }) + source.push('hello') + source.push('world') + setImmediate(() => { + source.destroy(_err) + }) + const iterator = pipeline(source, new PassThrough(), () => {}) + iterator.setEncoding('utf8') for await (const k of iterator) { - assert.strictEqual(k, 'helloworld'); + assert.strictEqual(k, 'helloworld') } } -run().catch(common.mustCall((err) => assert.strictEqual(err, _err))); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +run().catch(common.mustCall((err) => assert.strictEqual(err, _err))) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + 
} +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipeline-http2.js b/test/parallel/test-stream-pipeline-http2.js index 97cb73b26c..e494dde27c 100644 --- a/test/parallel/test-stream-pipeline-http2.js +++ b/test/parallel/test-stream-pipeline-http2.js @@ -1,51 +1,57 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -if (!common.hasCrypto) - common.skip('missing crypto'); -const { Readable, pipeline } = require('../../lib/ours/index'); -const http2 = require('http2'); + error() {} +} +const common = require('../common') + +if (!common.hasCrypto) common.skip('missing crypto') + +const { Readable, pipeline } = require('../../lib/ours/index') + +const http2 = require('http2') { const server = http2.createServer((req, res) => { - pipeline(req, res, common.mustCall()); - }); - + pipeline(req, res, common.mustCall()) + }) server.listen(0, () => { - const url = `http://localhost:${server.address().port}`; - const client = http2.connect(url); - const req = client.request({ ':method': 'POST' }); - + const url = `http://localhost:${server.address().port}` + const client = http2.connect(url) + const req = client.request({ + ':method': 'POST' + }) const rs = new Readable({ read() { - rs.push('hello'); + rs.push('hello') } - }); - - pipeline(rs, req, common.mustCall((err) => { - server.close(); - client.close(); - })); - - let cnt = 10; + }) + pipeline( + rs, + req, + common.mustCall((err) => { + server.close() + client.close() + }) + ) + let cnt = 10 req.on('data', (data) => { - cnt--; - if (cnt === 0) rs.destroy(); - }); - }); + cnt-- + if (cnt === 0) rs.destroy() + }) + }) } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipeline-listeners.js b/test/parallel/test-stream-pipeline-listeners.js index 0c0de2b633..214d8f09ba 100644 --- a/test/parallel/test-stream-pipeline-listeners.js +++ b/test/parallel/test-stream-pipeline-listeners.js @@ -1,91 +1,103 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { pipeline, Duplex, PassThrough, Writable } = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') -process.on('uncaughtException', common.mustCall((err) => { - assert.strictEqual(err.message, 'no way'); -}, 2)); +const { pipeline, Duplex, PassThrough, Writable } = require('../../lib/ours/index') -// Ensure that listeners is removed if last stream is readable +const assert = require('assert') + +process.on( + 'uncaughtException', + common.mustCall((err) => { + assert.strictEqual(err.message, 'no way') + }, 2) +) // Ensure that listeners is removed if last stream is readable // And other stream's listeners unchanged -const a = new PassThrough(); -a.end('foobar'); + +const a = new PassThrough() +a.end('foobar') const b = new Duplex({ write(chunk, encoding, callback) { - callback(); - } -}); 
-pipeline(a, b, common.mustCall((error) => { - if (error) { - assert.ifError(error); + callback() } +}) +pipeline( + a, + b, + common.mustCall((error) => { + if (error) { + assert.ifError(error) + } - assert(a.listenerCount('error') > 0); - assert.strictEqual(b.listenerCount('error'), 0); - setTimeout(() => { - assert.strictEqual(b.listenerCount('error'), 0); - b.destroy(new Error('no way')); - }, 100); -})); + assert(a.listenerCount('error') > 0) + assert.strictEqual(b.listenerCount('error'), 0) + setTimeout(() => { + assert.strictEqual(b.listenerCount('error'), 0) + b.destroy(new Error('no way')) + }, 100) + }) +) // Async generators -// Async generators -const c = new PassThrough(); -c.end('foobar'); +const c = new PassThrough() +c.end('foobar') const d = pipeline( c, async function* (source) { for await (const chunk of source) { - yield String(chunk).toUpperCase(); + yield String(chunk).toUpperCase() } }, common.mustCall((error) => { if (error) { - assert.ifError(error); + assert.ifError(error) } - assert(c.listenerCount('error') > 0); - assert.strictEqual(d.listenerCount('error'), 0); + assert(c.listenerCount('error') > 0) + assert.strictEqual(d.listenerCount('error'), 0) setTimeout(() => { - assert.strictEqual(b.listenerCount('error'), 0); - d.destroy(new Error('no way')); - }, 100); + assert.strictEqual(b.listenerCount('error'), 0) + d.destroy(new Error('no way')) + }, 100) }) -); +) // If last stream is not readable, will not throw and remove listeners -// If last stream is not readable, will not throw and remove listeners -const e = new PassThrough(); -e.end('foobar'); +const e = new PassThrough() +e.end('foobar') const f = new Writable({ write(chunk, encoding, callback) { - callback(); - } -}); -pipeline(e, f, common.mustCall((error) => { - if (error) { - assert.ifError(error); + callback() } +}) +pipeline( + e, + f, + common.mustCall((error) => { + if (error) { + assert.ifError(error) + } - assert(e.listenerCount('error') > 0); - assert(f.listenerCount('error') > 0); - setTimeout(() => { - assert(f.listenerCount('error') > 0); - f.destroy(new Error('no way')); - }, 100); -})); + assert(e.listenerCount('error') > 0) + assert(f.listenerCount('error') > 0) + setTimeout(() => { + assert(f.listenerCount('error') > 0) + f.destroy(new Error('no way')) + }, 100) + }) +) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipeline-process.js b/test/parallel/test-stream-pipeline-process.js index 1b72352e20..f00759c9d4 100644 --- a/test/parallel/test-stream-pipeline-process.js +++ b/test/parallel/test-stream-pipeline-process.js @@ -1,41 +1,39 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const os = require('os'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const os = require('os') if (process.argv[2] === 'child') { - const { pipeline } = require('../../lib/ours/index'); - pipeline( - process.stdin, - process.stdout, - common.mustSucceed() - ); + 
const { pipeline } = require('../../lib/ours/index') + + pipeline(process.stdin, process.stdout, common.mustSucceed()) } else { - const cp = require('child_process'); - cp.exec([ - 'echo', - 'hello', - '|', - `"${process.execPath}"`, - `"${__filename}"`, - 'child', - ].join(' '), common.mustSucceed((stdout) => { - assert.strictEqual(stdout.split(os.EOL).shift().trim(), 'hello'); - })); + const cp = require('child_process') + + cp.exec( + ['echo', 'hello', '|', `"${process.execPath}"`, `"${__filename}"`, 'child'].join(' '), + common.mustSucceed((stdout) => { + assert.strictEqual(stdout.split(os.EOL).shift().trim(), 'hello') + }) + ) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipeline-queued-end-in-destroy.js b/test/parallel/test-stream-pipeline-queued-end-in-destroy.js index 1dab183bc0..dcb7d99b8f 100644 --- a/test/parallel/test-stream-pipeline-queued-end-in-destroy.js +++ b/test/parallel/test-stream-pipeline-queued-end-in-destroy.js @@ -1,54 +1,59 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { Readable, Duplex, pipeline } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -// Test that the callback for pipeline() is called even when the ._destroy() + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Readable, Duplex, pipeline } = require('../../lib/ours/index') // Test that the callback for pipeline() is called even when the ._destroy() // method of the stream places an .end() request to itself that does not // get processed before the destruction of the stream (i.e. the 'close' event). // Refs: https://github.com/nodejs/node/issues/24456 const readable = new Readable({ read: common.mustCall(() => {}) -}); - +}) const duplex = new Duplex({ write(chunk, enc, cb) { // Simulate messages queueing up. }, + read() {}, + destroy(err, cb) { // Call end() from inside the destroy() method, like HTTP/2 streams // do at the time of writing. - this.end(); - cb(err); + this.end() + cb(err) } -}); - -duplex.on('finished', common.mustNotCall()); - -pipeline(readable, duplex, common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); -})); - -// Write one chunk of data, and destroy the stream later. +}) +duplex.on('finished', common.mustNotCall()) +pipeline( + readable, + duplex, + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE') + }) +) // Write one chunk of data, and destroy the stream later. // That should trigger the pipeline destruction. 
-readable.push('foo'); + +readable.push('foo') setImmediate(() => { - readable.destroy(); -}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + readable.destroy() +}) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipeline-uncaught.js b/test/parallel/test-stream-pipeline-uncaught.js index 2399c1ce98..306b7d7bf5 100644 --- a/test/parallel/test-stream-pipeline-uncaught.js +++ b/test/parallel/test-stream-pipeline-uncaught.js @@ -1,37 +1,45 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { - pipeline, - PassThrough -} = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') -process.on('uncaughtException', common.mustCall((err) => { - assert.strictEqual(err.message, 'error'); -})); +const { pipeline, PassThrough } = require('../../lib/ours/index') -// Ensure that pipeline that ends with Promise +const assert = require('assert') + +process.on( + 'uncaughtException', + common.mustCall((err) => { + assert.strictEqual(err.message, 'error') + }) +) // Ensure that pipeline that ends with Promise // still propagates error to uncaughtException. -const s = new PassThrough(); -s.end('data'); -pipeline(s, async function(source) { - for await (const chunk of source) { } // eslint-disable-line no-unused-vars, no-empty -}, common.mustSucceed(() => { - throw new Error('error'); -})); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + +const s = new PassThrough() +s.end('data') +pipeline( + s, + async function (source) { + for await (const chunk of source) { + } // eslint-disable-line no-unused-vars, no-empty + }, + common.mustSucceed(() => { + throw new Error('error') + }) +) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-pipeline-with-empty-string.js b/test/parallel/test-stream-pipeline-with-empty-string.js index 0bb2a6c79e..4663e9d7c2 100644 --- a/test/parallel/test-stream-pipeline-with-empty-string.js +++ b/test/parallel/test-stream-pipeline-with-empty-string.js @@ -1,33 +1,34 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { - pipeline, - PassThrough -} = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') +const { pipeline, PassThrough } = require('../../lib/ours/index') async function runTest() { await pipeline( '', - new PassThrough({ objectMode: true }), - common.mustCall(() => { }) - ); + new PassThrough({ + objectMode: true + }), + common.mustCall(() => {}) + ) } -runTest().then(common.mustCall(() => {})); +runTest().then(common.mustCall(() => {})) 
+/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-preprocess.js b/test/parallel/test-stream-preprocess.js index b55cd5780d..0cf027e56d 100644 --- a/test/parallel/test-stream-preprocess.js +++ b/test/parallel/test-stream-preprocess.js @@ -1,75 +1,89 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); +const silentConsole = { + log() {}, -const fs = require('fs'); -const rl = require('readline'); -const fixtures = require('../common/fixtures'); + error() {} +} +const common = require('../common') -const BOM = '\uFEFF'; +const assert = require('assert') -// Get the data using a non-stream way to compare with the streamed data. -const modelData = fixtures.readSync('file-to-read-without-bom.txt', 'utf8'); -const modelDataFirstCharacter = modelData[0]; +const fs = require('fs') -// Detect the number of forthcoming 'line' events for mustCall() 'expected' arg. -const lineCount = modelData.match(/\n/g).length; +const rl = require('readline') -// Ensure both without-bom and with-bom test files are textwise equal. -assert.strictEqual(fixtures.readSync('file-to-read-with-bom.txt', 'utf8'), - `${BOM}${modelData}` -); +const fixtures = require('../common/fixtures') -// An unjustified BOM stripping with a non-BOM character unshifted to a stream. -const inputWithoutBOM = - fs.createReadStream(fixtures.path('file-to-read-without-bom.txt'), 'utf8'); +const BOM = '\uFEFF' // Get the data using a non-stream way to compare with the streamed data. -inputWithoutBOM.once('readable', common.mustCall(() => { - const maybeBOM = inputWithoutBOM.read(1); - assert.strictEqual(maybeBOM, modelDataFirstCharacter); - assert.notStrictEqual(maybeBOM, BOM); +const modelData = fixtures.readSync('file-to-read-without-bom.txt', 'utf8') +const modelDataFirstCharacter = modelData[0] // Detect the number of forthcoming 'line' events for mustCall() 'expected' arg. - inputWithoutBOM.unshift(maybeBOM); +const lineCount = modelData.match(/\n/g).length // Ensure both without-bom and with-bom test files are textwise equal. - let streamedData = ''; - rl.createInterface({ - input: inputWithoutBOM, - }).on('line', common.mustCall((line) => { - streamedData += `${line}\n`; - }, lineCount)).on('close', common.mustCall(() => { - assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\r\n/g, '\n') : modelData); - })); -})); +assert.strictEqual(fixtures.readSync('file-to-read-with-bom.txt', 'utf8'), `${BOM}${modelData}`) // An unjustified BOM stripping with a non-BOM character unshifted to a stream. -// A justified BOM stripping. 
-const inputWithBOM = - fs.createReadStream(fixtures.path('file-to-read-with-bom.txt'), 'utf8'); +const inputWithoutBOM = fs.createReadStream(fixtures.path('file-to-read-without-bom.txt'), 'utf8') +inputWithoutBOM.once( + 'readable', + common.mustCall(() => { + const maybeBOM = inputWithoutBOM.read(1) + assert.strictEqual(maybeBOM, modelDataFirstCharacter) + assert.notStrictEqual(maybeBOM, BOM) + inputWithoutBOM.unshift(maybeBOM) + let streamedData = '' + rl.createInterface({ + input: inputWithoutBOM + }) + .on( + 'line', + common.mustCall((line) => { + streamedData += `${line}\n` + }, lineCount) + ) + .on( + 'close', + common.mustCall(() => { + assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\r\n/g, '\n') : modelData) + }) + ) + }) +) // A justified BOM stripping. -inputWithBOM.once('readable', common.mustCall(() => { - const maybeBOM = inputWithBOM.read(1); - assert.strictEqual(maybeBOM, BOM); +const inputWithBOM = fs.createReadStream(fixtures.path('file-to-read-with-bom.txt'), 'utf8') +inputWithBOM.once( + 'readable', + common.mustCall(() => { + const maybeBOM = inputWithBOM.read(1) + assert.strictEqual(maybeBOM, BOM) + let streamedData = '' + rl.createInterface({ + input: inputWithBOM + }) + .on( + 'line', + common.mustCall((line) => { + streamedData += `${line}\n` + }, lineCount) + ) + .on( + 'close', + common.mustCall(() => { + assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\r\n/g, '\n') : modelData) + }) + ) + }) +) +/* replacement start */ - let streamedData = ''; - rl.createInterface({ - input: inputWithBOM, - }).on('line', common.mustCall((line) => { - streamedData += `${line}\n`; - }, lineCount)).on('close', common.mustCall(() => { - assert.strictEqual(streamedData, process.platform === 'win32' ? 
modelData.replace(/\r\n/g, '\n') : modelData); - })); -})); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-promises.js b/test/parallel/test-stream-promises.js index d5af56f6f6..eb050f6a4d 100644 --- a/test/parallel/test-stream-promises.js +++ b/test/parallel/test-stream-promises.js @@ -1,118 +1,109 @@ +'use strict' - 'use strict' - - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; - -const common = require('../common'); -const stream = require('../../lib/ours/index'); -const { - Readable, - Writable, - promises, -} = stream; -const { - finished, - pipeline, -} = require('../../lib/stream/promises'); -const fs = require('fs'); -const assert = require('assert'); -const { promisify } = require('util'); - -assert.strictEqual(promises.pipeline, pipeline); -assert.strictEqual(promises.finished, finished); -assert.strictEqual(pipeline, promisify(stream.pipeline)); -assert.strictEqual(finished, promisify(stream.finished)); - -// pipeline success -{ - let finished = false; - const processed = []; - const expected = [ - Buffer.from('a'), - Buffer.from('b'), - Buffer.from('c'), - ]; +const tap = require('tap') - const read = new Readable({ - read() { } - }); +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const stream = require('../../lib/ours/index') + +const { Readable, Writable, promises } = stream +const { finished, pipeline } = require('../../lib/stream/promises') + +const fs = require('fs') + +const assert = require('assert') + +const { promisify } = require('util') + +assert.strictEqual(promises.pipeline, pipeline) +assert.strictEqual(promises.finished, finished) +assert.strictEqual(pipeline, promisify(stream.pipeline)) +assert.strictEqual(finished, promisify(stream.finished)) // pipeline success + +{ + let finished = false + const processed = [] + const expected = [Buffer.from('a'), Buffer.from('b'), Buffer.from('c')] + const read = new Readable({ + read() {} + }) const write = new Writable({ write(data, enc, cb) { - processed.push(data); - cb(); + processed.push(data) + cb() } - }); - + }) write.on('finish', () => { - finished = true; - }); + finished = true + }) for (let i = 0; i < expected.length; i++) { - read.push(expected[i]); + read.push(expected[i]) } - read.push(null); - pipeline(read, write).then(common.mustCall((value) => { - assert.ok(finished); - assert.deepStrictEqual(processed, expected); - })); -} + read.push(null) + pipeline(read, write).then( + common.mustCall((value) => { + assert.ok(finished) + assert.deepStrictEqual(processed, expected) + }) + ) +} // pipeline error -// pipeline error { const read = new Readable({ - read() { } - }); - + read() {} + }) const write = new Writable({ write(data, enc, cb) { - cb(); + cb() } - }); - - read.push('data'); - setImmediate(() => read.destroy()); - - pipeline(read, write).catch(common.mustCall((err) => { - assert.ok(err, 'should have an error'); - })); -} + }) + read.push('data') + setImmediate(() => read.destroy()) + pipeline(read, write).catch( + common.mustCall((err) => { + assert.ok(err, 'should have an error') + }) + ) +} // finished success -// finished success { async 
function run() { - const rs = fs.createReadStream(__filename); - - let ended = false; - rs.resume(); + const rs = fs.createReadStream(__filename) + let ended = false + rs.resume() rs.on('end', () => { - ended = true; - }); - await finished(rs); - assert(ended); + ended = true + }) + await finished(rs) + assert(ended) } - run().then(common.mustCall()); -} + run().then(common.mustCall()) +} // finished error -// finished error { - const rs = fs.createReadStream('file-does-not-exist'); - - assert.rejects(finished(rs), { - code: 'ENOENT' - }).then(common.mustCall()); + const rs = fs.createReadStream('file-does-not-exist') + assert + .rejects(finished(rs), { + code: 'ENOENT' + }) + .then(common.mustCall()) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-push-order.js b/test/parallel/test-stream-push-order.js index 144e64310c..d500718981 100644 --- a/test/parallel/test-stream-push-order.js +++ b/test/parallel/test-stream-push-order.js @@ -18,50 +18,52 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const Readable = require('../../lib/ours/index').Readable; -const assert = require('assert'); + error() {} +} +require('../common') + +const Readable = require('../../lib/ours/index').Readable + +const assert = require('assert') const s = new Readable({ highWaterMark: 20, encoding: 'ascii' -}); +}) +const list = ['1', '2', '3', '4', '5', '6'] -const list = ['1', '2', '3', '4', '5', '6']; +s._read = function (n) { + const one = list.shift() -s._read = function(n) { - const one = list.shift(); if (!one) { - s.push(null); + s.push(null) } else { - const two = list.shift(); - s.push(one); - s.push(two); + const two = list.shift() + s.push(one) + s.push(two) } -}; - -s.read(0); +} -// ACTUALLY [1, 3, 5, 6, 4, 2] +s.read(0) // ACTUALLY [1, 3, 5, 6, 4, 2] -process.on('exit', function() { - assert.strictEqual(s.readableBuffer.join(','), '1,2,3,4,5,6'); - silentConsole.log('ok'); -}); +process.on('exit', function () { + assert.strictEqual(s.readableBuffer.join(','), '1,2,3,4,5,6') + silentConsole.log('ok') +}) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-push-strings.js b/test/parallel/test-stream-push-strings.js index 3d56745e2b..bb80a8c0f5 100644 --- a/test/parallel/test-stream-push-strings.js +++ b/test/parallel/test-stream-push-strings.js @@ -18,65 +18,71 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN 
CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); + error() {} +} +require('../common') + +const assert = require('assert') -const Readable = require('../../lib/ours/index').Readable; +const Readable = require('../../lib/ours/index').Readable class MyStream extends Readable { constructor(options) { - super(options); - this._chunks = 3; + super(options) + this._chunks = 3 } _read(n) { switch (this._chunks--) { case 0: - return this.push(null); + return this.push(null) + case 1: return setTimeout(() => { - this.push('last chunk'); - }, 100); + this.push('last chunk') + }, 100) + case 2: - return this.push('second to last chunk'); + return this.push('second to last chunk') + case 3: return process.nextTick(() => { - this.push('first chunk'); - }); + this.push('first chunk') + }) + default: - throw new Error('?'); + throw new Error('?') } } } -const ms = new MyStream(); -const results = []; -ms.on('readable', function() { - let chunk; - while (null !== (chunk = ms.read())) - results.push(String(chunk)); -}); +const ms = new MyStream() +const results = [] +ms.on('readable', function () { + let chunk -const expect = [ 'first chunksecond to last chunk', 'last chunk' ]; -process.on('exit', function() { - assert.strictEqual(ms._chunks, -1); - assert.deepStrictEqual(results, expect); - silentConsole.log('ok'); -}); + while (null !== (chunk = ms.read())) results.push(String(chunk)) +}) +const expect = ['first chunksecond to last chunk', 'last chunk'] +process.on('exit', function () { + assert.strictEqual(ms._chunks, -1) + assert.deepStrictEqual(results, expect) + silentConsole.log('ok') +}) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-aborted.js b/test/parallel/test-stream-readable-aborted.js index 56d07c9195..4d271d5942 100644 --- a/test/parallel/test-stream-readable-aborted.js +++ b/test/parallel/test-stream-readable-aborted.js @@ -1,81 +1,81 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const { Readable, Duplex } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Readable, Duplex } = require('../../lib/ours/index') { const readable = new Readable({ - read() { - } - }); - assert.strictEqual(readable.readableAborted, false); - readable.destroy(); - assert.strictEqual(readable.readableAborted, true); + read() {} + }) + assert.strictEqual(readable.readableAborted, false) + readable.destroy() + assert.strictEqual(readable.readableAborted, true) } - { const readable = new Readable({ - read() { - } - }); - assert.strictEqual(readable.readableAborted, false); - readable.push(null); - readable.destroy(); - assert.strictEqual(readable.readableAborted, 
true); + read() {} + }) + assert.strictEqual(readable.readableAborted, false) + readable.push(null) + readable.destroy() + assert.strictEqual(readable.readableAborted, true) } - { const readable = new Readable({ - read() { - } - }); - assert.strictEqual(readable.readableAborted, false); - readable.push('asd'); - readable.destroy(); - assert.strictEqual(readable.readableAborted, true); + read() {} + }) + assert.strictEqual(readable.readableAborted, false) + readable.push('asd') + readable.destroy() + assert.strictEqual(readable.readableAborted, true) } - { const readable = new Readable({ - read() { - } - }); - assert.strictEqual(readable.readableAborted, false); - readable.push('asd'); - readable.push(null); - assert.strictEqual(readable.readableAborted, false); - readable.on('end', common.mustCall(() => { - assert.strictEqual(readable.readableAborted, false); - readable.destroy(); - assert.strictEqual(readable.readableAborted, false); - queueMicrotask(() => { - assert.strictEqual(readable.readableAborted, false); - }); - })); - readable.resume(); + read() {} + }) + assert.strictEqual(readable.readableAborted, false) + readable.push('asd') + readable.push(null) + assert.strictEqual(readable.readableAborted, false) + readable.on( + 'end', + common.mustCall(() => { + assert.strictEqual(readable.readableAborted, false) + readable.destroy() + assert.strictEqual(readable.readableAborted, false) + queueMicrotask(() => { + assert.strictEqual(readable.readableAborted, false) + }) + }) + ) + readable.resume() } - { const duplex = new Duplex({ readable: false, + write() {} - }); - duplex.destroy(); - assert.strictEqual(duplex.readableAborted, false); + }) + duplex.destroy() + assert.strictEqual(duplex.readableAborted, false) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-add-chunk-during-data.js b/test/parallel/test-stream-readable-add-chunk-during-data.js index 135e09031e..6728287753 100644 --- a/test/parallel/test-stream-readable-add-chunk-during-data.js +++ b/test/parallel/test-stream-readable-add-chunk-during-data.js @@ -1,36 +1,45 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { Readable } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -// Verify that .push() and .unshift() can be called from 'data' listeners. + error() {} +} +const common = require('../common') -for (const method of ['push', 'unshift']) { - const r = new Readable({ read() {} }); - r.once('data', common.mustCall((chunk) => { - assert.strictEqual(r.readableLength, 0); - r[method](chunk); - assert.strictEqual(r.readableLength, chunk.length); +const assert = require('assert') - r.on('data', common.mustCall((chunk) => { - assert.strictEqual(chunk.toString(), 'Hello, world'); - })); - })); +const { Readable } = require('../../lib/ours/index') // Verify that .push() and .unshift() can be called from 'data' listeners. 
- r.push('Hello, world'); +for (const method of ['push', 'unshift']) { + const r = new Readable({ + read() {} + }) + r.once( + 'data', + common.mustCall((chunk) => { + assert.strictEqual(r.readableLength, 0) + r[method](chunk) + assert.strictEqual(r.readableLength, chunk.length) + r.on( + 'data', + common.mustCall((chunk) => { + assert.strictEqual(chunk.toString(), 'Hello, world') + }) + ) + }) + ) + r.push('Hello, world') } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-constructor-set-methods.js b/test/parallel/test-stream-readable-constructor-set-methods.js index 9c15ab26ab..cd5bb31fdd 100644 --- a/test/parallel/test-stream-readable-constructor-set-methods.js +++ b/test/parallel/test-stream-readable-constructor-set-methods.js @@ -1,26 +1,31 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); +const silentConsole = { + log() {}, -const Readable = require('../../lib/ours/index').Readable; + error() {} +} +const common = require('../common') + +const Readable = require('../../lib/ours/index').Readable const _read = common.mustCall(function _read(n) { - this.push(null); -}); + this.push(null) +}) -const r = new Readable({ read: _read }); -r.resume(); +const r = new Readable({ + read: _read +}) +r.resume() +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-data.js b/test/parallel/test-stream-readable-data.js index eef9a7df7e..80db4f024c 100644 --- a/test/parallel/test-stream-readable-data.js +++ b/test/parallel/test-stream-readable-data.js @@ -1,34 +1,36 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); +const silentConsole = { + log() {}, -const { Readable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const { Readable } = require('../../lib/ours/index') const readable = new Readable({ read() {} -}); +}) function read() {} -readable.setEncoding('utf8'); -readable.on('readable', read); -readable.removeListener('readable', read); - -process.nextTick(function() { - readable.on('data', common.mustCall()); - readable.push('hello'); -}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +readable.setEncoding('utf8') +readable.on('readable', read) +readable.removeListener('readable', read) +process.nextTick(function () { + readable.on('data', common.mustCall()) + readable.push('hello') +}) +/* replacement start */ + 
+process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-destroy.js b/test/parallel/test-stream-readable-destroy.js index 6bb37e0ab6..c29282cdbc 100644 --- a/test/parallel/test-stream-readable-destroy.js +++ b/test/parallel/test-stream-readable-destroy.js @@ -1,420 +1,416 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Readable, addAbortSignal } = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const { Readable, addAbortSignal } = require('../../lib/ours/index') + +const assert = require('assert') { const read = new Readable({ read() {} - }); - read.resume(); - - read.on('close', common.mustCall()); - - read.destroy(); - assert.strictEqual(read.errored, null); - assert.strictEqual(read.destroyed, true); + }) + read.resume() + read.on('close', common.mustCall()) + read.destroy() + assert.strictEqual(read.errored, null) + assert.strictEqual(read.destroyed, true) } - { const read = new Readable({ read() {} - }); - read.resume(); - - const expected = new Error('kaboom'); - - read.on('end', common.mustNotCall('no end event')); - read.on('close', common.mustCall()); - read.on('error', common.mustCall((err) => { - assert.strictEqual(err, expected); - })); - - read.destroy(expected); - assert.strictEqual(read.errored, expected); - assert.strictEqual(read.destroyed, true); + }) + read.resume() + const expected = new Error('kaboom') + read.on('end', common.mustNotCall('no end event')) + read.on('close', common.mustCall()) + read.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, expected) + }) + ) + read.destroy(expected) + assert.strictEqual(read.errored, expected) + assert.strictEqual(read.destroyed, true) } - { const read = new Readable({ read() {} - }); - - read._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, expected); - cb(err); - }); - - const expected = new Error('kaboom'); - - read.on('end', common.mustNotCall('no end event')); - read.on('close', common.mustCall()); - read.on('error', common.mustCall((err) => { - assert.strictEqual(err, expected); - })); - - read.destroy(expected); - assert.strictEqual(read.destroyed, true); + }) + read._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, expected) + cb(err) + }) + const expected = new Error('kaboom') + read.on('end', common.mustNotCall('no end event')) + read.on('close', common.mustCall()) + read.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, expected) + }) + ) + read.destroy(expected) + assert.strictEqual(read.destroyed, true) } - { const read = new Readable({ read() {}, - destroy: common.mustCall(function(err, cb) { - assert.strictEqual(err, expected); - cb(); - }) - }); - - const expected = new Error('kaboom'); - - read.on('end', common.mustNotCall('no end event')); - - // Error is swallowed by the custom _destroy - read.on('error', common.mustNotCall('no error event')); - read.on('close', common.mustCall()); - read.destroy(expected); - assert.strictEqual(read.destroyed, true); + destroy: common.mustCall(function (err, cb) { + assert.strictEqual(err, expected) + cb() + }) + }) + const expected = new Error('kaboom') + read.on('end', 
common.mustNotCall('no end event')) // Error is swallowed by the custom _destroy + + read.on('error', common.mustNotCall('no error event')) + read.on('close', common.mustCall()) + read.destroy(expected) + assert.strictEqual(read.destroyed, true) } - { const read = new Readable({ read() {} - }); - - read._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, null); - cb(); - }); - - read.destroy(); - assert.strictEqual(read.destroyed, true); + }) + read._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, null) + cb() + }) + read.destroy() + assert.strictEqual(read.destroyed, true) } - { const read = new Readable({ read() {} - }); - read.resume(); - - read._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, null); + }) + read.resume() + read._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, null) process.nextTick(() => { - this.push(null); - cb(); - }); - }); - - const fail = common.mustNotCall('no end event'); - - read.on('end', fail); - read.on('close', common.mustCall()); - - read.destroy(); - - read.removeListener('end', fail); - read.on('end', common.mustNotCall()); - assert.strictEqual(read.destroyed, true); + this.push(null) + cb() + }) + }) + const fail = common.mustNotCall('no end event') + read.on('end', fail) + read.on('close', common.mustCall()) + read.destroy() + read.removeListener('end', fail) + read.on('end', common.mustNotCall()) + assert.strictEqual(read.destroyed, true) } - { const read = new Readable({ read() {} - }); - - const expected = new Error('kaboom'); - - read._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, null); - cb(expected); - }); - - let ticked = false; - read.on('end', common.mustNotCall('no end event')); - read.on('error', common.mustCall((err) => { - assert.strictEqual(ticked, true); - assert.strictEqual(read._readableState.errorEmitted, true); - assert.strictEqual(read._readableState.errored, expected); - assert.strictEqual(err, expected); - })); - - read.destroy(); - assert.strictEqual(read._readableState.errorEmitted, false); - assert.strictEqual(read._readableState.errored, expected); - assert.strictEqual(read.destroyed, true); - ticked = true; + }) + const expected = new Error('kaboom') + read._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, null) + cb(expected) + }) + let ticked = false + read.on('end', common.mustNotCall('no end event')) + read.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(ticked, true) + assert.strictEqual(read._readableState.errorEmitted, true) + assert.strictEqual(read._readableState.errored, expected) + assert.strictEqual(err, expected) + }) + ) + read.destroy() + assert.strictEqual(read._readableState.errorEmitted, false) + assert.strictEqual(read._readableState.errored, expected) + assert.strictEqual(read.destroyed, true) + ticked = true } - { const read = new Readable({ read() {} - }); - read.resume(); + }) + read.resume() + read.destroyed = true + assert.strictEqual(read.destroyed, true) // The internal destroy() mechanism should not be triggered - read.destroyed = true; - assert.strictEqual(read.destroyed, true); - - // The internal destroy() mechanism should not be triggered - read.on('end', common.mustNotCall()); - read.destroy(); + read.on('end', common.mustNotCall()) + read.destroy() } - { function MyReadable() { - assert.strictEqual(this.destroyed, false); - this.destroyed = false; - Readable.call(this); + assert.strictEqual(this.destroyed, false) + this.destroyed = 
false + Readable.call(this) } - Object.setPrototypeOf(MyReadable.prototype, Readable.prototype); - Object.setPrototypeOf(MyReadable, Readable); - - new MyReadable(); + Object.setPrototypeOf(MyReadable.prototype, Readable.prototype) + Object.setPrototypeOf(MyReadable, Readable) + new MyReadable() } - { // Destroy and destroy callback const read = new Readable({ read() {} - }); - read.resume(); - - const expected = new Error('kaboom'); - - let ticked = false; - read.on('close', common.mustCall(() => { - assert.strictEqual(read._readableState.errorEmitted, true); - assert.strictEqual(ticked, true); - })); - read.on('error', common.mustCall((err) => { - assert.strictEqual(err, expected); - })); - - assert.strictEqual(read._readableState.errored, null); - assert.strictEqual(read._readableState.errorEmitted, false); - - read.destroy(expected, common.mustCall(function(err) { - assert.strictEqual(read._readableState.errored, expected); - assert.strictEqual(err, expected); - })); - assert.strictEqual(read._readableState.errorEmitted, false); - assert.strictEqual(read._readableState.errored, expected); - ticked = true; + }) + read.resume() + const expected = new Error('kaboom') + let ticked = false + read.on( + 'close', + common.mustCall(() => { + assert.strictEqual(read._readableState.errorEmitted, true) + assert.strictEqual(ticked, true) + }) + ) + read.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, expected) + }) + ) + assert.strictEqual(read._readableState.errored, null) + assert.strictEqual(read._readableState.errorEmitted, false) + read.destroy( + expected, + common.mustCall(function (err) { + assert.strictEqual(read._readableState.errored, expected) + assert.strictEqual(err, expected) + }) + ) + assert.strictEqual(read._readableState.errorEmitted, false) + assert.strictEqual(read._readableState.errored, expected) + ticked = true } - { const readable = new Readable({ - destroy: common.mustCall(function(err, cb) { - process.nextTick(cb, new Error('kaboom 1')); + destroy: common.mustCall(function (err, cb) { + process.nextTick(cb, new Error('kaboom 1')) }), - read() {} - }); - - let ticked = false; - readable.on('close', common.mustCall(() => { - assert.strictEqual(ticked, true); - assert.strictEqual(readable._readableState.errorEmitted, true); - })); - readable.on('error', common.mustCall((err) => { - assert.strictEqual(ticked, true); - assert.strictEqual(err.message, 'kaboom 1'); - assert.strictEqual(readable._readableState.errorEmitted, true); - })); - readable.destroy(); - assert.strictEqual(readable.destroyed, true); - assert.strictEqual(readable._readableState.errored, null); - assert.strictEqual(readable._readableState.errorEmitted, false); - - // Test case where `readable.destroy()` is called again with an error before + read() {} + }) + let ticked = false + readable.on( + 'close', + common.mustCall(() => { + assert.strictEqual(ticked, true) + assert.strictEqual(readable._readableState.errorEmitted, true) + }) + ) + readable.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(ticked, true) + assert.strictEqual(err.message, 'kaboom 1') + assert.strictEqual(readable._readableState.errorEmitted, true) + }) + ) + readable.destroy() + assert.strictEqual(readable.destroyed, true) + assert.strictEqual(readable._readableState.errored, null) + assert.strictEqual(readable._readableState.errorEmitted, false) // Test case where `readable.destroy()` is called again with an error before // the `_destroy()` callback is called. 
- readable.destroy(new Error('kaboom 2')); - assert.strictEqual(readable._readableState.errorEmitted, false); - assert.strictEqual(readable._readableState.errored, null); - ticked = true; + readable.destroy(new Error('kaboom 2')) + assert.strictEqual(readable._readableState.errorEmitted, false) + assert.strictEqual(readable._readableState.errored, null) + ticked = true } - { const read = new Readable({ read() {} - }); - - read.destroy(); - read.push('hi'); - read.on('data', common.mustNotCall()); + }) + read.destroy() + read.push('hi') + read.on('data', common.mustNotCall()) } - { const read = new Readable({ - read: common.mustNotCall(function() {}) - }); - read.destroy(); - assert.strictEqual(read.destroyed, true); - read.read(); + read: common.mustNotCall(function () {}) + }) + read.destroy() + assert.strictEqual(read.destroyed, true) + read.read() } - { const read = new Readable({ autoDestroy: false, + read() { - this.push(null); - this.push('asd'); + this.push(null) + this.push('asd') } - }); - - read.on('error', common.mustCall(() => { - assert(read._readableState.errored); - })); - read.resume(); + }) + read.on( + 'error', + common.mustCall(() => { + assert(read._readableState.errored) + }) + ) + read.resume() } - { - const controller = new AbortController(); - const read = addAbortSignal(controller.signal, new Readable({ - read() { - this.push('asd'); - }, - })); - - read.on('error', common.mustCall((e) => { - assert.strictEqual(e.name, 'AbortError'); - })); - controller.abort(); - read.on('data', common.mustNotCall()); + const controller = new AbortController() + const read = addAbortSignal( + controller.signal, + new Readable({ + read() { + this.push('asd') + } + }) + ) + read.on( + 'error', + common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError') + }) + ) + controller.abort() + read.on('data', common.mustNotCall()) } - { - const controller = new AbortController(); + const controller = new AbortController() const read = new Readable({ signal: controller.signal, - read() { - this.push('asd'); - }, - }); - - read.on('error', common.mustCall((e) => { - assert.strictEqual(e.name, 'AbortError'); - })); - controller.abort(); - read.on('data', common.mustNotCall()); -} -{ - const controller = new AbortController(); - const read = addAbortSignal(controller.signal, new Readable({ - objectMode: true, read() { - return false; + this.push('asd') } - })); - read.push('asd'); - - read.on('error', common.mustCall((e) => { - assert.strictEqual(e.name, 'AbortError'); - })); - assert.rejects((async () => { - // eslint-disable-next-line no-unused-vars, no-empty - for await (const chunk of read) { } - })(), /AbortError/); - setTimeout(() => controller.abort(), 0); + }) + read.on( + 'error', + common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError') + }) + ) + controller.abort() + read.on('data', common.mustNotCall()) +} +{ + const controller = new AbortController() + const read = addAbortSignal( + controller.signal, + new Readable({ + objectMode: true, + + read() { + return false + } + }) + ) + read.push('asd') + read.on( + 'error', + common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError') + }) + ) + assert.rejects( + (async () => { + // eslint-disable-next-line no-unused-vars, no-empty + for await (const chunk of read) { + } + })(), + /AbortError/ + ) + setTimeout(() => controller.abort(), 0) } - { const read = new Readable({ - read() { - }, - }); - - read.on('data', common.mustNotCall()); - read.on('error', common.mustCall((e) => { - read.push('asd'); - 
read.read(); - })); - read.on('close', common.mustCall((e) => { - read.push('asd'); - read.read(); - })); - read.destroy(new Error('asd')); + read() {} + }) + read.on('data', common.mustNotCall()) + read.on( + 'error', + common.mustCall((e) => { + read.push('asd') + read.read() + }) + ) + read.on( + 'close', + common.mustCall((e) => { + read.push('asd') + read.read() + }) + ) + read.destroy(new Error('asd')) } - { const read = new Readable({ - read() { - }, - }); - - read.on('data', common.mustNotCall()); - read.on('close', common.mustCall((e) => { - read.push('asd'); - read.read(); - })); - read.destroy(); + read() {} + }) + read.on('data', common.mustNotCall()) + read.on( + 'close', + common.mustCall((e) => { + read.push('asd') + read.read() + }) + ) + read.destroy() } - { const read = new Readable({ - read() { - }, - }); - - read.on('data', common.mustNotCall()); - read.on('close', common.mustCall((e) => { - read.push('asd'); - read.unshift('asd'); - })); - read.destroy(); + read() {} + }) + read.on('data', common.mustNotCall()) + read.on( + 'close', + common.mustCall((e) => { + read.push('asd') + read.unshift('asd') + }) + ) + read.destroy() } - { const read = new Readable({ - read() { - }, - }); - - read.on('data', common.mustNotCall()); - read.destroy(); - read.unshift('asd'); + read() {} + }) + read.on('data', common.mustNotCall()) + read.destroy() + read.unshift('asd') } - { const read = new Readable({ - read() { - }, - }); - - read.resume(); - read.on('data', common.mustNotCall()); - read.on('close', common.mustCall((e) => { - read.push('asd'); - })); - read.destroy(); + read() {} + }) + read.resume() + read.on('data', common.mustNotCall()) + read.on( + 'close', + common.mustCall((e) => { + read.push('asd') + }) + ) + read.destroy() } - { const read = new Readable({ - read() { - }, - }); - - read.on('data', common.mustNotCall()); - read.destroy(); - read.push('asd'); + read() {} + }) + read.on('data', common.mustNotCall()) + read.destroy() + read.push('asd') } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-didRead.js b/test/parallel/test-stream-readable-didRead.js index 730da10106..96266e1f27 100644 --- a/test/parallel/test-stream-readable-didRead.js +++ b/test/parallel/test-stream-readable-didRead.js @@ -1,126 +1,131 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { isDisturbed, isErrored, Readable } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { isDisturbed, isErrored, Readable } = require('../../lib/ours/index') function noop() {} function check(readable, data, fn) { - assert.strictEqual(readable.readableDidRead, false); - assert.strictEqual(isDisturbed(readable), false); - assert.strictEqual(isErrored(readable), false); + assert.strictEqual(readable.readableDidRead, false) + assert.strictEqual(isDisturbed(readable), false) + assert.strictEqual(isErrored(readable), false) + if 
(data === -1) { - readable.on('error', common.mustCall(() => { - assert.strictEqual(isErrored(readable), true); - })); - readable.on('data', common.mustNotCall()); - readable.on('end', common.mustNotCall()); + readable.on( + 'error', + common.mustCall(() => { + assert.strictEqual(isErrored(readable), true) + }) + ) + readable.on('data', common.mustNotCall()) + readable.on('end', common.mustNotCall()) } else { - readable.on('error', common.mustNotCall()); + readable.on('error', common.mustNotCall()) + if (data === -2) { - readable.on('end', common.mustNotCall()); + readable.on('end', common.mustNotCall()) } else { - readable.on('end', common.mustCall()); + readable.on('end', common.mustCall()) } + if (data > 0) { - readable.on('data', common.mustCallAtLeast(data)); + readable.on('data', common.mustCallAtLeast(data)) } else { - readable.on('data', common.mustNotCall()); + readable.on('data', common.mustNotCall()) } } - readable.on('close', common.mustCall()); - fn(); + + readable.on('close', common.mustCall()) + fn() setImmediate(() => { - assert.strictEqual(readable.readableDidRead, data > 0); + assert.strictEqual(readable.readableDidRead, data > 0) + if (data > 0) { - assert.strictEqual(isDisturbed(readable), true); + assert.strictEqual(isDisturbed(readable), true) } - }); + }) } { const readable = new Readable({ read() { - this.push(null); + this.push(null) } - }); + }) check(readable, 0, () => { - readable.read(); - }); + readable.read() + }) } - { const readable = new Readable({ read() { - this.push(null); + this.push(null) } - }); + }) check(readable, 0, () => { - readable.resume(); - }); + readable.resume() + }) } - { const readable = new Readable({ read() { - this.push(null); + this.push(null) } - }); + }) check(readable, -2, () => { - readable.destroy(); - }); + readable.destroy() + }) } - { const readable = new Readable({ read() { - this.push(null); + this.push(null) } - }); - + }) check(readable, -1, () => { - readable.destroy(new Error()); - }); + readable.destroy(new Error()) + }) } - { const readable = new Readable({ read() { - this.push('data'); - this.push(null); + this.push('data') + this.push(null) } - }); - + }) check(readable, 1, () => { - readable.on('data', noop); - }); + readable.on('data', noop) + }) } - { const readable = new Readable({ read() { - this.push('data'); - this.push(null); + this.push('data') + this.push(null) } - }); - + }) check(readable, 1, () => { - readable.on('data', noop); - readable.off('data', noop); - }); + readable.on('data', noop) + readable.off('data', noop) + }) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-emit-readable-short-stream.js b/test/parallel/test-stream-readable-emit-readable-short-stream.js index c0ccb3b161..72834baa01 100644 --- a/test/parallel/test-stream-readable-emit-readable-short-stream.js +++ b/test/parallel/test-stream-readable-emit-readable-short-stream.js @@ -1,161 +1,157 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const stream = 
require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const stream = require('../../lib/ours/index') + +const assert = require('assert') { const r = new stream.Readable({ - read: common.mustCall(function() { - this.push('content'); - this.push(null); + read: common.mustCall(function () { + this.push('content') + this.push(null) }) - }); - + }) const t = new stream.Transform({ - transform: common.mustCall(function(chunk, encoding, callback) { - this.push(chunk); - return callback(); + transform: common.mustCall(function (chunk, encoding, callback) { + this.push(chunk) + return callback() }), - flush: common.mustCall(function(callback) { - return callback(); + flush: common.mustCall(function (callback) { + return callback() }) - }); - - r.pipe(t); - t.on('readable', common.mustCall(function() { - while (true) { - const chunk = t.read(); - if (!chunk) - break; - - assert.strictEqual(chunk.toString(), 'content'); - } - }, 2)); + }) + r.pipe(t) + t.on( + 'readable', + common.mustCall(function () { + while (true) { + const chunk = t.read() + if (!chunk) break + assert.strictEqual(chunk.toString(), 'content') + } + }, 2) + ) } - { const t = new stream.Transform({ - transform: common.mustCall(function(chunk, encoding, callback) { - this.push(chunk); - return callback(); + transform: common.mustCall(function (chunk, encoding, callback) { + this.push(chunk) + return callback() }), - flush: common.mustCall(function(callback) { - return callback(); + flush: common.mustCall(function (callback) { + return callback() }) - }); - - t.end('content'); - - t.on('readable', common.mustCall(function() { - while (true) { - const chunk = t.read(); - if (!chunk) - break; - assert.strictEqual(chunk.toString(), 'content'); - } - })); + }) + t.end('content') + t.on( + 'readable', + common.mustCall(function () { + while (true) { + const chunk = t.read() + if (!chunk) break + assert.strictEqual(chunk.toString(), 'content') + } + }) + ) } - { const t = new stream.Transform({ - transform: common.mustCall(function(chunk, encoding, callback) { - this.push(chunk); - return callback(); + transform: common.mustCall(function (chunk, encoding, callback) { + this.push(chunk) + return callback() }), - flush: common.mustCall(function(callback) { - return callback(); + flush: common.mustCall(function (callback) { + return callback() }) - }); - - t.write('content'); - t.end(); - - t.on('readable', common.mustCall(function() { - while (true) { - const chunk = t.read(); - if (!chunk) - break; - assert.strictEqual(chunk.toString(), 'content'); - } - })); + }) + t.write('content') + t.end() + t.on( + 'readable', + common.mustCall(function () { + while (true) { + const chunk = t.read() + if (!chunk) break + assert.strictEqual(chunk.toString(), 'content') + } + }) + ) } - { const t = new stream.Readable({ - read() { - } - }); - - t.on('readable', common.mustCall(function() { - while (true) { - const chunk = t.read(); - if (!chunk) - break; - assert.strictEqual(chunk.toString(), 'content'); - } - })); - - t.push('content'); - t.push(null); + read() {} + }) + t.on( + 'readable', + common.mustCall(function () { + while (true) { + const chunk = t.read() + if (!chunk) break + assert.strictEqual(chunk.toString(), 'content') + } + }) + ) + t.push('content') + t.push(null) } - { const t = new stream.Readable({ - read() { - } - }); - - t.on('readable', common.mustCall(function() { - while (true) { - const chunk = t.read(); - if (!chunk) - break; - 
assert.strictEqual(chunk.toString(), 'content'); - } - }, 2)); - + read() {} + }) + t.on( + 'readable', + common.mustCall(function () { + while (true) { + const chunk = t.read() + if (!chunk) break + assert.strictEqual(chunk.toString(), 'content') + } + }, 2) + ) process.nextTick(() => { - t.push('content'); - t.push(null); - }); + t.push('content') + t.push(null) + }) } - { const t = new stream.Transform({ - transform: common.mustCall(function(chunk, encoding, callback) { - this.push(chunk); - return callback(); + transform: common.mustCall(function (chunk, encoding, callback) { + this.push(chunk) + return callback() }), - flush: common.mustCall(function(callback) { - return callback(); + flush: common.mustCall(function (callback) { + return callback() }) - }); - - t.on('readable', common.mustCall(function() { - while (true) { - const chunk = t.read(); - if (!chunk) - break; - assert.strictEqual(chunk.toString(), 'content'); - } - }, 2)); - - t.write('content'); - t.end(); + }) + t.on( + 'readable', + common.mustCall(function () { + while (true) { + const chunk = t.read() + if (!chunk) break + assert.strictEqual(chunk.toString(), 'content') + } + }, 2) + ) + t.write('content') + t.end() } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-emittedReadable.js b/test/parallel/test-stream-readable-emittedReadable.js index 57f0b82593..34970f6e88 100644 --- a/test/parallel/test-stream-readable-emittedReadable.js +++ b/test/parallel/test-stream-readable-emittedReadable.js @@ -1,88 +1,101 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const Readable = require('../../lib/ours/index').Readable; +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const Readable = require('../../lib/ours/index').Readable const readable = new Readable({ read: () => {} -}); - -// Initialized to false. -assert.strictEqual(readable._readableState.emittedReadable, false); +}) // Initialized to false. 
-const expected = [Buffer.from('foobar'), Buffer.from('quo'), null]; -readable.on('readable', common.mustCall(() => { - // emittedReadable should be true when the readable event is emitted - assert.strictEqual(readable._readableState.emittedReadable, true); - assert.deepStrictEqual(readable.read(), expected.shift()); - // emittedReadable is reset to false during read() - assert.strictEqual(readable._readableState.emittedReadable, false); -}, 3)); +assert.strictEqual(readable._readableState.emittedReadable, false) +const expected = [Buffer.from('foobar'), Buffer.from('quo'), null] +readable.on( + 'readable', + common.mustCall(() => { + // emittedReadable should be true when the readable event is emitted + assert.strictEqual(readable._readableState.emittedReadable, true) + assert.deepStrictEqual(readable.read(), expected.shift()) // emittedReadable is reset to false during read() -// When the first readable listener is just attached, + assert.strictEqual(readable._readableState.emittedReadable, false) + }, 3) +) // When the first readable listener is just attached, // emittedReadable should be false -assert.strictEqual(readable._readableState.emittedReadable, false); -// These trigger a single 'readable', as things are batched up -process.nextTick(common.mustCall(() => { - readable.push('foo'); -})); -process.nextTick(common.mustCall(() => { - readable.push('bar'); -})); +assert.strictEqual(readable._readableState.emittedReadable, false) // These trigger a single 'readable', as things are batched up -// These triggers two readable events -setImmediate(common.mustCall(() => { - readable.push('quo'); - process.nextTick(common.mustCall(() => { - readable.push(null); - })); -})); +process.nextTick( + common.mustCall(() => { + readable.push('foo') + }) +) +process.nextTick( + common.mustCall(() => { + readable.push('bar') + }) +) // These triggers two readable events +setImmediate( + common.mustCall(() => { + readable.push('quo') + process.nextTick( + common.mustCall(() => { + readable.push(null) + }) + ) + }) +) const noRead = new Readable({ read: () => {} -}); - -noRead.on('readable', common.mustCall(() => { - // emittedReadable should be true when the readable event is emitted - assert.strictEqual(noRead._readableState.emittedReadable, true); - noRead.read(0); - // emittedReadable is not reset during read(0) - assert.strictEqual(noRead._readableState.emittedReadable, true); -})); - -noRead.push('foo'); -noRead.push(null); +}) +noRead.on( + 'readable', + common.mustCall(() => { + // emittedReadable should be true when the readable event is emitted + assert.strictEqual(noRead._readableState.emittedReadable, true) + noRead.read(0) // emittedReadable is not reset during read(0) + assert.strictEqual(noRead._readableState.emittedReadable, true) + }) +) +noRead.push('foo') +noRead.push(null) const flowing = new Readable({ read: () => {} -}); - -flowing.on('data', common.mustCall(() => { - // When in flowing mode, emittedReadable is always false. - assert.strictEqual(flowing._readableState.emittedReadable, false); - flowing.read(); - assert.strictEqual(flowing._readableState.emittedReadable, false); -}, 3)); - -flowing.push('foooo'); -flowing.push('bar'); -flowing.push('quo'); -process.nextTick(common.mustCall(() => { - flowing.push(null); -})); +}) +flowing.on( + 'data', + common.mustCall(() => { + // When in flowing mode, emittedReadable is always false. 
+ assert.strictEqual(flowing._readableState.emittedReadable, false) + flowing.read() + assert.strictEqual(flowing._readableState.emittedReadable, false) + }, 3) +) +flowing.push('foooo') +flowing.push('bar') +flowing.push('quo') +process.nextTick( + common.mustCall(() => { + flowing.push(null) + }) +) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-end-destroyed.js b/test/parallel/test-stream-readable-end-destroyed.js index 39d888e820..9946d8db00 100644 --- a/test/parallel/test-stream-readable-end-destroyed.js +++ b/test/parallel/test-stream-readable-end-destroyed.js @@ -1,32 +1,36 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Readable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const { Readable } = require('../../lib/ours/index') { // Don't emit 'end' after 'close'. - - const r = new Readable(); - - r.on('end', common.mustNotCall()); - r.resume(); - r.destroy(); - r.on('close', common.mustCall(() => { - r.push(null); - })); + const r = new Readable() + r.on('end', common.mustNotCall()) + r.resume() + r.destroy() + r.on( + 'close', + common.mustCall(() => { + r.push(null) + }) + ) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-ended.js b/test/parallel/test-stream-readable-ended.js index a551cfb1f6..d9b1c36696 100644 --- a/test/parallel/test-stream-readable-ended.js +++ b/test/parallel/test-stream-readable-ended.js @@ -1,61 +1,67 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Readable } = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const { Readable } = require('../../lib/ours/index') + +const assert = require('assert') // basic -// basic { // Find it on Readable.prototype - assert(Reflect.has(Readable.prototype, 'readableEnded')); -} + assert(Reflect.has(Readable.prototype, 'readableEnded')) +} // event -// event { - const readable = new Readable(); + const readable = new Readable() readable._read = () => { // The state ended should start in false. 
- assert.strictEqual(readable.readableEnded, false); - readable.push('asd'); - assert.strictEqual(readable.readableEnded, false); - readable.push(null); - assert.strictEqual(readable.readableEnded, false); - }; - - readable.on('end', common.mustCall(() => { - assert.strictEqual(readable.readableEnded, true); - })); - - readable.on('data', common.mustCall(() => { - assert.strictEqual(readable.readableEnded, false); - })); -} - -// Verifies no `error` triggered on multiple .push(null) invocations -{ - const readable = new Readable(); + assert.strictEqual(readable.readableEnded, false) + readable.push('asd') + assert.strictEqual(readable.readableEnded, false) + readable.push(null) + assert.strictEqual(readable.readableEnded, false) + } - readable.on('readable', () => { readable.read(); }); - readable.on('error', common.mustNotCall()); - readable.on('end', common.mustCall()); + readable.on( + 'end', + common.mustCall(() => { + assert.strictEqual(readable.readableEnded, true) + }) + ) + readable.on( + 'data', + common.mustCall(() => { + assert.strictEqual(readable.readableEnded, false) + }) + ) +} // Verifies no `error` triggered on multiple .push(null) invocations - readable.push('a'); - readable.push(null); - readable.push(null); +{ + const readable = new Readable() + readable.on('readable', () => { + readable.read() + }) + readable.on('error', common.mustNotCall()) + readable.on('end', common.mustCall()) + readable.push('a') + readable.push(null) + readable.push(null) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-error-end.js b/test/parallel/test-stream-readable-error-end.js index a9604ece50..a914d88749 100644 --- a/test/parallel/test-stream-readable-error-end.js +++ b/test/parallel/test-stream-readable-error-end.js @@ -1,30 +1,34 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Readable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') -{ - const r = new Readable({ read() {} }); +const { Readable } = require('../../lib/ours/index') - r.on('end', common.mustNotCall()); - r.on('data', common.mustCall()); - r.on('error', common.mustCall()); - r.push('asd'); - r.push(null); - r.destroy(new Error('kaboom')); +{ + const r = new Readable({ + read() {} + }) + r.on('end', common.mustNotCall()) + r.on('data', common.mustCall()) + r.on('error', common.mustCall()) + r.push('asd') + r.push(null) + r.destroy(new Error('kaboom')) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-event.js b/test/parallel/test-stream-readable-event.js index 743cf74ad0..5e4b402d9b 100644 --- 
a/test/parallel/test-stream-readable-event.js +++ b/test/parallel/test-stream-readable-event.js @@ -18,126 +18,117 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const assert = require('assert') -const Readable = require('../../lib/ours/index').Readable; +const Readable = require('../../lib/ours/index').Readable { // First test, not reading when the readable is added. // make sure that on('readable', ...) triggers a readable event. const r = new Readable({ highWaterMark: 3 - }); - - r._read = common.mustNotCall(); + }) + r._read = common.mustNotCall() // This triggers a 'readable' event, which is lost. - // This triggers a 'readable' event, which is lost. - r.push(Buffer.from('blerg')); - - setTimeout(function() { + r.push(Buffer.from('blerg')) + setTimeout(function () { // We're testing what we think we are - assert(!r._readableState.reading); - r.on('readable', common.mustCall()); - }, 1); + assert(!r._readableState.reading) + r.on('readable', common.mustCall()) + }, 1) } - { // Second test, make sure that readable is re-emitted if there's // already a length, while it IS reading. - const r = new Readable({ highWaterMark: 3 - }); - - r._read = common.mustCall(); - - // This triggers a 'readable' event, which is lost. - r.push(Buffer.from('bl')); + }) + r._read = common.mustCall() // This triggers a 'readable' event, which is lost. - setTimeout(function() { + r.push(Buffer.from('bl')) + setTimeout(function () { // Assert we're testing what we think we are - assert(r._readableState.reading); - r.on('readable', common.mustCall()); - }, 1); + assert(r._readableState.reading) + r.on('readable', common.mustCall()) + }, 1) } - { // Third test, not reading when the stream has not passed // the highWaterMark but *has* reached EOF. const r = new Readable({ highWaterMark: 30 - }); - - r._read = common.mustNotCall(); + }) + r._read = common.mustNotCall() // This triggers a 'readable' event, which is lost. - // This triggers a 'readable' event, which is lost. - r.push(Buffer.from('blerg')); - r.push(null); - - setTimeout(function() { + r.push(Buffer.from('blerg')) + r.push(null) + setTimeout(function () { // Assert we're testing what we think we are - assert(!r._readableState.reading); - r.on('readable', common.mustCall()); - }, 1); + assert(!r._readableState.reading) + r.on('readable', common.mustCall()) + }, 1) } - { // Pushing an empty string in non-objectMode should // trigger next `read()`. 
- const underlyingData = ['', 'x', 'y', '', 'z']; - const expected = underlyingData.filter((data) => data); - const result = []; - + const underlyingData = ['', 'x', 'y', '', 'z'] + const expected = underlyingData.filter((data) => data) + const result = [] const r = new Readable({ - encoding: 'utf8', - }); - r._read = function() { + encoding: 'utf8' + }) + + r._read = function () { process.nextTick(() => { if (!underlyingData.length) { - this.push(null); + this.push(null) } else { - this.push(underlyingData.shift()); + this.push(underlyingData.shift()) } - }); - }; + }) + } r.on('readable', () => { - const data = r.read(); - if (data !== null) result.push(data); - }); - - r.on('end', common.mustCall(() => { - assert.deepStrictEqual(result, expected); - })); + const data = r.read() + if (data !== null) result.push(data) + }) + r.on( + 'end', + common.mustCall(() => { + assert.deepStrictEqual(result, expected) + }) + ) } - { // #20923 - const r = new Readable(); - r._read = function() { - // Actually doing thing here - }; - r.on('data', function() {}); + const r = new Readable() - r.removeAllListeners(); + r._read = function () { + // Actually doing thing here + } - assert.strictEqual(r.eventNames().length, 0); + r.on('data', function () {}) + r.removeAllListeners() + assert.strictEqual(r.eventNames().length, 0) } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-flow-recursion.js b/test/parallel/test-stream-readable-flow-recursion.js index 4d18449ee8..e4658af004 100644 --- a/test/parallel/test-stream-readable-flow-recursion.js +++ b/test/parallel/test-stream-readable-flow-recursion.js @@ -18,75 +18,70 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); + error() {} +} +require('../common') -// This test verifies that passing a huge number to read(size) +const assert = require('assert') // This test verifies that passing a huge number to read(size) // will push up the highWaterMark, and cause the stream to read // more data continuously, but without triggering a nextTick // warning or RangeError. -const Readable = require('../../lib/ours/index').Readable; +const Readable = require('../../lib/ours/index').Readable // Throw an error if we trigger a nextTick warning. -// Throw an error if we trigger a nextTick warning. 
-process.throwDeprecation = true; +process.throwDeprecation = true +const stream = new Readable({ + highWaterMark: 2 +}) +let reads = 0 +let total = 5000 -const stream = new Readable({ highWaterMark: 2 }); -let reads = 0; -let total = 5000; -stream._read = function(size) { - reads++; - size = Math.min(size, total); - total -= size; - if (size === 0) - stream.push(null); - else - stream.push(Buffer.allocUnsafe(size)); -}; +stream._read = function (size) { + reads++ + size = Math.min(size, total) + total -= size + if (size === 0) stream.push(null) + else stream.push(Buffer.allocUnsafe(size)) +} -let depth = 0; +let depth = 0 function flow(stream, size, callback) { - depth += 1; - const chunk = stream.read(size); - - if (!chunk) - stream.once('readable', flow.bind(null, stream, size, callback)); - else - callback(chunk); - - depth -= 1; - silentConsole.log(`flow(${depth}): exit`); + depth += 1 + const chunk = stream.read(size) + if (!chunk) stream.once('readable', flow.bind(null, stream, size, callback)) + else callback(chunk) + depth -= 1 + silentConsole.log(`flow(${depth}): exit`) } -flow(stream, 5000, function() { - silentConsole.log(`complete (${depth})`); -}); +flow(stream, 5000, function () { + silentConsole.log(`complete (${depth})`) +}) +process.on('exit', function (code) { + assert.strictEqual(reads, 2) // We pushed up the high water mark + + assert.strictEqual(stream.readableHighWaterMark, 8192) // Length is 0 right now, because we pulled it all out. -process.on('exit', function(code) { - assert.strictEqual(reads, 2); - // We pushed up the high water mark - assert.strictEqual(stream.readableHighWaterMark, 8192); - // Length is 0 right now, because we pulled it all out. - assert.strictEqual(stream.readableLength, 0); - assert(!code); - assert.strictEqual(depth, 0); - silentConsole.log('ok'); -}); + assert.strictEqual(stream.readableLength, 0) + assert(!code) + assert.strictEqual(depth, 0) + silentConsole.log('ok') +}) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-hwm-0-async.js b/test/parallel/test-stream-readable-hwm-0-async.js index f3b9c70f5c..b683b3b407 100644 --- a/test/parallel/test-stream-readable-hwm-0-async.js +++ b/test/parallel/test-stream-readable-hwm-0-async.js @@ -1,42 +1,40 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); - -// This test ensures that Readable stream will continue to call _read + error() {} +} +const common = require('../common') // This test ensures that Readable stream will continue to call _read // for streams with highWaterMark === 0 once the stream returns data // by calling push() asynchronously. -const { Readable } = require('../../lib/ours/index'); - -let count = 5; +const { Readable } = require('../../lib/ours/index') +let count = 5 const r = new Readable({ // Called 6 times: First 5 return data, last one signals end of stream. 
read: common.mustCall(() => { - process.nextTick(common.mustCall(() => { - if (count--) - r.push('a'); - else - r.push(null); - })); + process.nextTick( + common.mustCall(() => { + if (count--) r.push('a') + else r.push(null) + }) + ) }, 6), - highWaterMark: 0, -}); - -r.on('end', common.mustCall()); -r.on('data', common.mustCall(5)); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + highWaterMark: 0 +}) +r.on('end', common.mustCall()) +r.on('data', common.mustCall(5)) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-hwm-0-no-flow-data.js b/test/parallel/test-stream-readable-hwm-0-no-flow-data.js index 8925218549..c26401b35b 100644 --- a/test/parallel/test-stream-readable-hwm-0-no-flow-data.js +++ b/test/parallel/test-stream-readable-hwm-0-no-flow-data.js @@ -1,72 +1,72 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); - -// Ensure that subscribing the 'data' event will not make the stream flow. + error() {} +} +const common = require('../common') // Ensure that subscribing the 'data' event will not make the stream flow. // The 'data' event will require calling read() by hand. // // The test is written for the (somewhat rare) highWaterMark: 0 streams to // specifically catch any regressions that might occur with these streams. -const assert = require('assert'); -const { Readable } = require('../../lib/ours/index'); +const assert = require('assert') + +const { Readable } = require('../../lib/ours/index') -const streamData = [ 'a', null ]; +const streamData = ['a', null] // Track the calls so we can assert their order later. -// Track the calls so we can assert their order later. -const calls = []; +const calls = [] const r = new Readable({ read: common.mustCall(() => { - calls.push('_read:' + streamData[0]); + calls.push('_read:' + streamData[0]) process.nextTick(() => { - calls.push('push:' + streamData[0]); - r.push(streamData.shift()); - }); + calls.push('push:' + streamData[0]) + r.push(streamData.shift()) + }) }, streamData.length), highWaterMark: 0, - // Object mode is used here just for testing convenience. It really // shouldn't affect the order of events. Just the data and its format. 
- objectMode: true, -}); - -assert.strictEqual(r.readableFlowing, null); -r.on('readable', common.mustCall(() => { - calls.push('readable'); -}, 2)); -assert.strictEqual(r.readableFlowing, false); -r.on('data', common.mustCall((data) => { - calls.push('data:' + data); -}, 1)); -r.on('end', common.mustCall(() => { - calls.push('end'); -})); -assert.strictEqual(r.readableFlowing, false); - -// The stream emits the events asynchronously but that's not guaranteed to + objectMode: true +}) +assert.strictEqual(r.readableFlowing, null) +r.on( + 'readable', + common.mustCall(() => { + calls.push('readable') + }, 2) +) +assert.strictEqual(r.readableFlowing, false) +r.on( + 'data', + common.mustCall((data) => { + calls.push('data:' + data) + }, 1) +) +r.on( + 'end', + common.mustCall(() => { + calls.push('end') + }) +) +assert.strictEqual(r.readableFlowing, false) // The stream emits the events asynchronously but that's not guaranteed to // happen on the next tick (especially since the _read implementation above // uses process.nextTick). // // We use setImmediate here to give the stream enough time to emit all the // events it's about to emit. -setImmediate(() => { +setImmediate(() => { // Only the _read, push, readable calls have happened. No data must be // emitted yet. - assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable']); + assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable']) // Calling 'r.read()' should trigger the data event. - // Calling 'r.read()' should trigger the data event. - assert.strictEqual(r.read(), 'a'); - assert.deepStrictEqual( - calls, - ['_read:a', 'push:a', 'readable', 'data:a']); - - // The next 'read()' will return null because hwm: 0 does not buffer any + assert.strictEqual(r.read(), 'a') + assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a']) // The next 'read()' will return null because hwm: 0 does not buffer any // data and the _read implementation above does the push() asynchronously. // // Note: This 'null' signals "no data available". It isn't the end-of-stream @@ -75,45 +75,43 @@ setImmediate(() => { // // Using setImmediate again to give the stream enough time to emit all the // events it wants to emit. - assert.strictEqual(r.read(), null); - setImmediate(() => { + assert.strictEqual(r.read(), null) + setImmediate(() => { // There's a new 'readable' event after the data has been pushed. // The 'end' event will be emitted only after a 'read()'. // // This is somewhat special for the case where the '_read' implementation // calls 'push' asynchronously. If 'push' was synchronous, the 'end' event // would be emitted here _before_ we call read(). - assert.deepStrictEqual( - calls, - ['_read:a', 'push:a', 'readable', 'data:a', - '_read:null', 'push:null', 'readable']); - - assert.strictEqual(r.read(), null); - - // While it isn't really specified whether the 'end' event should happen + assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a', '_read:null', 'push:null', 'readable']) + assert.strictEqual(r.read(), null) // While it isn't really specified whether the 'end' event should happen // synchronously with read() or not, we'll assert the current behavior // ('end' event happening on the next tick after read()) so any changes // to it are noted and acknowledged in the future. 
- assert.deepStrictEqual( - calls, - ['_read:a', 'push:a', 'readable', 'data:a', - '_read:null', 'push:null', 'readable']); - process.nextTick(() => { - assert.deepStrictEqual( - calls, - ['_read:a', 'push:a', 'readable', 'data:a', - '_read:null', 'push:null', 'readable', 'end']); - }); - }); -}); - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a', '_read:null', 'push:null', 'readable']) + process.nextTick(() => { + assert.deepStrictEqual(calls, [ + '_read:a', + 'push:a', + 'readable', + 'data:a', + '_read:null', + 'push:null', + 'readable', + 'end' + ]) + }) + }) +}) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-hwm-0.js b/test/parallel/test-stream-readable-hwm-0.js index 4dfd1a7ad4..196cfec35b 100644 --- a/test/parallel/test-stream-readable-hwm-0.js +++ b/test/parallel/test-stream-readable-hwm-0.js @@ -1,45 +1,48 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); - -// This test ensures that Readable stream will call _read() for streams + error() {} +} +const common = require('../common') // This test ensures that Readable stream will call _read() for streams // with highWaterMark === 0 upon .read(0) instead of just trying to // emit 'readable' event. -const assert = require('assert'); -const { Readable } = require('../../lib/ours/index'); +const assert = require('assert') + +const { Readable } = require('../../lib/ours/index') const r = new Readable({ // Must be called only once upon setting 'readable' listener read: common.mustCall(), - highWaterMark: 0, -}); - -let pushedNull = false; -// This will trigger read(0) but must only be called after push(null) + highWaterMark: 0 +}) +let pushedNull = false // This will trigger read(0) but must only be called after push(null) // because the we haven't pushed any data -r.on('readable', common.mustCall(() => { - assert.strictEqual(r.read(), null); - assert.strictEqual(pushedNull, true); -})); -r.on('end', common.mustCall()); + +r.on( + 'readable', + common.mustCall(() => { + assert.strictEqual(r.read(), null) + assert.strictEqual(pushedNull, true) + }) +) +r.on('end', common.mustCall()) process.nextTick(() => { - assert.strictEqual(r.read(), null); - pushedNull = true; - r.push(null); -}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + assert.strictEqual(r.read(), null) + pushedNull = true + r.push(null) +}) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-infinite-read.js b/test/parallel/test-stream-readable-infinite-read.js index 84ac8c662c..569e8ee316 100644 --- a/test/parallel/test-stream-readable-infinite-read.js +++ b/test/parallel/test-stream-readable-infinite-read.js @@ -1,47 
+1,51 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const { Readable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') -const buf = Buffer.alloc(8192); +const assert = require('assert') +const { Readable } = require('../../lib/ours/index') + +const buf = Buffer.alloc(8192) const readable = new Readable({ - read: common.mustCall(function() { - this.push(buf); + read: common.mustCall(function () { + this.push(buf) }, 31) -}); +}) +let i = 0 +readable.on( + 'readable', + common.mustCall(function () { + if (i++ === 10) { + // We will just terminate now. + process.removeAllListeners('readable') + return + } -let i = 0; + const data = readable.read() // TODO(mcollina): there is something odd in the highWaterMark logic + // investigate. -readable.on('readable', common.mustCall(function() { - if (i++ === 10) { - // We will just terminate now. - process.removeAllListeners('readable'); - return; - } + if (i === 1) { + assert.strictEqual(data.length, 8192 * 2) + } else { + assert.strictEqual(data.length, 8192 * 3) + } + }, 11) +) +/* replacement start */ - const data = readable.read(); - // TODO(mcollina): there is something odd in the highWaterMark logic - // investigate. - if (i === 1) { - assert.strictEqual(data.length, 8192 * 2); +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') } else { - assert.strictEqual(data.length, 8192 * 3); + tap.fail(`test failed - exited code ${code}`) } -}, 11)); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-invalid-chunk.js b/test/parallel/test-stream-readable-invalid-chunk.js index 4ed7d33a9e..a622105435 100644 --- a/test/parallel/test-stream-readable-invalid-chunk.js +++ b/test/parallel/test-stream-readable-invalid-chunk.js @@ -1,49 +1,58 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const stream = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const stream = require('../../lib/ours/index') function testPushArg(val) { const readable = new stream.Readable({ read: () => {} - }); - readable.on('error', common.expectsError({ - code: 'ERR_INVALID_ARG_TYPE', - name: 'TypeError' - })); - readable.push(val); + }) + readable.on( + 'error', + common.expectsError({ + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError' + }) + ) + readable.push(val) } -testPushArg([]); -testPushArg({}); -testPushArg(0); +testPushArg([]) +testPushArg({}) +testPushArg(0) function testUnshiftArg(val) { const readable = new stream.Readable({ read: () => {} - }); - readable.on('error', common.expectsError({ - code: 'ERR_INVALID_ARG_TYPE', - name: 'TypeError' - })); - readable.unshift(val); + }) + readable.on( + 'error', + common.expectsError({ + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError' + }) + ) + readable.unshift(val) } -testUnshiftArg([]); -testUnshiftArg({}); -testUnshiftArg(0); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - 
tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +testUnshiftArg([]) +testUnshiftArg({}) +testUnshiftArg(0) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-needReadable.js b/test/parallel/test-stream-readable-needReadable.js index 1f0ecaba40..0a1fd7ce9b 100644 --- a/test/parallel/test-stream-readable-needReadable.js +++ b/test/parallel/test-stream-readable-needReadable.js @@ -1,114 +1,137 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const Readable = require('../../lib/ours/index').Readable; +const silentConsole = { + log() {}, -const readable = new Readable({ - read: () => {} -}); - -// Initialized to false. -assert.strictEqual(readable._readableState.needReadable, false); + error() {} +} +const common = require('../common') -readable.on('readable', common.mustCall(() => { - // When the readable event fires, needReadable is reset. - assert.strictEqual(readable._readableState.needReadable, false); - readable.read(); -})); +const assert = require('assert') -// If a readable listener is attached, then a readable event is needed. -assert.strictEqual(readable._readableState.needReadable, true); - -readable.push('foo'); -readable.push(null); - -readable.on('end', common.mustCall(() => { - // No need to emit readable anymore when the stream ends. - assert.strictEqual(readable._readableState.needReadable, false); -})); +const Readable = require('../../lib/ours/index').Readable +const readable = new Readable({ + read: () => {} +}) // Initialized to false. + +assert.strictEqual(readable._readableState.needReadable, false) +readable.on( + 'readable', + common.mustCall(() => { + // When the readable event fires, needReadable is reset. + assert.strictEqual(readable._readableState.needReadable, false) + readable.read() + }) +) // If a readable listener is attached, then a readable event is needed. + +assert.strictEqual(readable._readableState.needReadable, true) +readable.push('foo') +readable.push(null) +readable.on( + 'end', + common.mustCall(() => { + // No need to emit readable anymore when the stream ends. + assert.strictEqual(readable._readableState.needReadable, false) + }) +) const asyncReadable = new Readable({ read: () => {} -}); - -asyncReadable.on('readable', common.mustCall(() => { - if (asyncReadable.read() !== null) { - // After each read(), the buffer is empty. - // If the stream doesn't end now, - // then we need to notify the reader on future changes. - assert.strictEqual(asyncReadable._readableState.needReadable, true); - } -}, 2)); - -process.nextTick(common.mustCall(() => { - asyncReadable.push('foooo'); -})); -process.nextTick(common.mustCall(() => { - asyncReadable.push('bar'); -})); -setImmediate(common.mustCall(() => { - asyncReadable.push(null); - assert.strictEqual(asyncReadable._readableState.needReadable, false); -})); - +}) +asyncReadable.on( + 'readable', + common.mustCall(() => { + if (asyncReadable.read() !== null) { + // After each read(), the buffer is empty. + // If the stream doesn't end now, + // then we need to notify the reader on future changes. 
+ assert.strictEqual(asyncReadable._readableState.needReadable, true) + } + }, 2) +) +process.nextTick( + common.mustCall(() => { + asyncReadable.push('foooo') + }) +) +process.nextTick( + common.mustCall(() => { + asyncReadable.push('bar') + }) +) +setImmediate( + common.mustCall(() => { + asyncReadable.push(null) + assert.strictEqual(asyncReadable._readableState.needReadable, false) + }) +) const flowing = new Readable({ read: () => {} -}); - -// Notice this must be above the on('data') call. -flowing.push('foooo'); -flowing.push('bar'); -flowing.push('quo'); -process.nextTick(common.mustCall(() => { - flowing.push(null); -})); - -// When the buffer already has enough data, and the stream is +}) // Notice this must be above the on('data') call. + +flowing.push('foooo') +flowing.push('bar') +flowing.push('quo') +process.nextTick( + common.mustCall(() => { + flowing.push(null) + }) +) // When the buffer already has enough data, and the stream is // in flowing mode, there is no need for the readable event. -flowing.on('data', common.mustCall(function(data) { - assert.strictEqual(flowing._readableState.needReadable, false); -}, 3)); +flowing.on( + 'data', + common.mustCall(function (data) { + assert.strictEqual(flowing._readableState.needReadable, false) + }, 3) +) const slowProducer = new Readable({ read: () => {} -}); - -slowProducer.on('readable', common.mustCall(() => { - const chunk = slowProducer.read(8); - const state = slowProducer._readableState; - if (chunk === null) { - // The buffer doesn't have enough data, and the stream is not need, - // we need to notify the reader when data arrives. - assert.strictEqual(state.needReadable, true); - } else { - assert.strictEqual(state.needReadable, false); - } -}, 4)); - -process.nextTick(common.mustCall(() => { - slowProducer.push('foo'); - process.nextTick(common.mustCall(() => { - slowProducer.push('foo'); - process.nextTick(common.mustCall(() => { - slowProducer.push('foo'); - process.nextTick(common.mustCall(() => { - slowProducer.push(null); - })); - })); - })); -})); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); +}) +slowProducer.on( + 'readable', + common.mustCall(() => { + const chunk = slowProducer.read(8) + const state = slowProducer._readableState + + if (chunk === null) { + // The buffer doesn't have enough data, and the stream is not need, + // we need to notify the reader when data arrives. 
+ assert.strictEqual(state.needReadable, true) } else { - tap.fail(`test failed - exited code ${code}`); + assert.strictEqual(state.needReadable, false) } - }); - /* replacement end */ + }, 4) +) +process.nextTick( + common.mustCall(() => { + slowProducer.push('foo') + process.nextTick( + common.mustCall(() => { + slowProducer.push('foo') + process.nextTick( + common.mustCall(() => { + slowProducer.push('foo') + process.nextTick( + common.mustCall(() => { + slowProducer.push(null) + }) + ) + }) + ) + }) + ) + }) +) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-next-no-null.js b/test/parallel/test-stream-readable-next-no-null.js index 0d72fdd8d0..585f340697 100644 --- a/test/parallel/test-stream-readable-next-no-null.js +++ b/test/parallel/test-stream-readable-next-no-null.js @@ -1,34 +1,41 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const { mustNotCall, expectsError } = require('../common'); -const { Readable } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -async function* generate() { - yield null; + error() {} } +const { mustNotCall, expectsError } = require('../common') -const stream = Readable.from(generate()); - -stream.on('error', expectsError({ - code: 'ERR_STREAM_NULL_VALUES', - name: 'TypeError', - message: 'May not write null values to stream' -})); - -stream.on('data', mustNotCall((chunk) => {})); +const { Readable } = require('../../lib/ours/index') -stream.on('end', mustNotCall()); +async function* generate() { + yield null +} - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +const stream = Readable.from(generate()) +stream.on( + 'error', + expectsError({ + code: 'ERR_STREAM_NULL_VALUES', + name: 'TypeError', + message: 'May not write null values to stream' + }) +) +stream.on( + 'data', + mustNotCall((chunk) => {}) +) +stream.on('end', mustNotCall()) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-no-unneeded-readable.js b/test/parallel/test-stream-readable-no-unneeded-readable.js index c2620e4cea..89235fa261 100644 --- a/test/parallel/test-stream-readable-no-unneeded-readable.js +++ b/test/parallel/test-stream-readable-no-unneeded-readable.js @@ -1,77 +1,77 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const { Readable, PassThrough } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const { Readable, PassThrough } = require('../../lib/ours/index') function test(r) { const wrapper = new Readable({ read: () => { - let data = r.read(); + let data = r.read() if (data) { - wrapper.push(data); - return; + wrapper.push(data) + return } - r.once('readable', function() { - data = r.read(); - if (data) { - wrapper.push(data); - } - // else: the end event should fire - }); - }, - }); + 
r.once('readable', function () { + data = r.read() - r.once('end', function() { - wrapper.push(null); - }); - - wrapper.resume(); - wrapper.once('end', common.mustCall()); + if (data) { + wrapper.push(data) + } // else: the end event should fire + }) + } + }) + r.once('end', function () { + wrapper.push(null) + }) + wrapper.resume() + wrapper.once('end', common.mustCall()) } { const source = new Readable({ read: () => {} - }); - source.push('foo'); - source.push('bar'); - source.push(null); - - const pt = source.pipe(new PassThrough()); - test(pt); + }) + source.push('foo') + source.push('bar') + source.push(null) + const pt = source.pipe(new PassThrough()) + test(pt) } - { // This is the underlying cause of the above test case. - const pushChunks = ['foo', 'bar']; + const pushChunks = ['foo', 'bar'] const r = new Readable({ read: () => { - const chunk = pushChunks.shift(); + const chunk = pushChunks.shift() + if (chunk) { // synchronous call - r.push(chunk); + r.push(chunk) } else { // asynchronous call - process.nextTick(() => r.push(null)); + process.nextTick(() => r.push(null)) } - }, - }); - - test(r); + } + }) + test(r) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-object-multi-push-async.js b/test/parallel/test-stream-readable-object-multi-push-async.js index 3820cddf16..ff8eb0a837 100644 --- a/test/parallel/test-stream-readable-object-multi-push-async.js +++ b/test/parallel/test-stream-readable-object-multi-push-async.js @@ -1,198 +1,206 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const { Readable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') -const MAX = 42; -const BATCH = 10; +const { Readable } = require('../../lib/ours/index') +const MAX = 42 +const BATCH = 10 { const readable = new Readable({ objectMode: true, - read: common.mustCall(function() { - silentConsole.log('>> READ'); + read: common.mustCall(function () { + silentConsole.log('>> READ') fetchData((err, data) => { if (err) { - this.destroy(err); - return; + this.destroy(err) + return } if (data.length === 0) { - silentConsole.log('pushing null'); - this.push(null); - return; + silentConsole.log('pushing null') + this.push(null) + return } - silentConsole.log('pushing'); - data.forEach((d) => this.push(d)); - }); + silentConsole.log('pushing') + data.forEach((d) => this.push(d)) + }) }, Math.floor(MAX / BATCH) + 2) - }); + }) + let i = 0 - let i = 0; function fetchData(cb) { if (i > MAX) { - setTimeout(cb, 10, null, []); + setTimeout(cb, 10, null, []) } else { - const array = []; - const max = i + BATCH; + const array = [] + const max = i + BATCH + for (; i < max; i++) { - array.push(i); + array.push(i) } - setTimeout(cb, 10, null, array); + + setTimeout(cb, 10, null, array) } } readable.on('readable', () => { - let data; - silentConsole.log('readable emitted'); + let data + silentConsole.log('readable emitted') + while ((data = 
readable.read()) !== null) { - silentConsole.log(data); + silentConsole.log(data) } - }); - - readable.on('end', common.mustCall(() => { - assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH); - })); + }) + readable.on( + 'end', + common.mustCall(() => { + assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH) + }) + ) } - { const readable = new Readable({ objectMode: true, - read: common.mustCall(function() { - silentConsole.log('>> READ'); + read: common.mustCall(function () { + silentConsole.log('>> READ') fetchData((err, data) => { if (err) { - this.destroy(err); - return; + this.destroy(err) + return } if (data.length === 0) { - silentConsole.log('pushing null'); - this.push(null); - return; + silentConsole.log('pushing null') + this.push(null) + return } - silentConsole.log('pushing'); - data.forEach((d) => this.push(d)); - }); + silentConsole.log('pushing') + data.forEach((d) => this.push(d)) + }) }, Math.floor(MAX / BATCH) + 2) - }); + }) + let i = 0 - let i = 0; function fetchData(cb) { if (i > MAX) { - setTimeout(cb, 10, null, []); + setTimeout(cb, 10, null, []) } else { - const array = []; - const max = i + BATCH; + const array = [] + const max = i + BATCH + for (; i < max; i++) { - array.push(i); + array.push(i) } - setTimeout(cb, 10, null, array); + + setTimeout(cb, 10, null, array) } } readable.on('data', (data) => { - silentConsole.log('data emitted', data); - }); - - readable.on('end', common.mustCall(() => { - assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH); - })); + silentConsole.log('data emitted', data) + }) + readable.on( + 'end', + common.mustCall(() => { + assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH) + }) + ) } - { const readable = new Readable({ objectMode: true, - read: common.mustCall(function() { - silentConsole.log('>> READ'); + read: common.mustCall(function () { + silentConsole.log('>> READ') fetchData((err, data) => { if (err) { - this.destroy(err); - return; + this.destroy(err) + return } - silentConsole.log('pushing'); - data.forEach((d) => this.push(d)); + silentConsole.log('pushing') + data.forEach((d) => this.push(d)) if (data[BATCH - 1] >= MAX) { - silentConsole.log('pushing null'); - this.push(null); + silentConsole.log('pushing null') + this.push(null) } - }); + }) }, Math.floor(MAX / BATCH) + 1) - }); + }) + let i = 0 - let i = 0; function fetchData(cb) { - const array = []; - const max = i + BATCH; + const array = [] + const max = i + BATCH + for (; i < max; i++) { - array.push(i); + array.push(i) } - setTimeout(cb, 10, null, array); + + setTimeout(cb, 10, null, array) } readable.on('data', (data) => { - silentConsole.log('data emitted', data); - }); - - readable.on('end', common.mustCall(() => { - assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH); - })); + silentConsole.log('data emitted', data) + }) + readable.on( + 'end', + common.mustCall(() => { + assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH) + }) + ) } - { const readable = new Readable({ objectMode: true, read: common.mustNotCall() - }); - - readable.on('data', common.mustNotCall()); - - readable.push(null); - - let nextTickPassed = false; + }) + readable.on('data', common.mustNotCall()) + readable.push(null) + let nextTickPassed = false process.nextTick(() => { - nextTickPassed = true; - }); - - readable.on('end', common.mustCall(() => { - assert.strictEqual(nextTickPassed, true); - })); + nextTickPassed = true + }) + readable.on( + 'end', + common.mustCall(() => { + assert.strictEqual(nextTickPassed, true) + }) + ) } - { 
const readable = new Readable({ objectMode: true, read: common.mustCall() - }); - + }) readable.on('data', (data) => { - silentConsole.log('data emitted', data); - }); - - readable.on('end', common.mustCall()); - + silentConsole.log('data emitted', data) + }) + readable.on('end', common.mustCall()) setImmediate(() => { - readable.push('aaa'); - readable.push(null); - }); + readable.push('aaa') + readable.push(null) + }) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-pause-and-resume.js b/test/parallel/test-stream-readable-pause-and-resume.js index fbe720ca66..b745e7a4d7 100644 --- a/test/parallel/test-stream-readable-pause-and-resume.js +++ b/test/parallel/test-stream-readable-pause-and-resume.js @@ -1,89 +1,88 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const { Readable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') -let ticks = 18; -let expectedData = 19; +const { Readable } = require('../../lib/ours/index') +let ticks = 18 +let expectedData = 19 const rs = new Readable({ objectMode: true, read: () => { - if (ticks-- > 0) - return process.nextTick(() => rs.push({})); - rs.push({}); - rs.push(null); + if (ticks-- > 0) return process.nextTick(() => rs.push({})) + rs.push({}) + rs.push(null) } -}); - -rs.on('end', common.mustCall()); -readAndPause(); +}) +rs.on('end', common.mustCall()) +readAndPause() function readAndPause() { // Does a on(data) -> pause -> wait -> resume -> on(data) ... loop. // Expects on(data) to never fire if the stream is paused. 
const ondata = common.mustCall((data) => { - rs.pause(); - - expectedData--; - if (expectedData <= 0) - return; - - setImmediate(function() { - rs.removeListener('data', ondata); - readAndPause(); - rs.resume(); - }); - }, 1); // Only call ondata once - - rs.on('data', ondata); + rs.pause() + expectedData-- + if (expectedData <= 0) return + setImmediate(function () { + rs.removeListener('data', ondata) + readAndPause() + rs.resume() + }) + }, 1) // Only call ondata once + + rs.on('data', ondata) } { const readable = new Readable({ read() {} - }); + }) function read() {} - readable.setEncoding('utf8'); - readable.on('readable', read); - readable.removeListener('readable', read); - readable.pause(); - - process.nextTick(function() { - assert(readable.isPaused()); - }); + readable.setEncoding('utf8') + readable.on('readable', read) + readable.removeListener('readable', read) + readable.pause() + process.nextTick(function () { + assert(readable.isPaused()) + }) } - { - const { PassThrough } = require('../../lib/ours/index'); + const { PassThrough } = require('../../lib/ours/index') - const source3 = new PassThrough(); - const target3 = new PassThrough(); + const source3 = new PassThrough() + const target3 = new PassThrough() + const chunk = Buffer.allocUnsafe(1000) - const chunk = Buffer.allocUnsafe(1000); while (target3.write(chunk)); - source3.pipe(target3); - target3.on('drain', common.mustCall(() => { - assert(!source3.isPaused()); - })); - target3.on('data', () => {}); + source3.pipe(target3) + target3.on( + 'drain', + common.mustCall(() => { + assert(!source3.isPaused()) + }) + ) + target3.on('data', () => {}) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-readable-then-resume.js b/test/parallel/test-stream-readable-readable-then-resume.js index 050795e7ea..c5d57d6c4a 100644 --- a/test/parallel/test-stream-readable-readable-then-resume.js +++ b/test/parallel/test-stream-readable-readable-then-resume.js @@ -1,46 +1,51 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Readable } = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const { Readable } = require('../../lib/ours/index') -// This test verifies that a stream could be resumed after +const assert = require('assert') // This test verifies that a stream could be resumed after // removing the readable event in the same tick -check(new Readable({ - objectMode: true, - highWaterMark: 1, - read() { - if (!this.first) { - this.push('hello'); - this.first = true; - return; - } +check( + new Readable({ + objectMode: true, + highWaterMark: 1, - this.push(null); - } -})); + read() { + if (!this.first) { + this.push('hello') + this.first = true + return + } + + this.push(null) + } + }) +) function check(s) { - const readableListener = common.mustNotCall(); - s.on('readable', readableListener); - s.on('end', common.mustCall()); - assert.strictEqual(s.removeListener, s.off); - 
s.removeListener('readable', readableListener); - s.resume(); + const readableListener = common.mustNotCall() + s.on('readable', readableListener) + s.on('end', common.mustCall()) + assert.strictEqual(s.removeListener, s.off) + s.removeListener('readable', readableListener) + s.resume() } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-readable.js b/test/parallel/test-stream-readable-readable.js index a7c25ecacd..340a588ce2 100644 --- a/test/parallel/test-stream-readable-readable.js +++ b/test/parallel/test-stream-readable-readable.js @@ -1,60 +1,68 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); +const silentConsole = { + log() {}, -const { Readable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Readable } = require('../../lib/ours/index') { const r = new Readable({ read() {} - }); - assert.strictEqual(r.readable, true); - r.destroy(); - assert.strictEqual(r.readable, false); + }) + assert.strictEqual(r.readable, true) + r.destroy() + assert.strictEqual(r.readable, false) } - { - const mustNotCall = common.mustNotCall(); + const mustNotCall = common.mustNotCall() const r = new Readable({ read() {} - }); - assert.strictEqual(r.readable, true); - r.on('end', mustNotCall); - r.resume(); - r.push(null); - assert.strictEqual(r.readable, true); - r.off('end', mustNotCall); - r.on('end', common.mustCall(() => { - assert.strictEqual(r.readable, false); - })); + }) + assert.strictEqual(r.readable, true) + r.on('end', mustNotCall) + r.resume() + r.push(null) + assert.strictEqual(r.readable, true) + r.off('end', mustNotCall) + r.on( + 'end', + common.mustCall(() => { + assert.strictEqual(r.readable, false) + }) + ) } - { const r = new Readable({ read: common.mustCall(() => { process.nextTick(() => { - r.destroy(new Error()); - assert.strictEqual(r.readable, false); - }); + r.destroy(new Error()) + assert.strictEqual(r.readable, false) + }) + }) + }) + r.resume() + r.on( + 'error', + common.mustCall(() => { + assert.strictEqual(r.readable, false) }) - }); - r.resume(); - r.on('error', common.mustCall(() => { - assert.strictEqual(r.readable, false); - })); + ) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-reading-readingMore.js b/test/parallel/test-stream-readable-reading-readingMore.js index 9a6d521c1c..e1d9f526b7 100644 --- a/test/parallel/test-stream-readable-reading-readingMore.js +++ b/test/parallel/test-stream-readable-reading-readingMore.js @@ -1,186 +1,167 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = 
require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const Readable = require('../../lib/ours/index').Readable; +const silentConsole = { + log() {}, -{ - const readable = new Readable({ - read(size) {} - }); - - const state = readable._readableState; + error() {} +} +const common = require('../common') - // Starting off with false initially. - assert.strictEqual(state.reading, false); - assert.strictEqual(state.readingMore, false); +const assert = require('assert') - readable.on('data', common.mustCall((data) => { - // While in a flowing state with a 'readable' listener - // we should not be reading more - if (readable.readableFlowing) - assert.strictEqual(state.readingMore, true); +const Readable = require('../../lib/ours/index').Readable - // Reading as long as we've not ended - assert.strictEqual(state.reading, !state.ended); - }, 2)); +{ + const readable = new Readable({ + read(size) {} + }) + const state = readable._readableState // Starting off with false initially. + + assert.strictEqual(state.reading, false) + assert.strictEqual(state.readingMore, false) + readable.on( + 'data', + common.mustCall((data) => { + // While in a flowing state with a 'readable' listener + // we should not be reading more + if (readable.readableFlowing) assert.strictEqual(state.readingMore, true) // Reading as long as we've not ended + + assert.strictEqual(state.reading, !state.ended) + }, 2) + ) function onStreamEnd() { // End of stream; state.reading is false // And so should be readingMore. - assert.strictEqual(state.readingMore, false); - assert.strictEqual(state.reading, false); + assert.strictEqual(state.readingMore, false) + assert.strictEqual(state.reading, false) } - const expectedReadingMore = [true, true, false]; - readable.on('readable', common.mustCall(() => { - // There is only one readingMore scheduled from on('data'), - // after which everything is governed by the .read() call - assert.strictEqual(state.readingMore, expectedReadingMore.shift()); - - // If the stream has ended, we shouldn't be reading - assert.strictEqual(state.ended, !state.reading); - - // Consume all the data - while (readable.read() !== null); + const expectedReadingMore = [true, true, false] + readable.on( + 'readable', + common.mustCall(() => { + // There is only one readingMore scheduled from on('data'), + // after which everything is governed by the .read() call + assert.strictEqual(state.readingMore, expectedReadingMore.shift()) // If the stream has ended, we shouldn't be reading - if (expectedReadingMore.length === 0) // Reached end of stream - process.nextTick(common.mustCall(onStreamEnd, 1)); - }, 3)); + assert.strictEqual(state.ended, !state.reading) // Consume all the data - readable.on('end', common.mustCall(onStreamEnd)); - readable.push('pushed'); + while (readable.read() !== null); - readable.read(6); + if (expectedReadingMore.length === 0) + // Reached end of stream + process.nextTick(common.mustCall(onStreamEnd, 1)) + }, 3) + ) + readable.on('end', common.mustCall(onStreamEnd)) + readable.push('pushed') + readable.read(6) // reading - // reading - assert.strictEqual(state.reading, true); - assert.strictEqual(state.readingMore, true); + assert.strictEqual(state.reading, true) + assert.strictEqual(state.readingMore, true) // add chunk to front - // add chunk to front - readable.unshift('unshifted'); + readable.unshift('unshifted') // end - // end - readable.push(null); + readable.push(null) } - { const readable = new 
Readable({ read(size) {} - }); - - const state = readable._readableState; - - // Starting off with false initially. - assert.strictEqual(state.reading, false); - assert.strictEqual(state.readingMore, false); - - readable.on('data', common.mustCall((data) => { - // While in a flowing state without a 'readable' listener - // we should be reading more - if (readable.readableFlowing) - assert.strictEqual(state.readingMore, true); - - // Reading as long as we've not ended - assert.strictEqual(state.reading, !state.ended); - }, 2)); + }) + const state = readable._readableState // Starting off with false initially. + + assert.strictEqual(state.reading, false) + assert.strictEqual(state.readingMore, false) + readable.on( + 'data', + common.mustCall((data) => { + // While in a flowing state without a 'readable' listener + // we should be reading more + if (readable.readableFlowing) assert.strictEqual(state.readingMore, true) // Reading as long as we've not ended + + assert.strictEqual(state.reading, !state.ended) + }, 2) + ) function onStreamEnd() { // End of stream; state.reading is false // And so should be readingMore. - assert.strictEqual(state.readingMore, false); - assert.strictEqual(state.reading, false); + assert.strictEqual(state.readingMore, false) + assert.strictEqual(state.reading, false) } - readable.on('end', common.mustCall(onStreamEnd)); - readable.push('pushed'); - - // Stop emitting 'data' events - assert.strictEqual(state.flowing, true); - readable.pause(); + readable.on('end', common.mustCall(onStreamEnd)) + readable.push('pushed') // Stop emitting 'data' events - // paused - assert.strictEqual(state.reading, false); - assert.strictEqual(state.flowing, false); + assert.strictEqual(state.flowing, true) + readable.pause() // paused - readable.resume(); - assert.strictEqual(state.reading, false); - assert.strictEqual(state.flowing, true); + assert.strictEqual(state.reading, false) + assert.strictEqual(state.flowing, false) + readable.resume() + assert.strictEqual(state.reading, false) + assert.strictEqual(state.flowing, true) // add chunk to front - // add chunk to front - readable.unshift('unshifted'); + readable.unshift('unshifted') // end - // end - readable.push(null); + readable.push(null) } - { const readable = new Readable({ read(size) {} - }); - - const state = readable._readableState; - - // Starting off with false initially. - assert.strictEqual(state.reading, false); - assert.strictEqual(state.readingMore, false); - - const onReadable = common.mustNotCall; - - readable.on('readable', onReadable); - - readable.on('data', common.mustCall((data) => { - // Reading as long as we've not ended - assert.strictEqual(state.reading, !state.ended); - }, 2)); - - readable.removeListener('readable', onReadable); + }) + const state = readable._readableState // Starting off with false initially. + + assert.strictEqual(state.reading, false) + assert.strictEqual(state.readingMore, false) + const onReadable = common.mustNotCall + readable.on('readable', onReadable) + readable.on( + 'data', + common.mustCall((data) => { + // Reading as long as we've not ended + assert.strictEqual(state.reading, !state.ended) + }, 2) + ) + readable.removeListener('readable', onReadable) function onStreamEnd() { // End of stream; state.reading is false // And so should be readingMore. 
- assert.strictEqual(state.readingMore, false); - assert.strictEqual(state.reading, false); + assert.strictEqual(state.readingMore, false) + assert.strictEqual(state.reading, false) } - readable.on('end', common.mustCall(onStreamEnd)); - readable.push('pushed'); - - // We are still not flowing, we will be resuming in the next tick - assert.strictEqual(state.flowing, false); + readable.on('end', common.mustCall(onStreamEnd)) + readable.push('pushed') // We are still not flowing, we will be resuming in the next tick - // Wait for nextTick, so the readableListener flag resets - process.nextTick(function() { - readable.resume(); + assert.strictEqual(state.flowing, false) // Wait for nextTick, so the readableListener flag resets - // Stop emitting 'data' events - assert.strictEqual(state.flowing, true); - readable.pause(); + process.nextTick(function () { + readable.resume() // Stop emitting 'data' events - // paused - assert.strictEqual(state.flowing, false); + assert.strictEqual(state.flowing, true) + readable.pause() // paused - readable.resume(); - assert.strictEqual(state.flowing, true); + assert.strictEqual(state.flowing, false) + readable.resume() + assert.strictEqual(state.flowing, true) // add chunk to front - // add chunk to front - readable.unshift('unshifted'); + readable.unshift('unshifted') // end - // end - readable.push(null); - }); + readable.push(null) + }) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-resume-hwm.js b/test/parallel/test-stream-readable-resume-hwm.js index 00c7ebec3d..22bf32ea62 100644 --- a/test/parallel/test-stream-readable-resume-hwm.js +++ b/test/parallel/test-stream-readable-resume-hwm.js @@ -1,36 +1,38 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const { Readable } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -// readable.resume() should not lead to a ._read() call being scheduled + error() {} +} +const common = require('../common') + +const { Readable } = require('../../lib/ours/index') // readable.resume() should not lead to a ._read() call being scheduled // when we exceed the high water mark already. const readable = new Readable({ read: common.mustNotCall(), highWaterMark: 100 -}); - -// Fill up the internal buffer so that we definitely exceed the HWM: -for (let i = 0; i < 10; i++) - readable.push('a'.repeat(200)); +}) // Fill up the internal buffer so that we definitely exceed the HWM: -// Call resume, and pause after one chunk. +for (let i = 0; i < 10; i++) readable.push('a'.repeat(200)) // Call resume, and pause after one chunk. // The .pause() is just so that we don’t empty the buffer fully, which would // be a valid reason to call ._read(). 
-readable.resume(); -readable.once('data', common.mustCall(() => readable.pause())); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + +readable.resume() +readable.once( + 'data', + common.mustCall(() => readable.pause()) +) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-resumeScheduled.js b/test/parallel/test-stream-readable-resumeScheduled.js index 23ae61a66a..a696ba6d5a 100644 --- a/test/parallel/test-stream-readable-resumeScheduled.js +++ b/test/parallel/test-stream-readable-resumeScheduled.js @@ -1,80 +1,87 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); +const silentConsole = { + log() {}, -// Testing Readable Stream resumeScheduled state + error() {} +} +const common = require('../common') // Testing Readable Stream resumeScheduled state + +const assert = require('assert') -const assert = require('assert'); -const { Readable, Writable } = require('../../lib/ours/index'); +const { Readable, Writable } = require('../../lib/ours/index') { // pipe() test case - const r = new Readable({ read() {} }); - const w = new Writable(); - - // resumeScheduled should start = `false`. - assert.strictEqual(r._readableState.resumeScheduled, false); - - // Calling pipe() should change the state value = true. - r.pipe(w); - assert.strictEqual(r._readableState.resumeScheduled, true); - - process.nextTick(common.mustCall(() => { - assert.strictEqual(r._readableState.resumeScheduled, false); - })); + const r = new Readable({ + read() {} + }) + const w = new Writable() // resumeScheduled should start = `false`. + + assert.strictEqual(r._readableState.resumeScheduled, false) // Calling pipe() should change the state value = true. + + r.pipe(w) + assert.strictEqual(r._readableState.resumeScheduled, true) + process.nextTick( + common.mustCall(() => { + assert.strictEqual(r._readableState.resumeScheduled, false) + }) + ) } - { // 'data' listener test case - const r = new Readable({ read() {} }); - - // resumeScheduled should start = `false`. - assert.strictEqual(r._readableState.resumeScheduled, false); - - r.push(Buffer.from([1, 2, 3])); - - // Adding 'data' listener should change the state value - r.on('data', common.mustCall(() => { - assert.strictEqual(r._readableState.resumeScheduled, false); - })); - assert.strictEqual(r._readableState.resumeScheduled, true); - - process.nextTick(common.mustCall(() => { - assert.strictEqual(r._readableState.resumeScheduled, false); - })); + const r = new Readable({ + read() {} + }) // resumeScheduled should start = `false`. + + assert.strictEqual(r._readableState.resumeScheduled, false) + r.push(Buffer.from([1, 2, 3])) // Adding 'data' listener should change the state value + + r.on( + 'data', + common.mustCall(() => { + assert.strictEqual(r._readableState.resumeScheduled, false) + }) + ) + assert.strictEqual(r._readableState.resumeScheduled, true) + process.nextTick( + common.mustCall(() => { + assert.strictEqual(r._readableState.resumeScheduled, false) + }) + ) } - { // resume() test case - const r = new Readable({ read() {} }); - - // resumeScheduled should start = `false`. 
- assert.strictEqual(r._readableState.resumeScheduled, false); - - // Calling resume() should change the state value. - r.resume(); - assert.strictEqual(r._readableState.resumeScheduled, true); - - r.on('resume', common.mustCall(() => { - // The state value should be `false` again - assert.strictEqual(r._readableState.resumeScheduled, false); - })); - - process.nextTick(common.mustCall(() => { - assert.strictEqual(r._readableState.resumeScheduled, false); - })); + const r = new Readable({ + read() {} + }) // resumeScheduled should start = `false`. + + assert.strictEqual(r._readableState.resumeScheduled, false) // Calling resume() should change the state value. + + r.resume() + assert.strictEqual(r._readableState.resumeScheduled, true) + r.on( + 'resume', + common.mustCall(() => { + // The state value should be `false` again + assert.strictEqual(r._readableState.resumeScheduled, false) + }) + ) + process.nextTick( + common.mustCall(() => { + assert.strictEqual(r._readableState.resumeScheduled, false) + }) + ) } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-setEncoding-existing-buffers.js b/test/parallel/test-stream-readable-setEncoding-existing-buffers.js index 960aa22203..11a0ed83e6 100644 --- a/test/parallel/test-stream-readable-setEncoding-existing-buffers.js +++ b/test/parallel/test-stream-readable-setEncoding-existing-buffers.js @@ -1,75 +1,73 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const { Readable } = require('../../lib/ours/index'); -const assert = require('assert'); +const silentConsole = { + log() {}, -{ - // Call .setEncoding() while there are bytes already in the buffer. - const r = new Readable({ read() {} }); + error() {} +} +require('../common') - r.push(Buffer.from('a')); - r.push(Buffer.from('b')); +const { Readable } = require('../../lib/ours/index') - r.setEncoding('utf8'); - const chunks = []; - r.on('data', (chunk) => chunks.push(chunk)); +const assert = require('assert') +{ + // Call .setEncoding() while there are bytes already in the buffer. + const r = new Readable({ + read() {} + }) + r.push(Buffer.from('a')) + r.push(Buffer.from('b')) + r.setEncoding('utf8') + const chunks = [] + r.on('data', (chunk) => chunks.push(chunk)) process.nextTick(() => { - assert.deepStrictEqual(chunks, ['ab']); - }); + assert.deepStrictEqual(chunks, ['ab']) + }) } - { // Call .setEncoding() while the buffer contains a complete, // but chunked character. 
- const r = new Readable({ read() {} }); - - r.push(Buffer.from([0xf0])); - r.push(Buffer.from([0x9f])); - r.push(Buffer.from([0x8e])); - r.push(Buffer.from([0x89])); - - r.setEncoding('utf8'); - const chunks = []; - r.on('data', (chunk) => chunks.push(chunk)); - + const r = new Readable({ + read() {} + }) + r.push(Buffer.from([0xf0])) + r.push(Buffer.from([0x9f])) + r.push(Buffer.from([0x8e])) + r.push(Buffer.from([0x89])) + r.setEncoding('utf8') + const chunks = [] + r.on('data', (chunk) => chunks.push(chunk)) process.nextTick(() => { - assert.deepStrictEqual(chunks, ['🎉']); - }); + assert.deepStrictEqual(chunks, ['🎉']) + }) } - { // Call .setEncoding() while the buffer contains an incomplete character, // and finish the character later. - const r = new Readable({ read() {} }); - - r.push(Buffer.from([0xf0])); - r.push(Buffer.from([0x9f])); - - r.setEncoding('utf8'); - - r.push(Buffer.from([0x8e])); - r.push(Buffer.from([0x89])); - - const chunks = []; - r.on('data', (chunk) => chunks.push(chunk)); - + const r = new Readable({ + read() {} + }) + r.push(Buffer.from([0xf0])) + r.push(Buffer.from([0x9f])) + r.setEncoding('utf8') + r.push(Buffer.from([0x8e])) + r.push(Buffer.from([0x89])) + const chunks = [] + r.on('data', (chunk) => chunks.push(chunk)) process.nextTick(() => { - assert.deepStrictEqual(chunks, ['🎉']); - }); + assert.deepStrictEqual(chunks, ['🎉']) + }) } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-setEncoding-null.js b/test/parallel/test-stream-readable-setEncoding-null.js index 979d0690e6..0ee0e8ecfc 100644 --- a/test/parallel/test-stream-readable-setEncoding-null.js +++ b/test/parallel/test-stream-readable-setEncoding-null.js @@ -1,30 +1,33 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; - -require('../common'); -const assert = require('assert'); -const { Readable } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, + error() {} +} +require('../common') -{ - const readable = new Readable({ encoding: 'hex' }); - assert.strictEqual(readable._readableState.encoding, 'hex'); +const assert = require('assert') - readable.setEncoding(null); +const { Readable } = require('../../lib/ours/index') - assert.strictEqual(readable._readableState.encoding, 'utf8'); +{ + const readable = new Readable({ + encoding: 'hex' + }) + assert.strictEqual(readable._readableState.encoding, 'hex') + readable.setEncoding(null) + assert.strictEqual(readable._readableState.encoding, 'utf8') } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-unpipe-resume.js b/test/parallel/test-stream-readable-unpipe-resume.js index 5ef9c1947e..deb18e5082 100644 --- 
a/test/parallel/test-stream-readable-unpipe-resume.js +++ b/test/parallel/test-stream-readable-unpipe-resume.js @@ -1,35 +1,34 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const stream = require('../../lib/ours/index'); -const fs = require('fs'); + error() {} +} +const common = require('../common') -const readStream = fs.createReadStream(process.execPath); +const stream = require('../../lib/ours/index') +const fs = require('fs') + +const readStream = fs.createReadStream(process.execPath) const transformStream = new stream.Transform({ transform: common.mustCall(() => { - readStream.unpipe(); - readStream.resume(); + readStream.unpipe() + readStream.resume() }) -}); - -readStream.on('end', common.mustCall()); - -readStream - .pipe(transformStream) - .resume(); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) +readStream.on('end', common.mustCall()) +readStream.pipe(transformStream).resume() +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-unshift.js b/test/parallel/test-stream-readable-unshift.js index 73ba9f4668..380706e0be 100644 --- a/test/parallel/test-stream-readable-unshift.js +++ b/test/parallel/test-stream-readable-unshift.js @@ -1,185 +1,207 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const { Readable } = require('../../lib/ours/index'); - -{ - // Check that strings are saved as Buffer - const readable = new Readable({ read() {} }); - - const string = 'abc'; + error() {} +} +const common = require('../common') - readable.on('data', common.mustCall((chunk) => { - assert(Buffer.isBuffer(chunk)); - assert.strictEqual(chunk.toString('utf8'), string); - }, 1)); +const assert = require('assert') - readable.unshift(string); +const { Readable } = require('../../lib/ours/index') +{ + // Check that strings are saved as Buffer + const readable = new Readable({ + read() {} + }) + const string = 'abc' + readable.on( + 'data', + common.mustCall((chunk) => { + assert(Buffer.isBuffer(chunk)) + assert.strictEqual(chunk.toString('utf8'), string) + }, 1) + ) + readable.unshift(string) } - { // Check that data goes at the beginning - const readable = new Readable({ read() {} }); - const unshift = 'front'; - const push = 'back'; - - const expected = [unshift, push]; - readable.on('data', common.mustCall((chunk) => { - assert.strictEqual(chunk.toString('utf8'), expected.shift()); - }, 2)); - - - readable.push(push); - readable.unshift(unshift); + const readable = new Readable({ + read() {} + }) + const unshift = 'front' + const push = 'back' + const expected = [unshift, push] + readable.on( + 'data', + common.mustCall((chunk) => { + assert.strictEqual(chunk.toString('utf8'), expected.shift()) + }, 2) + ) + readable.push(push) + readable.unshift(unshift) } - { // Check that buffer is saved with correct encoding - const readable = new Readable({ read() {} }); - - 
const encoding = 'base64'; - const string = Buffer.from('abc').toString(encoding); - - readable.on('data', common.mustCall((chunk) => { - assert.strictEqual(chunk.toString(encoding), string); - }, 1)); - - readable.unshift(string, encoding); - + const readable = new Readable({ + read() {} + }) + const encoding = 'base64' + const string = Buffer.from('abc').toString(encoding) + readable.on( + 'data', + common.mustCall((chunk) => { + assert.strictEqual(chunk.toString(encoding), string) + }, 1) + ) + readable.unshift(string, encoding) } - { - - const streamEncoding = 'base64'; + const streamEncoding = 'base64' function checkEncoding(readable) { - // chunk encodings - const encodings = ['utf8', 'binary', 'hex', 'base64']; - const expected = []; - - readable.on('data', common.mustCall((chunk) => { - const { encoding, string } = expected.pop(); - assert.strictEqual(chunk.toString(encoding), string); - }, encodings.length)); + const encodings = ['utf8', 'binary', 'hex', 'base64'] + const expected = [] + readable.on( + 'data', + common.mustCall((chunk) => { + const { encoding, string } = expected.pop() + assert.strictEqual(chunk.toString(encoding), string) + }, encodings.length) + ) for (const encoding of encodings) { - const string = 'abc'; - - // If encoding is the same as the state.encoding the string is + const string = 'abc' // If encoding is the same as the state.encoding the string is // saved as is - const expect = encoding !== streamEncoding ? - Buffer.from(string, encoding).toString(streamEncoding) : string; - expected.push({ encoding, string: expect }); - - readable.unshift(string, encoding); + const expect = encoding !== streamEncoding ? Buffer.from(string, encoding).toString(streamEncoding) : string + expected.push({ + encoding, + string: expect + }) + readable.unshift(string, encoding) } } - const r1 = new Readable({ read() {} }); - r1.setEncoding(streamEncoding); - checkEncoding(r1); - - const r2 = new Readable({ read() {}, encoding: streamEncoding }); - checkEncoding(r2); - + const r1 = new Readable({ + read() {} + }) + r1.setEncoding(streamEncoding) + checkEncoding(r1) + const r2 = new Readable({ + read() {}, + + encoding: streamEncoding + }) + checkEncoding(r2) } - { // Both .push & .unshift should have the same behaviour // When setting an encoding, each chunk should be emitted with that encoding - const encoding = 'base64'; + const encoding = 'base64' function checkEncoding(readable) { - const string = 'abc'; - readable.on('data', common.mustCall((chunk) => { - assert.strictEqual(chunk, Buffer.from(string).toString(encoding)); - }, 2)); - - readable.push(string); - readable.unshift(string); + const string = 'abc' + readable.on( + 'data', + common.mustCall((chunk) => { + assert.strictEqual(chunk, Buffer.from(string).toString(encoding)) + }, 2) + ) + readable.push(string) + readable.unshift(string) } - const r1 = new Readable({ read() {} }); - r1.setEncoding(encoding); - checkEncoding(r1); - - const r2 = new Readable({ read() {}, encoding }); - checkEncoding(r2); - + const r1 = new Readable({ + read() {} + }) + r1.setEncoding(encoding) + checkEncoding(r1) + const r2 = new Readable({ + read() {}, + + encoding + }) + checkEncoding(r2) } - { // Check that ObjectMode works - const readable = new Readable({ objectMode: true, read() {} }); - - const chunks = ['a', 1, {}, []]; - - readable.on('data', common.mustCall((chunk) => { - assert.strictEqual(chunk, chunks.pop()); - }, chunks.length)); + const readable = new Readable({ + objectMode: true, + + read() {} + }) + const chunks = ['a', 
1, {}, []] + readable.on( + 'data', + common.mustCall((chunk) => { + assert.strictEqual(chunk, chunks.pop()) + }, chunks.length) + ) for (const chunk of chunks) { - readable.unshift(chunk); + readable.unshift(chunk) } } - { - // Should not throw: https://github.com/nodejs/node/issues/27192 - const highWaterMark = 50; + const highWaterMark = 50 + class ArrayReader extends Readable { constructor(opt) { - super({ highWaterMark }); - // The error happened only when pushing above hwm - this.buffer = new Array(highWaterMark * 2).fill(0).map(String); + super({ + highWaterMark + }) // The error happened only when pushing above hwm + + this.buffer = new Array(highWaterMark * 2).fill(0).map(String) } + _read(size) { while (this.buffer.length) { - const chunk = this.buffer.shift(); + const chunk = this.buffer.shift() + if (!this.buffer.length) { - this.push(chunk); - this.push(null); - return true; + this.push(chunk) + this.push(null) + return true } - if (!this.push(chunk)) - return; + + if (!this.push(chunk)) return } } } function onRead() { - while (null !== (stream.read())) { + while (null !== stream.read()) { // Remove the 'readable' listener before unshifting - stream.removeListener('readable', onRead); - stream.unshift('a'); + stream.removeListener('readable', onRead) + stream.unshift('a') stream.on('data', (chunk) => { - silentConsole.log(chunk.length); - }); - break; + silentConsole.log(chunk.length) + }) + break } } - const stream = new ArrayReader(); - stream.once('readable', common.mustCall(onRead)); - stream.on('end', common.mustCall(() => {})); - + const stream = new ArrayReader() + stream.once('readable', common.mustCall(onRead)) + stream.on( + 'end', + common.mustCall(() => {}) + ) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readable-with-unimplemented-_read.js b/test/parallel/test-stream-readable-with-unimplemented-_read.js index 8d4ae320d7..588bfb2224 100644 --- a/test/parallel/test-stream-readable-with-unimplemented-_read.js +++ b/test/parallel/test-stream-readable-with-unimplemented-_read.js @@ -1,28 +1,34 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const { Readable } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -const readable = new Readable(); + error() {} +} +const common = require('../common') -readable.read(); -readable.on('error', common.expectsError({ - code: 'ERR_METHOD_NOT_IMPLEMENTED', - name: 'Error', - message: 'The _read() method is not implemented' -})); -readable.on('close', common.mustCall()); +const { Readable } = require('../../lib/ours/index') - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +const readable = new Readable() +readable.read() +readable.on( + 'error', + common.expectsError({ + code: 'ERR_METHOD_NOT_IMPLEMENTED', + name: 'Error', + message: 'The _read() method is not implemented' + }) +) +readable.on('close', common.mustCall()) +/* 
replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-readableListening-state.js b/test/parallel/test-stream-readableListening-state.js index 10abe32369..5c37e4b148 100644 --- a/test/parallel/test-stream-readableListening-state.js +++ b/test/parallel/test-stream-readableListening-state.js @@ -1,49 +1,52 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') -const r = new stream.Readable({ - read: () => {} -}); - -// readableListening state should start in `false`. -assert.strictEqual(r._readableState.readableListening, false); +const assert = require('assert') -r.on('readable', common.mustCall(() => { - // Inside the readable event this state should be true. - assert.strictEqual(r._readableState.readableListening, true); -})); - -r.push(Buffer.from('Testing readableListening state')); +const stream = require('../../lib/ours/index') +const r = new stream.Readable({ + read: () => {} +}) // readableListening state should start in `false`. + +assert.strictEqual(r._readableState.readableListening, false) +r.on( + 'readable', + common.mustCall(() => { + // Inside the readable event this state should be true. + assert.strictEqual(r._readableState.readableListening, true) + }) +) +r.push(Buffer.from('Testing readableListening state')) const r2 = new stream.Readable({ read: () => {} -}); - -// readableListening state should start in `false`. -assert.strictEqual(r2._readableState.readableListening, false); - -r2.on('data', common.mustCall((chunk) => { - // readableListening should be false because we don't have - // a `readable` listener - assert.strictEqual(r2._readableState.readableListening, false); -})); - -r2.push(Buffer.from('Testing readableListening state')); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) // readableListening state should start in `false`. 
+ +assert.strictEqual(r2._readableState.readableListening, false) +r2.on( + 'data', + common.mustCall((chunk) => { + // readableListening should be false because we don't have + // a `readable` listener + assert.strictEqual(r2._readableState.readableListening, false) + }) +) +r2.push(Buffer.from('Testing readableListening state')) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-reduce.js b/test/parallel/test-stream-reduce.js index b585d3d106..a13eaa627d 100644 --- a/test/parallel/test-stream-reduce.js +++ b/test/parallel/test-stream-reduce.js @@ -1,23 +1,25 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { - Readable, -} = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const { Readable } = require('../../lib/ours/index') + +const assert = require('assert') function sum(p, c) { - return p + c; + return p + c } { // Does the same thing as `(await stream.toArray()).reduce(...)` - (async () => { + ;(async () => { const tests = [ [[], sum, 0], [[1], sum, 0], @@ -25,123 +27,173 @@ function sum(p, c) { [[...Array(100).keys()], sum, 0], [['a', 'b', 'c'], sum, ''], [[1, 2], sum], - [[1, 2, 3], (x, y) => y], - ]; + [[1, 2, 3], (x, y) => y] + ] + for (const [values, fn, initial] of tests) { - const streamReduce = await Readable.from(values) - .reduce(fn, initial); - const arrayReduce = values.reduce(fn, initial); - assert.deepStrictEqual(streamReduce, arrayReduce); - } - // Does the same thing as `(await stream.toArray()).reduce(...)` with an + const streamReduce = await Readable.from(values).reduce(fn, initial) + const arrayReduce = values.reduce(fn, initial) + assert.deepStrictEqual(streamReduce, arrayReduce) + } // Does the same thing as `(await stream.toArray()).reduce(...)` with an // asynchronous reducer + for (const [values, fn, initial] of tests) { const streamReduce = await Readable.from(values) - .map(async (x) => x) - .reduce(fn, initial); - const arrayReduce = values.reduce(fn, initial); - assert.deepStrictEqual(streamReduce, arrayReduce); + .map(async (x) => x) + .reduce(fn, initial) + const arrayReduce = values.reduce(fn, initial) + assert.deepStrictEqual(streamReduce, arrayReduce) } - })().then(common.mustCall()); + })().then(common.mustCall()) } { // Works with an async reducer, with or without initial value - (async () => { - const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c, 0); - assert.strictEqual(six, 6); - })().then(common.mustCall()); - (async () => { - const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c); - assert.strictEqual(six, 6); - })().then(common.mustCall()); + ;(async () => { + const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c, 0) + assert.strictEqual(six, 6) + })().then(common.mustCall()) + ;(async () => { + const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c) + assert.strictEqual(six, 6) + })().then(common.mustCall()) } { // Works lazily - assert.rejects(Readable.from([1, 2, 3, 4, 5, 6]) - .map(common.mustCall((x) => { - return x; - }, 3)) // Two consumed and one buffered by `map` due to default concurrency - .reduce(async (p, c) => { - if (p 
=== 1) { - throw new Error('boom'); - } - return c; - }, 0) - , /boom/).then(common.mustCall()); -} + assert + .rejects( + Readable.from([1, 2, 3, 4, 5, 6]) + .map( + common.mustCall((x) => { + return x + }, 3) + ) // Two consumed and one buffered by `map` due to default concurrency + .reduce(async (p, c) => { + if (p === 1) { + throw new Error('boom') + } + return c + }, 0), + /boom/ + ) + .then(common.mustCall()) +} { // Support for AbortSignal - const ac = new AbortController(); - assert.rejects(async () => { - await Readable.from([1, 2, 3]).reduce(async (p, c) => { - if (c === 3) { - await new Promise(() => {}); // Explicitly do not pass signal here + const ac = new AbortController() + assert + .rejects( + async () => { + await Readable.from([1, 2, 3]).reduce( + async (p, c) => { + if (c === 3) { + await new Promise(() => {}) // Explicitly do not pass signal here + } + + return Promise.resolve() + }, + 0, + { + signal: ac.signal + } + ) + }, + { + name: 'AbortError' } - return Promise.resolve(); - }, 0, { signal: ac.signal }); - }, { - name: 'AbortError', - }).then(common.mustCall()); - ac.abort(); + ) + .then(common.mustCall()) + ac.abort() } - - { // Support for AbortSignal - pre aborted - const stream = Readable.from([1, 2, 3]); - assert.rejects(async () => { - await stream.reduce(async (p, c) => { - if (c === 3) { - await new Promise(() => {}); // Explicitly do not pass signal here + const stream = Readable.from([1, 2, 3]) + assert + .rejects( + async () => { + await stream.reduce( + async (p, c) => { + if (c === 3) { + await new Promise(() => {}) // Explicitly do not pass signal here + } + + return Promise.resolve() + }, + 0, + { + signal: AbortSignal.abort() + } + ) + }, + { + name: 'AbortError' } - return Promise.resolve(); - }, 0, { signal: AbortSignal.abort() }); - }, { - name: 'AbortError', - }).then(common.mustCall(() => { - assert.strictEqual(stream.destroyed, true); - })); + ) + .then( + common.mustCall(() => { + assert.strictEqual(stream.destroyed, true) + }) + ) } - { // Support for AbortSignal - deep - const stream = Readable.from([1, 2, 3]); - assert.rejects(async () => { - await stream.reduce(async (p, c, { signal }) => { - signal.addEventListener('abort', common.mustCall(), { once: true }); - if (c === 3) { - await new Promise(() => {}); // Explicitly do not pass signal here + const stream = Readable.from([1, 2, 3]) + assert + .rejects( + async () => { + await stream.reduce( + async (p, c, { signal }) => { + signal.addEventListener('abort', common.mustCall(), { + once: true + }) + + if (c === 3) { + await new Promise(() => {}) // Explicitly do not pass signal here + } + + return Promise.resolve() + }, + 0, + { + signal: AbortSignal.abort() + } + ) + }, + { + name: 'AbortError' } - return Promise.resolve(); - }, 0, { signal: AbortSignal.abort() }); - }, { - name: 'AbortError', - }).then(common.mustCall(() => { - assert.strictEqual(stream.destroyed, true); - })); + ) + .then( + common.mustCall(() => { + assert.strictEqual(stream.destroyed, true) + }) + ) } - { // Error cases - assert.rejects(() => Readable.from([]).reduce(1), /TypeError/); - assert.rejects(() => Readable.from([]).reduce('5'), /TypeError/); - assert.rejects(() => Readable.from([]).reduce((x, y) => x + y, 0, 1), /ERR_INVALID_ARG_TYPE/); - assert.rejects(() => Readable.from([]).reduce((x, y) => x + y, 0, { signal: true }), /ERR_INVALID_ARG_TYPE/); + assert.rejects(() => Readable.from([]).reduce(1), /TypeError/) + assert.rejects(() => Readable.from([]).reduce('5'), /TypeError/) + assert.rejects(() => 
Readable.from([]).reduce((x, y) => x + y, 0, 1), /ERR_INVALID_ARG_TYPE/) + assert.rejects( + () => + Readable.from([]).reduce((x, y) => x + y, 0, { + signal: true + }), + /ERR_INVALID_ARG_TYPE/ + ) } - { // Test result is a Promise - const result = Readable.from([1, 2, 3, 4, 5]).reduce(sum, 0); - assert.ok(result instanceof Promise); + const result = Readable.from([1, 2, 3, 4, 5]).reduce(sum, 0) + assert.ok(result instanceof Promise) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-some-find-every.mjs b/test/parallel/test-stream-some-find-every.mjs index 34c8e2a8a2..30298d0d07 100644 --- a/test/parallel/test-stream-some-find-every.mjs +++ b/test/parallel/test-stream-some-find-every.mjs @@ -1,183 +1,215 @@ -import * as common from '../common/index.mjs'; -import { setTimeout } from 'timers/promises'; -import { Readable }from '../../lib/ours/index.js'; -import assert from 'assert'; -import tap from 'tap'; - +import * as common from '../common/index.mjs' +import { setTimeout } from 'timers/promises' +import { Readable } from '../../lib/ours/index.js' +import assert from 'assert' +import tap from 'tap' function oneTo5() { - return Readable.from([1, 2, 3, 4, 5]); + return Readable.from([1, 2, 3, 4, 5]) } function oneTo5Async() { return oneTo5().map(async (x) => { - await Promise.resolve(); - return x; - }); + await Promise.resolve() + return x + }) } { // Some, find, and every work with a synchronous stream and predicate - assert.strictEqual(await oneTo5().some((x) => x > 3), true); - assert.strictEqual(await oneTo5().every((x) => x > 3), false); - assert.strictEqual(await oneTo5().find((x) => x > 3), 4); - assert.strictEqual(await oneTo5().some((x) => x > 6), false); - assert.strictEqual(await oneTo5().every((x) => x < 6), true); - assert.strictEqual(await oneTo5().find((x) => x > 6), undefined); - assert.strictEqual(await Readable.from([]).some(() => true), false); - assert.strictEqual(await Readable.from([]).every(() => true), true); - assert.strictEqual(await Readable.from([]).find(() => true), undefined); + assert.strictEqual(await oneTo5().some((x) => x > 3), true) + assert.strictEqual(await oneTo5().every((x) => x > 3), false) + assert.strictEqual(await oneTo5().find((x) => x > 3), 4) + assert.strictEqual(await oneTo5().some((x) => x > 6), false) + assert.strictEqual(await oneTo5().every((x) => x < 6), true) + assert.strictEqual(await oneTo5().find((x) => x > 6), undefined) + assert.strictEqual(await Readable.from([]).some(() => true), false) + assert.strictEqual(await Readable.from([]).every(() => true), true) + assert.strictEqual(await Readable.from([]).find(() => true), undefined) } { // Some, find, and every work with an asynchronous stream and synchronous predicate - assert.strictEqual(await oneTo5Async().some((x) => x > 3), true); - assert.strictEqual(await oneTo5Async().every((x) => x > 3), false); - assert.strictEqual(await oneTo5Async().find((x) => x > 3), 4); - assert.strictEqual(await oneTo5Async().some((x) => x > 6), false); - assert.strictEqual(await oneTo5Async().every((x) => x < 6), true); - assert.strictEqual(await oneTo5Async().find((x) => x > 6), undefined); + 
assert.strictEqual(await oneTo5Async().some((x) => x > 3), true) + assert.strictEqual(await oneTo5Async().every((x) => x > 3), false) + assert.strictEqual(await oneTo5Async().find((x) => x > 3), 4) + assert.strictEqual(await oneTo5Async().some((x) => x > 6), false) + assert.strictEqual(await oneTo5Async().every((x) => x < 6), true) + assert.strictEqual(await oneTo5Async().find((x) => x > 6), undefined) } { // Some, find, and every work on synchronous streams with an asynchronous predicate - assert.strictEqual(await oneTo5().some(async (x) => x > 3), true); - assert.strictEqual(await oneTo5().every(async (x) => x > 3), false); - assert.strictEqual(await oneTo5().find(async (x) => x > 3), 4); - assert.strictEqual(await oneTo5().some(async (x) => x > 6), false); - assert.strictEqual(await oneTo5().every(async (x) => x < 6), true); - assert.strictEqual(await oneTo5().find(async (x) => x > 6), undefined); + assert.strictEqual(await oneTo5().some(async (x) => x > 3), true) + assert.strictEqual(await oneTo5().every(async (x) => x > 3), false) + assert.strictEqual(await oneTo5().find(async (x) => x > 3), 4) + assert.strictEqual(await oneTo5().some(async (x) => x > 6), false) + assert.strictEqual(await oneTo5().every(async (x) => x < 6), true) + assert.strictEqual(await oneTo5().find(async (x) => x > 6), undefined) } { // Some, find, and every work on asynchronous streams with an asynchronous predicate - assert.strictEqual(await oneTo5Async().some(async (x) => x > 3), true); - assert.strictEqual(await oneTo5Async().every(async (x) => x > 3), false); - assert.strictEqual(await oneTo5Async().find(async (x) => x > 3), 4); - assert.strictEqual(await oneTo5Async().some(async (x) => x > 6), false); - assert.strictEqual(await oneTo5Async().every(async (x) => x < 6), true); - assert.strictEqual(await oneTo5Async().find(async (x) => x > 6), undefined); + assert.strictEqual(await oneTo5Async().some(async (x) => x > 3), true) + assert.strictEqual(await oneTo5Async().every(async (x) => x > 3), false) + assert.strictEqual(await oneTo5Async().find(async (x) => x > 3), 4) + assert.strictEqual(await oneTo5Async().some(async (x) => x > 6), false) + assert.strictEqual(await oneTo5Async().every(async (x) => x < 6), true) + assert.strictEqual(await oneTo5Async().find(async (x) => x > 6), undefined) } { async function checkDestroyed(stream) { - await setTimeout(); - assert.strictEqual(stream.destroyed, true); + await setTimeout() + assert.strictEqual(stream.destroyed, true) } { // Some, find, and every short circuit - const someStream = oneTo5(); - await someStream.some(common.mustCall((x) => x > 2, 3)); - await checkDestroyed(someStream); + const someStream = oneTo5() + await someStream.some(common.mustCall((x) => x > 2, 3)) + await checkDestroyed(someStream) - const everyStream = oneTo5(); - await everyStream.every(common.mustCall((x) => x < 3, 3)); - await checkDestroyed(everyStream); + const everyStream = oneTo5() + await everyStream.every(common.mustCall((x) => x < 3, 3)) + await checkDestroyed(everyStream) - const findStream = oneTo5(); - await findStream.find(common.mustCall((x) => x > 1, 2)); - await checkDestroyed(findStream); + const findStream = oneTo5() + await findStream.find(common.mustCall((x) => x > 1, 2)) + await checkDestroyed(findStream) // When short circuit isn't possible the whole stream is iterated - await oneTo5().some(common.mustCall(() => false, 5)); - await oneTo5().every(common.mustCall(() => true, 5)); - await oneTo5().find(common.mustCall(() => false, 5)); + await 
oneTo5().some(common.mustCall(() => false, 5)) + await oneTo5().every(common.mustCall(() => true, 5)) + await oneTo5().find(common.mustCall(() => false, 5)) } { // Some, find, and every short circuit async stream/predicate - const someStream = oneTo5Async(); - await someStream.some(common.mustCall(async (x) => x > 2, 3)); - await checkDestroyed(someStream); + const someStream = oneTo5Async() + await someStream.some(common.mustCall(async (x) => x > 2, 3)) + await checkDestroyed(someStream) - const everyStream = oneTo5Async(); - await everyStream.every(common.mustCall(async (x) => x < 3, 3)); - await checkDestroyed(everyStream); + const everyStream = oneTo5Async() + await everyStream.every(common.mustCall(async (x) => x < 3, 3)) + await checkDestroyed(everyStream) - const findStream = oneTo5Async(); - await findStream.find(common.mustCall(async (x) => x > 1, 2)); - await checkDestroyed(findStream); + const findStream = oneTo5Async() + await findStream.find(common.mustCall(async (x) => x > 1, 2)) + await checkDestroyed(findStream) // When short circuit isn't possible the whole stream is iterated - await oneTo5Async().some(common.mustCall(async () => false, 5)); - await oneTo5Async().every(common.mustCall(async () => true, 5)); - await oneTo5Async().find(common.mustCall(async () => false, 5)); + await oneTo5Async().some(common.mustCall(async () => false, 5)) + await oneTo5Async().every(common.mustCall(async () => true, 5)) + await oneTo5Async().find(common.mustCall(async () => false, 5)) } } { // Concurrency doesn't affect which value is found. - const found = await Readable.from([1, 2]).find(async (val) => { - if (val === 1) { - await setTimeout(100); - } - return true; - }, { concurrency: 2 }); - assert.strictEqual(found, 1); + const found = await Readable.from([1, 2]).find( + async (val) => { + if (val === 1) { + await setTimeout(100) + } + return true + }, + { concurrency: 2 } + ) + assert.strictEqual(found, 1) } { // Support for AbortSignal for (const op of ['some', 'every', 'find']) { { - const ac = new AbortController(); - assert.rejects(Readable.from([1, 2, 3])[op]( - () => new Promise(() => { }), - { signal: ac.signal } - ), { - name: 'AbortError', - }, `${op} should abort correctly with sync abort`).then(common.mustCall()); - ac.abort(); + const ac = new AbortController() + assert + .rejects( + Readable.from([1, 2, 3])[op](() => new Promise(() => {}), { signal: ac.signal }), + { + name: 'AbortError' + }, + `${op} should abort correctly with sync abort` + ) + .then(common.mustCall()) + ac.abort() } { // Support for pre-aborted AbortSignal - assert.rejects(Readable.from([1, 2, 3])[op]( - () => new Promise(() => { }), - { signal: AbortSignal.abort() } - ), { - name: 'AbortError', - }, `${op} should abort with pre-aborted abort controller`).then(common.mustCall()); + assert + .rejects( + Readable.from([1, 2, 3])[op](() => new Promise(() => {}), { signal: AbortSignal.abort() }), + { + name: 'AbortError' + }, + `${op} should abort with pre-aborted abort controller` + ) + .then(common.mustCall()) } } } { // Error cases for (const op of ['some', 'every', 'find']) { - assert.rejects(async () => { - await Readable.from([1])[op](1); - }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid function`).then(common.mustCall()); - assert.rejects(async () => { - await Readable.from([1])[op]((x) => x, { - concurrency: 'Foo' - }); - }, /ERR_OUT_OF_RANGE/, `${op} should throw for invalid concurrency`).then(common.mustCall()); - assert.rejects(async () => { - await Readable.from([1])[op]((x) => x, 
1); - }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid concurrency`).then(common.mustCall()); - assert.rejects(async () => { - await Readable.from([1])[op]((x) => x, { - signal: true - }); - }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid signal`).then(common.mustCall()); + assert + .rejects( + async () => { + await Readable.from([1])[op](1) + }, + /ERR_INVALID_ARG_TYPE/, + `${op} should throw for invalid function` + ) + .then(common.mustCall()) + assert + .rejects( + async () => { + await Readable.from([1])[op]((x) => x, { + concurrency: 'Foo' + }) + }, + /ERR_OUT_OF_RANGE/, + `${op} should throw for invalid concurrency` + ) + .then(common.mustCall()) + assert + .rejects( + async () => { + await Readable.from([1])[op]((x) => x, 1) + }, + /ERR_INVALID_ARG_TYPE/, + `${op} should throw for invalid concurrency` + ) + .then(common.mustCall()) + assert + .rejects( + async () => { + await Readable.from([1])[op]((x) => x, { + signal: true + }) + }, + /ERR_INVALID_ARG_TYPE/, + `${op} should throw for invalid signal` + ) + .then(common.mustCall()) } } { for (const op of ['some', 'every', 'find']) { - const stream = oneTo5(); + const stream = oneTo5() Object.defineProperty(stream, 'map', { - value: common.mustNotCall(() => {}), - }); + value: common.mustNotCall(() => {}) + }) // Check that map isn't getting called. - stream[op](() => {}); + stream[op](() => {}) } } - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-toArray.js b/test/parallel/test-stream-toArray.js index 5b0e3d9de9..7ad66bf0fd 100644 --- a/test/parallel/test-stream-toArray.js +++ b/test/parallel/test-stream-toArray.js @@ -1,108 +1,126 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { - Readable, -} = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const { Readable } = require('../../lib/ours/index') + +const assert = require('assert') { // Works on a synchronous stream - (async () => { + ;(async () => { const tests = [ [], [1], [1, 2, 3], - Array(100).fill().map((_, i) => i), - ]; + Array(100) + .fill() + .map((_, i) => i) + ] + for (const test of tests) { - const stream = Readable.from(test); - const result = await stream.toArray(); - assert.deepStrictEqual(result, test); + const stream = Readable.from(test) + const result = await stream.toArray() + assert.deepStrictEqual(result, test) } - })().then(common.mustCall()); + })().then(common.mustCall()) } - { // Works on a non-object-mode stream - (async () => { - const firstBuffer = Buffer.from([1, 2, 3]); - const secondBuffer = Buffer.from([4, 5, 6]); - const stream = Readable.from( - [firstBuffer, secondBuffer], - { objectMode: false }); - const result = await stream.toArray(); - assert.strictEqual(Array.isArray(result), true); - assert.deepStrictEqual(result, [firstBuffer, secondBuffer]); - })().then(common.mustCall()); + ;(async () => { + const firstBuffer = Buffer.from([1, 2, 3]) + const secondBuffer = Buffer.from([4, 5, 
6]) + const stream = Readable.from([firstBuffer, secondBuffer], { + objectMode: false + }) + const result = await stream.toArray() + assert.strictEqual(Array.isArray(result), true) + assert.deepStrictEqual(result, [firstBuffer, secondBuffer]) + })().then(common.mustCall()) } - { // Works on an asynchronous stream - (async () => { + ;(async () => { const tests = [ [], [1], [1, 2, 3], - Array(100).fill().map((_, i) => i), - ]; + Array(100) + .fill() + .map((_, i) => i) + ] + for (const test of tests) { - const stream = Readable.from(test).map((x) => Promise.resolve(x)); - const result = await stream.toArray(); - assert.deepStrictEqual(result, test); + const stream = Readable.from(test).map((x) => Promise.resolve(x)) + const result = await stream.toArray() + assert.deepStrictEqual(result, test) } - })().then(common.mustCall()); + })().then(common.mustCall()) } - { // Support for AbortSignal - const ac = new AbortController(); - let stream; - assert.rejects(async () => { - stream = Readable.from([1, 2, 3]).map(async (x) => { - if (x === 3) { - await new Promise(() => {}); // Explicitly do not pass signal here + const ac = new AbortController() + let stream + assert + .rejects( + async () => { + stream = Readable.from([1, 2, 3]).map(async (x) => { + if (x === 3) { + await new Promise(() => {}) // Explicitly do not pass signal here + } + + return Promise.resolve(x) + }) + await stream.toArray({ + signal: ac.signal + }) + }, + { + name: 'AbortError' } - return Promise.resolve(x); - }); - await stream.toArray({ signal: ac.signal }); - }, { - name: 'AbortError', - }).then(common.mustCall(() => { - // Only stops toArray, does not destroy the stream - assert(stream.destroyed, false); - })); - ac.abort(); + ) + .then( + common.mustCall(() => { + // Only stops toArray, does not destroy the stream + assert(stream.destroyed, false) + }) + ) + ac.abort() } { // Test result is a Promise - const result = Readable.from([1, 2, 3, 4, 5]).toArray(); - assert.strictEqual(result instanceof Promise, true); + const result = Readable.from([1, 2, 3, 4, 5]).toArray() + assert.strictEqual(result instanceof Promise, true) } { // Error cases - assert.rejects(async () => { - await Readable.from([1]).toArray(1); - }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); - - assert.rejects(async () => { - await Readable.from([1]).toArray({ - signal: true - }); - }, /ERR_INVALID_ARG_TYPE/).then(common.mustCall()); + assert + .rejects(async () => { + await Readable.from([1]).toArray(1) + }, /ERR_INVALID_ARG_TYPE/) + .then(common.mustCall()) + assert + .rejects(async () => { + await Readable.from([1]).toArray({ + signal: true + }) + }, /ERR_INVALID_ARG_TYPE/) + .then(common.mustCall()) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-transform-callback-twice.js b/test/parallel/test-stream-transform-callback-twice.js index 8329911f78..0ab6afd278 100644 --- a/test/parallel/test-stream-transform-callback-twice.js +++ b/test/parallel/test-stream-transform-callback-twice.js @@ -1,29 +1,38 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const 
common = require('../common'); -const { Transform } = require('../../lib/ours/index'); -const stream = new Transform({ - transform(chunk, enc, cb) { cb(); cb(); } -}); +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') -stream.on('error', common.expectsError({ - name: 'Error', - message: 'Callback called multiple times', - code: 'ERR_MULTIPLE_CALLBACK' -})); +const { Transform } = require('../../lib/ours/index') -stream.write('foo'); +const stream = new Transform({ + transform(chunk, enc, cb) { + cb() + cb() + } +}) +stream.on( + 'error', + common.expectsError({ + name: 'Error', + message: 'Callback called multiple times', + code: 'ERR_MULTIPLE_CALLBACK' + }) +) +stream.write('foo') +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-transform-constructor-set-methods.js b/test/parallel/test-stream-transform-constructor-set-methods.js index 3901220672..0404839fed 100644 --- a/test/parallel/test-stream-transform-constructor-set-methods.js +++ b/test/parallel/test-stream-transform-constructor-set-methods.js @@ -1,58 +1,59 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); +const silentConsole = { + log() {}, -const assert = require('assert'); -const { Transform } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') -const t = new Transform(); +const assert = require('assert') +const { Transform } = require('../../lib/ours/index') + +const t = new Transform() assert.throws( () => { - t.end(Buffer.from('blerg')); + t.end(Buffer.from('blerg')) }, { name: 'Error', code: 'ERR_METHOD_NOT_IMPLEMENTED', message: 'The _transform() method is not implemented' } -); +) const _transform = common.mustCall((chunk, _, next) => { - next(); -}); + next() +}) const _final = common.mustCall((next) => { - next(); -}); + next() +}) const _flush = common.mustCall((next) => { - next(); -}); + next() +}) const t2 = new Transform({ transform: _transform, flush: _flush, final: _final -}); - -assert.strictEqual(t2._transform, _transform); -assert.strictEqual(t2._flush, _flush); -assert.strictEqual(t2._final, _final); - -t2.end(Buffer.from('blerg')); -t2.resume(); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) +assert.strictEqual(t2._transform, _transform) +assert.strictEqual(t2._flush, _flush) +assert.strictEqual(t2._final, _final) +t2.end(Buffer.from('blerg')) +t2.resume() +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-transform-destroy.js b/test/parallel/test-stream-transform-destroy.js index 9c27d026b5..895e6a1eeb 100644 --- a/test/parallel/test-stream-transform-destroy.js +++ b/test/parallel/test-stream-transform-destroy.js @@ -1,158 +1,142 @@ +'use strict' - 'use 
strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Transform } = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const { Transform } = require('../../lib/ours/index') + +const assert = require('assert') { const transform = new Transform({ transform(chunk, enc, cb) {} - }); - - transform.resume(); - - transform.on('end', common.mustNotCall()); - transform.on('close', common.mustCall()); - transform.on('finish', common.mustNotCall()); - - transform.destroy(); + }) + transform.resume() + transform.on('end', common.mustNotCall()) + transform.on('close', common.mustCall()) + transform.on('finish', common.mustNotCall()) + transform.destroy() } - { const transform = new Transform({ transform(chunk, enc, cb) {} - }); - transform.resume(); - - const expected = new Error('kaboom'); - - transform.on('end', common.mustNotCall()); - transform.on('finish', common.mustNotCall()); - transform.on('close', common.mustCall()); - transform.on('error', common.mustCall((err) => { - assert.strictEqual(err, expected); - })); - - transform.destroy(expected); + }) + transform.resume() + const expected = new Error('kaboom') + transform.on('end', common.mustNotCall()) + transform.on('finish', common.mustNotCall()) + transform.on('close', common.mustCall()) + transform.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, expected) + }) + ) + transform.destroy(expected) } - { const transform = new Transform({ transform(chunk, enc, cb) {} - }); - - transform._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, expected); - cb(err); - }, 1); - - const expected = new Error('kaboom'); - - transform.on('finish', common.mustNotCall('no finish event')); - transform.on('close', common.mustCall()); - transform.on('error', common.mustCall((err) => { - assert.strictEqual(err, expected); - })); - - transform.destroy(expected); + }) + transform._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, expected) + cb(err) + }, 1) + const expected = new Error('kaboom') + transform.on('finish', common.mustNotCall('no finish event')) + transform.on('close', common.mustCall()) + transform.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, expected) + }) + ) + transform.destroy(expected) } - { - const expected = new Error('kaboom'); + const expected = new Error('kaboom') const transform = new Transform({ transform(chunk, enc, cb) {}, - destroy: common.mustCall(function(err, cb) { - assert.strictEqual(err, expected); - cb(); - }, 1) - }); - transform.resume(); - - transform.on('end', common.mustNotCall('no end event')); - transform.on('close', common.mustCall()); - transform.on('finish', common.mustNotCall('no finish event')); - // Error is swallowed by the custom _destroy - transform.on('error', common.mustNotCall('no error event')); - - transform.destroy(expected); + destroy: common.mustCall(function (err, cb) { + assert.strictEqual(err, expected) + cb() + }, 1) + }) + transform.resume() + transform.on('end', common.mustNotCall('no end event')) + transform.on('close', common.mustCall()) + transform.on('finish', common.mustNotCall('no finish event')) // Error is swallowed by the custom _destroy + + transform.on('error', common.mustNotCall('no error event')) + transform.destroy(expected) } - { const transform = new Transform({ transform(chunk, 
enc, cb) {} - }); - - transform._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, null); - cb(); - }, 1); - - transform.destroy(); + }) + transform._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, null) + cb() + }, 1) + transform.destroy() } - { const transform = new Transform({ transform(chunk, enc, cb) {} - }); - transform.resume(); - - transform._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, null); + }) + transform.resume() + transform._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, null) process.nextTick(() => { - this.push(null); - this.end(); - cb(); - }); - }, 1); - - const fail = common.mustNotCall('no event'); - - transform.on('finish', fail); - transform.on('end', fail); - transform.on('close', common.mustCall()); - - transform.destroy(); - - transform.removeListener('end', fail); - transform.removeListener('finish', fail); - transform.on('end', common.mustCall()); - transform.on('finish', common.mustNotCall()); + this.push(null) + this.end() + cb() + }) + }, 1) + const fail = common.mustNotCall('no event') + transform.on('finish', fail) + transform.on('end', fail) + transform.on('close', common.mustCall()) + transform.destroy() + transform.removeListener('end', fail) + transform.removeListener('finish', fail) + transform.on('end', common.mustCall()) + transform.on('finish', common.mustNotCall()) } - { const transform = new Transform({ transform(chunk, enc, cb) {} - }); - - const expected = new Error('kaboom'); - - transform._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, null); - cb(expected); - }, 1); - - transform.on('close', common.mustCall()); - transform.on('finish', common.mustNotCall('no finish event')); - transform.on('end', common.mustNotCall('no end event')); - transform.on('error', common.mustCall((err) => { - assert.strictEqual(err, expected); - })); - - transform.destroy(); + }) + const expected = new Error('kaboom') + transform._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, null) + cb(expected) + }, 1) + transform.on('close', common.mustCall()) + transform.on('finish', common.mustNotCall('no finish event')) + transform.on('end', common.mustNotCall('no end event')) + transform.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, expected) + }) + ) + transform.destroy() } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-transform-final-sync.js b/test/parallel/test-stream-transform-final-sync.js index f88816504b..7999d44cab 100644 --- a/test/parallel/test-stream-transform-final-sync.js +++ b/test/parallel/test-stream-transform-final-sync.js @@ -1,17 +1,19 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); +const silentConsole = { + log() {}, -const stream = require('../../lib/ours/index'); -let state = 0; + error() {} +} +const common = require('../common') +const assert = require('assert') -// What you do +const stream = 
require('../../lib/ours/index') + +let state = 0 // What you do // // const stream = new stream.Transform({ // transform: function transformCallback(chunk, _, next) { @@ -63,63 +65,75 @@ let state = 0; const t = new stream.Transform({ objectMode: true, - transform: common.mustCall(function(chunk, _, next) { + transform: common.mustCall(function (chunk, _, next) { // transformCallback part 1 - assert.strictEqual(++state, chunk); - this.push(state); - // transformCallback part 2 - assert.strictEqual(++state, chunk + 2); - process.nextTick(next); + assert.strictEqual(++state, chunk) + this.push(state) // transformCallback part 2 + + assert.strictEqual(++state, chunk + 2) + process.nextTick(next) }, 3), - final: common.mustCall(function(done) { - state++; - // finalCallback part 1 - assert.strictEqual(state, 10); - state++; - // finalCallback part 2 - assert.strictEqual(state, 11); - done(); + final: common.mustCall(function (done) { + state++ // finalCallback part 1 + + assert.strictEqual(state, 10) + state++ // finalCallback part 2 + + assert.strictEqual(state, 11) + done() }, 1), - flush: common.mustCall(function(done) { - state++; - // fluchCallback part 1 - assert.strictEqual(state, 12); - process.nextTick(function() { - state++; - // fluchCallback part 2 - assert.strictEqual(state, 13); - done(); - }); + flush: common.mustCall(function (done) { + state++ // fluchCallback part 1 + + assert.strictEqual(state, 12) + process.nextTick(function () { + state++ // fluchCallback part 2 + + assert.strictEqual(state, 13) + done() + }) }, 1) -}); -t.on('finish', common.mustCall(function() { - state++; - // finishListener - assert.strictEqual(state, 15); -}, 1)); -t.on('end', common.mustCall(function() { - state++; - // endEvent - assert.strictEqual(state, 16); -}, 1)); -t.on('data', common.mustCall(function(d) { - // dataListener - assert.strictEqual(++state, d + 1); -}, 3)); -t.write(1); -t.write(4); -t.end(7, common.mustCall(function() { - state++; - // endMethodCallback - assert.strictEqual(state, 14); -}, 1)); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) +t.on( + 'finish', + common.mustCall(function () { + state++ // finishListener + + assert.strictEqual(state, 15) + }, 1) +) +t.on( + 'end', + common.mustCall(function () { + state++ // endEvent + + assert.strictEqual(state, 16) + }, 1) +) +t.on( + 'data', + common.mustCall(function (d) { + // dataListener + assert.strictEqual(++state, d + 1) + }, 3) +) +t.write(1) +t.write(4) +t.end( + 7, + common.mustCall(function () { + state++ // endMethodCallback + + assert.strictEqual(state, 14) + }, 1) +) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-transform-final.js b/test/parallel/test-stream-transform-final.js index 9228159d33..0014844a5b 100644 --- a/test/parallel/test-stream-transform-final.js +++ b/test/parallel/test-stream-transform-final.js @@ -1,17 +1,19 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); +const silentConsole = { + log() {}, -const stream = require('../../lib/ours/index'); -let state = 0; + error() {} +} 
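// [Editor's note — illustrative sketch, not part of this patch] The transform-final tests
// around this point (the sync variant above, the async one below) pin down the ordering
// they assert: every transform() callback runs first, then final() once end() has been
// called, then flush(), and only afterwards do 'finish' and 'end' fire. A minimal
// standalone sketch of that ordering, assuming the public 'stream' module (the tests
// themselves use ../../lib/ours/index) and made-up names:
const { Transform: SketchTransform } = require('stream')
const order = []
const sketch = new SketchTransform({
  objectMode: true,
  transform(chunk, _, cb) { order.push('transform'); cb(null, chunk) },
  final(cb) { order.push('final'); cb() },
  flush(cb) { order.push('flush'); cb() }
})
sketch.on('data', () => {}) // keep the readable side flowing so 'end' can fire
sketch.on('end', () => console.log(order.join(' -> '))) // transform -> final -> flush
sketch.end(1)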
+const common = require('../common') +const assert = require('assert') -// What you do: +const stream = require('../../lib/ours/index') + +let state = 0 // What you do: // // const stream = new stream.Transform({ // transform: function transformCallback(chunk, _, next) { @@ -43,7 +45,6 @@ let state = 0; // t.end(7, endMethodCallback); // // The order things are called - // 1. transformCallback part 1 // 2. dataListener // 3. transformCallback part 2 @@ -63,65 +64,77 @@ let state = 0; const t = new stream.Transform({ objectMode: true, - transform: common.mustCall(function(chunk, _, next) { + transform: common.mustCall(function (chunk, _, next) { // transformCallback part 1 - assert.strictEqual(++state, chunk); - this.push(state); - // transformCallback part 2 - assert.strictEqual(++state, chunk + 2); - process.nextTick(next); + assert.strictEqual(++state, chunk) + this.push(state) // transformCallback part 2 + + assert.strictEqual(++state, chunk + 2) + process.nextTick(next) }, 3), - final: common.mustCall(function(done) { - state++; - // finalCallback part 1 - assert.strictEqual(state, 10); - setTimeout(function() { - state++; - // finalCallback part 2 - assert.strictEqual(state, 11); - done(); - }, 100); + final: common.mustCall(function (done) { + state++ // finalCallback part 1 + + assert.strictEqual(state, 10) + setTimeout(function () { + state++ // finalCallback part 2 + + assert.strictEqual(state, 11) + done() + }, 100) }, 1), - flush: common.mustCall(function(done) { - state++; - // flushCallback part 1 - assert.strictEqual(state, 12); - process.nextTick(function() { - state++; - // flushCallback part 2 - assert.strictEqual(state, 13); - done(); - }); + flush: common.mustCall(function (done) { + state++ // flushCallback part 1 + + assert.strictEqual(state, 12) + process.nextTick(function () { + state++ // flushCallback part 2 + + assert.strictEqual(state, 13) + done() + }) }, 1) -}); -t.on('finish', common.mustCall(function() { - state++; - // finishListener - assert.strictEqual(state, 15); -}, 1)); -t.on('end', common.mustCall(function() { - state++; - // end event - assert.strictEqual(state, 16); -}, 1)); -t.on('data', common.mustCall(function(d) { - // dataListener - assert.strictEqual(++state, d + 1); -}, 3)); -t.write(1); -t.write(4); -t.end(7, common.mustCall(function() { - state++; - // endMethodCallback - assert.strictEqual(state, 14); -}, 1)); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) +t.on( + 'finish', + common.mustCall(function () { + state++ // finishListener + + assert.strictEqual(state, 15) + }, 1) +) +t.on( + 'end', + common.mustCall(function () { + state++ // end event + + assert.strictEqual(state, 16) + }, 1) +) +t.on( + 'data', + common.mustCall(function (d) { + // dataListener + assert.strictEqual(++state, d + 1) + }, 3) +) +t.write(1) +t.write(4) +t.end( + 7, + common.mustCall(function () { + state++ // endMethodCallback + + assert.strictEqual(state, 14) + }, 1) +) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-transform-flush-data.js b/test/parallel/test-stream-transform-flush-data.js index 0c229eb8ad..7541048528 100644 --- a/test/parallel/test-stream-transform-flush-data.js +++ 
b/test/parallel/test-stream-transform-flush-data.js @@ -1,43 +1,43 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -require('../common'); - -const assert = require('assert'); -const Transform = require('../../lib/ours/index').Transform; + error() {} +} +require('../common') +const assert = require('assert') -const expected = 'asdf'; +const Transform = require('../../lib/ours/index').Transform +const expected = 'asdf' function _transform(d, e, n) { - n(); + n() } function _flush(n) { - n(null, expected); + n(null, expected) } const t = new Transform({ transform: _transform, flush: _flush -}); - -t.end(Buffer.from('blerg')); +}) +t.end(Buffer.from('blerg')) t.on('data', (data) => { - assert.strictEqual(data.toString(), expected); -}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + assert.strictEqual(data.toString(), expected) +}) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-transform-objectmode-falsey-value.js b/test/parallel/test-stream-transform-objectmode-falsey-value.js index e9c6d6e96c..e2ae60a7e6 100644 --- a/test/parallel/test-stream-transform-objectmode-falsey-value.js +++ b/test/parallel/test-stream-transform-objectmode-falsey-value.js @@ -18,49 +18,60 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
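// [Editor's note — illustrative sketch, not part of this patch] The falsey-value test
// that follows pipes -1..10 through objectMode PassThrough streams to check that falsy
// chunks (such as 0 and -1) are delivered like any other value; in object mode only
// null is reserved as the end-of-stream marker. A minimal sketch of that property,
// assuming the public 'stream' module and made-up names:
const { PassThrough: SketchPassThrough } = require('stream')
const falsy = new SketchPassThrough({ objectMode: true })
falsy.on('data', (v) => console.log(v)) // logs 0 and false as ordinary chunks
falsy.write(0)
falsy.write(false)
falsy.end()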
+'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); + error() {} +} +const common = require('../common') -const stream = require('../../lib/ours/index'); -const PassThrough = stream.PassThrough; +const assert = require('assert') -const src = new PassThrough({ objectMode: true }); -const tx = new PassThrough({ objectMode: true }); -const dest = new PassThrough({ objectMode: true }); +const stream = require('../../lib/ours/index') -const expect = [ -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ]; -const results = []; - -dest.on('data', common.mustCall(function(x) { - results.push(x); -}, expect.length)); - -src.pipe(tx).pipe(dest); +const PassThrough = stream.PassThrough +const src = new PassThrough({ + objectMode: true +}) +const tx = new PassThrough({ + objectMode: true +}) +const dest = new PassThrough({ + objectMode: true +}) +const expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] +const results = [] +dest.on( + 'data', + common.mustCall(function (x) { + results.push(x) + }, expect.length) +) +src.pipe(tx).pipe(dest) +let i = -1 +const int = setInterval( + common.mustCall(function () { + if (results.length === expect.length) { + src.end() + clearInterval(int) + assert.deepStrictEqual(results, expect) + } else { + src.write(i++) + } + }, expect.length + 1), + 1 +) +/* replacement start */ -let i = -1; -const int = setInterval(common.mustCall(function() { - if (results.length === expect.length) { - src.end(); - clearInterval(int); - assert.deepStrictEqual(results, expect); +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') } else { - src.write(i++); + tap.fail(`test failed - exited code ${code}`) } -}, expect.length + 1), 1); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) +/* replacement end */ diff --git a/test/parallel/test-stream-transform-split-highwatermark.js b/test/parallel/test-stream-transform-split-highwatermark.js index bd4fbcd579..2f7f9ee3e7 100644 --- a/test/parallel/test-stream-transform-split-highwatermark.js +++ b/test/parallel/test-stream-transform-split-highwatermark.js @@ -1,107 +1,132 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); +const silentConsole = { + log() {}, -const { Transform, Readable, Writable } = require('../../lib/ours/index'); + error() {} +} +require('../common') + +const assert = require('assert') + +const { Transform, Readable, Writable } = require('../../lib/ours/index') -const DEFAULT = 16 * 1024; +const DEFAULT = 16 * 1024 function testTransform(expectedReadableHwm, expectedWritableHwm, options) { - const t = new Transform(options); - assert.strictEqual(t._readableState.highWaterMark, expectedReadableHwm); - assert.strictEqual(t._writableState.highWaterMark, expectedWritableHwm); -} + const t = new Transform(options) + assert.strictEqual(t._readableState.highWaterMark, expectedReadableHwm) + assert.strictEqual(t._writableState.highWaterMark, expectedWritableHwm) +} // Test overriding defaultHwm -// Test overriding defaultHwm -testTransform(666, DEFAULT, { readableHighWaterMark: 666 }); -testTransform(DEFAULT, 
777, { writableHighWaterMark: 777 }); +testTransform(666, DEFAULT, { + readableHighWaterMark: 666 +}) +testTransform(DEFAULT, 777, { + writableHighWaterMark: 777 +}) testTransform(666, 777, { readableHighWaterMark: 666, - writableHighWaterMark: 777, -}); + writableHighWaterMark: 777 +}) // test 0 overriding defaultHwm -// test 0 overriding defaultHwm -testTransform(0, DEFAULT, { readableHighWaterMark: 0 }); -testTransform(DEFAULT, 0, { writableHighWaterMark: 0 }); +testTransform(0, DEFAULT, { + readableHighWaterMark: 0 +}) +testTransform(DEFAULT, 0, { + writableHighWaterMark: 0 +}) // Test highWaterMark overriding -// Test highWaterMark overriding testTransform(555, 555, { highWaterMark: 555, - readableHighWaterMark: 666, -}); + readableHighWaterMark: 666 +}) testTransform(555, 555, { highWaterMark: 555, - writableHighWaterMark: 777, -}); + writableHighWaterMark: 777 +}) testTransform(555, 555, { highWaterMark: 555, readableHighWaterMark: 666, - writableHighWaterMark: 777, -}); + writableHighWaterMark: 777 +}) // Test highWaterMark = 0 overriding -// Test highWaterMark = 0 overriding testTransform(0, 0, { highWaterMark: 0, - readableHighWaterMark: 666, -}); + readableHighWaterMark: 666 +}) testTransform(0, 0, { highWaterMark: 0, - writableHighWaterMark: 777, -}); + writableHighWaterMark: 777 +}) testTransform(0, 0, { highWaterMark: 0, readableHighWaterMark: 666, - writableHighWaterMark: 777, -}); + writableHighWaterMark: 777 +}) // Test undefined, null +;[undefined, null].forEach((v) => { + testTransform(DEFAULT, DEFAULT, { + readableHighWaterMark: v + }) + testTransform(DEFAULT, DEFAULT, { + writableHighWaterMark: v + }) + testTransform(666, DEFAULT, { + highWaterMark: v, + readableHighWaterMark: 666 + }) + testTransform(DEFAULT, 777, { + highWaterMark: v, + writableHighWaterMark: 777 + }) +}) // test NaN -// Test undefined, null -[undefined, null].forEach((v) => { - testTransform(DEFAULT, DEFAULT, { readableHighWaterMark: v }); - testTransform(DEFAULT, DEFAULT, { writableHighWaterMark: v }); - testTransform(666, DEFAULT, { highWaterMark: v, readableHighWaterMark: 666 }); - testTransform(DEFAULT, 777, { highWaterMark: v, writableHighWaterMark: 777 }); -}); - -// test NaN { - assert.throws(() => { - new Transform({ readableHighWaterMark: NaN }); - }, { - name: 'TypeError', - code: 'ERR_INVALID_ARG_VALUE', - message: "The property 'options.readableHighWaterMark' is invalid. " + - 'Received NaN' - }); - - assert.throws(() => { - new Transform({ writableHighWaterMark: NaN }); - }, { - name: 'TypeError', - code: 'ERR_INVALID_ARG_VALUE', - message: "The property 'options.writableHighWaterMark' is invalid. " + - 'Received NaN' - }); -} + assert.throws( + () => { + new Transform({ + readableHighWaterMark: NaN + }) + }, + { + name: 'TypeError', + code: 'ERR_INVALID_ARG_VALUE', + message: "The property 'options.readableHighWaterMark' is invalid. " + 'Received NaN' + } + ) + assert.throws( + () => { + new Transform({ + writableHighWaterMark: NaN + }) + }, + { + name: 'TypeError', + code: 'ERR_INVALID_ARG_VALUE', + message: "The property 'options.writableHighWaterMark' is invalid. 
" + 'Received NaN' + } + ) +} // Test non Duplex streams ignore the options -// Test non Duplex streams ignore the options { - const r = new Readable({ readableHighWaterMark: 666 }); - assert.strictEqual(r._readableState.highWaterMark, DEFAULT); - const w = new Writable({ writableHighWaterMark: 777 }); - assert.strictEqual(w._writableState.highWaterMark, DEFAULT); + const r = new Readable({ + readableHighWaterMark: 666 + }) + assert.strictEqual(r._readableState.highWaterMark, DEFAULT) + const w = new Writable({ + writableHighWaterMark: 777 + }) + assert.strictEqual(w._writableState.highWaterMark, DEFAULT) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-transform-split-objectmode.js b/test/parallel/test-stream-transform-split-objectmode.js index 14461e9fac..db93fe9342 100644 --- a/test/parallel/test-stream-transform-split-objectmode.js +++ b/test/parallel/test-stream-transform-split-objectmode.js @@ -18,79 +18,76 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. - - - 'use strict' - - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); - -const Transform = require('../../lib/ours/index').Transform; - -const parser = new Transform({ readableObjectMode: true }); - -assert(parser._readableState.objectMode); -assert(!parser._writableState.objectMode); -assert.strictEqual(parser.readableHighWaterMark, 16); -assert.strictEqual(parser.writableHighWaterMark, 16 * 1024); -assert.strictEqual(parser.readableHighWaterMark, - parser._readableState.highWaterMark); -assert.strictEqual(parser.writableHighWaterMark, - parser._writableState.highWaterMark); - -parser._transform = function(chunk, enc, callback) { - callback(null, { val: chunk[0] }); -}; - -let parsed; - -parser.on('data', function(obj) { - parsed = obj; -}); - -parser.end(Buffer.from([42])); - -process.on('exit', function() { - assert.strictEqual(parsed.val, 42); -}); - - -const serializer = new Transform({ writableObjectMode: true }); - -assert(!serializer._readableState.objectMode); -assert(serializer._writableState.objectMode); -assert.strictEqual(serializer.readableHighWaterMark, 16 * 1024); -assert.strictEqual(serializer.writableHighWaterMark, 16); -assert.strictEqual(parser.readableHighWaterMark, - parser._readableState.highWaterMark); -assert.strictEqual(parser.writableHighWaterMark, - parser._writableState.highWaterMark); - -serializer._transform = function(obj, _, callback) { - callback(null, Buffer.from([obj.val])); -}; - -let serialized; - -serializer.on('data', function(chunk) { - serialized = chunk; -}); - -serializer.write({ val: 42 }); - -process.on('exit', function() { - assert.strictEqual(serialized[0], 42); -}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +'use strict' + +const tap = require('tap') + +const silentConsole = { + log() {}, + 
+ error() {} +} +require('../common') + +const assert = require('assert') + +const Transform = require('../../lib/ours/index').Transform + +const parser = new Transform({ + readableObjectMode: true +}) +assert(parser._readableState.objectMode) +assert(!parser._writableState.objectMode) +assert.strictEqual(parser.readableHighWaterMark, 16) +assert.strictEqual(parser.writableHighWaterMark, 16 * 1024) +assert.strictEqual(parser.readableHighWaterMark, parser._readableState.highWaterMark) +assert.strictEqual(parser.writableHighWaterMark, parser._writableState.highWaterMark) + +parser._transform = function (chunk, enc, callback) { + callback(null, { + val: chunk[0] + }) +} + +let parsed +parser.on('data', function (obj) { + parsed = obj +}) +parser.end(Buffer.from([42])) +process.on('exit', function () { + assert.strictEqual(parsed.val, 42) +}) +const serializer = new Transform({ + writableObjectMode: true +}) +assert(!serializer._readableState.objectMode) +assert(serializer._writableState.objectMode) +assert.strictEqual(serializer.readableHighWaterMark, 16 * 1024) +assert.strictEqual(serializer.writableHighWaterMark, 16) +assert.strictEqual(parser.readableHighWaterMark, parser._readableState.highWaterMark) +assert.strictEqual(parser.writableHighWaterMark, parser._writableState.highWaterMark) + +serializer._transform = function (obj, _, callback) { + callback(null, Buffer.from([obj.val])) +} + +let serialized +serializer.on('data', function (chunk) { + serialized = chunk +}) +serializer.write({ + val: 42 +}) +process.on('exit', function () { + assert.strictEqual(serialized[0], 42) +}) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-uint8array.js b/test/parallel/test-stream-uint8array.js index 6cd8e19ac8..a57e13da16 100644 --- a/test/parallel/test-stream-uint8array.js +++ b/test/parallel/test-stream-uint8array.js @@ -1,116 +1,105 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); +const silentConsole = { + log() {}, -const { Readable, Writable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') -const ABC = new Uint8Array([0x41, 0x42, 0x43]); -const DEF = new Uint8Array([0x44, 0x45, 0x46]); -const GHI = new Uint8Array([0x47, 0x48, 0x49]); +const { Readable, Writable } = require('../../lib/ours/index') +const ABC = new Uint8Array([0x41, 0x42, 0x43]) +const DEF = new Uint8Array([0x44, 0x45, 0x46]) +const GHI = new Uint8Array([0x47, 0x48, 0x49]) { // Simple Writable test. - - let n = 0; + let n = 0 const writable = new Writable({ write: common.mustCall((chunk, encoding, cb) => { - assert(chunk instanceof Buffer); + assert(chunk instanceof Buffer) + if (n++ === 0) { - assert.strictEqual(String(chunk), 'ABC'); + assert.strictEqual(String(chunk), 'ABC') } else { - assert.strictEqual(String(chunk), 'DEF'); + assert.strictEqual(String(chunk), 'DEF') } - cb(); + cb() }, 2) - }); - - writable.write(ABC); - writable.end(DEF); + }) + writable.write(ABC) + writable.end(DEF) } - { // Writable test, pass in Uint8Array in object mode. 
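// [Editor's note — illustrative sketch, not part of this patch] The simple Writable
// block above and the object-mode block that follows make one contrast: by default a
// Writable converts a Uint8Array chunk to a Buffer (encoding 'buffer') before _write()
// sees it, while in objectMode the original Uint8Array is passed through untouched with
// the default 'utf8' encoding. A minimal sketch, assuming the public 'stream' module
// and made-up names:
const { Writable: SketchWritable } = require('stream')
new SketchWritable({
  write(chunk, enc, cb) { console.log(Buffer.isBuffer(chunk), enc); cb() } // -> true buffer
}).end(new Uint8Array([0x41]))
new SketchWritable({
  objectMode: true,
  write(chunk, enc, cb) { console.log(Buffer.isBuffer(chunk), enc); cb() } // -> false utf8
}).end(new Uint8Array([0x41]))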
- const writable = new Writable({ objectMode: true, write: common.mustCall((chunk, encoding, cb) => { - assert(!(chunk instanceof Buffer)); - assert(chunk instanceof Uint8Array); - assert.strictEqual(chunk, ABC); - assert.strictEqual(encoding, 'utf8'); - cb(); + assert(!(chunk instanceof Buffer)) + assert(chunk instanceof Uint8Array) + assert.strictEqual(chunk, ABC) + assert.strictEqual(encoding, 'utf8') + cb() }) - }); - - writable.end(ABC); + }) + writable.end(ABC) } - { // Writable test, multiple writes carried out via writev. - let callback; - + let callback const writable = new Writable({ write: common.mustCall((chunk, encoding, cb) => { - assert(chunk instanceof Buffer); - assert.strictEqual(encoding, 'buffer'); - assert.strictEqual(String(chunk), 'ABC'); - callback = cb; + assert(chunk instanceof Buffer) + assert.strictEqual(encoding, 'buffer') + assert.strictEqual(String(chunk), 'ABC') + callback = cb }), writev: common.mustCall((chunks, cb) => { - assert.strictEqual(chunks.length, 2); - assert.strictEqual(chunks[0].encoding, 'buffer'); - assert.strictEqual(chunks[1].encoding, 'buffer'); - assert.strictEqual(chunks[0].chunk + chunks[1].chunk, 'DEFGHI'); + assert.strictEqual(chunks.length, 2) + assert.strictEqual(chunks[0].encoding, 'buffer') + assert.strictEqual(chunks[1].encoding, 'buffer') + assert.strictEqual(chunks[0].chunk + chunks[1].chunk, 'DEFGHI') }) - }); - - writable.write(ABC); - writable.write(DEF); - writable.end(GHI); - callback(); + }) + writable.write(ABC) + writable.write(DEF) + writable.end(GHI) + callback() } - { // Simple Readable test. const readable = new Readable({ read() {} - }); - - readable.push(DEF); - readable.unshift(ABC); - - const buf = readable.read(); - assert(buf instanceof Buffer); - assert.deepStrictEqual([...buf], [...ABC, ...DEF]); + }) + readable.push(DEF) + readable.unshift(ABC) + const buf = readable.read() + assert(buf instanceof Buffer) + assert.deepStrictEqual([...buf], [...ABC, ...DEF]) } - { // Readable test, setEncoding. 
const readable = new Readable({ read() {} - }); - - readable.setEncoding('utf8'); - - readable.push(DEF); - readable.unshift(ABC); - - const out = readable.read(); - assert.strictEqual(out, 'ABCDEF'); + }) + readable.setEncoding('utf8') + readable.push(DEF) + readable.unshift(ABC) + const out = readable.read() + assert.strictEqual(out, 'ABCDEF') } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-unpipe-event.js b/test/parallel/test-stream-unpipe-event.js index 8bfedef105..0ce62499ed 100644 --- a/test/parallel/test-stream-unpipe-event.js +++ b/test/parallel/test-stream-unpipe-event.js @@ -1,100 +1,109 @@ +'use strict' - 'use strict' +const tap = require('tap') + +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Writable, Readable } = require('../../lib/ours/index') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { Writable, Readable } = require('../../lib/ours/index'); class NullWriteable extends Writable { _write(chunk, encoding, callback) { - return callback(); + return callback() } } + class QuickEndReadable extends Readable { _read() { - this.push(null); + this.push(null) } } + class NeverEndReadable extends Readable { _read() {} } { - const dest = new NullWriteable(); - const src = new QuickEndReadable(); - dest.on('pipe', common.mustCall()); - dest.on('unpipe', common.mustCall()); - src.pipe(dest); + const dest = new NullWriteable() + const src = new QuickEndReadable() + dest.on('pipe', common.mustCall()) + dest.on('unpipe', common.mustCall()) + src.pipe(dest) setImmediate(() => { - assert.strictEqual(src._readableState.pipes.length, 0); - }); + assert.strictEqual(src._readableState.pipes.length, 0) + }) } - { - const dest = new NullWriteable(); - const src = new NeverEndReadable(); - dest.on('pipe', common.mustCall()); - dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted')); - src.pipe(dest); + const dest = new NullWriteable() + const src = new NeverEndReadable() + dest.on('pipe', common.mustCall()) + dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted')) + src.pipe(dest) setImmediate(() => { - assert.strictEqual(src._readableState.pipes.length, 1); - }); + assert.strictEqual(src._readableState.pipes.length, 1) + }) } - { - const dest = new NullWriteable(); - const src = new NeverEndReadable(); - dest.on('pipe', common.mustCall()); - dest.on('unpipe', common.mustCall()); - src.pipe(dest); - src.unpipe(dest); + const dest = new NullWriteable() + const src = new NeverEndReadable() + dest.on('pipe', common.mustCall()) + dest.on('unpipe', common.mustCall()) + src.pipe(dest) + src.unpipe(dest) setImmediate(() => { - assert.strictEqual(src._readableState.pipes.length, 0); - }); + assert.strictEqual(src._readableState.pipes.length, 0) + }) } - { - const dest = new NullWriteable(); - const src = new QuickEndReadable(); - dest.on('pipe', common.mustCall()); - dest.on('unpipe', common.mustCall()); - src.pipe(dest, { end: false }); + const dest = new 
NullWriteable() + const src = new QuickEndReadable() + dest.on('pipe', common.mustCall()) + dest.on('unpipe', common.mustCall()) + src.pipe(dest, { + end: false + }) setImmediate(() => { - assert.strictEqual(src._readableState.pipes.length, 0); - }); + assert.strictEqual(src._readableState.pipes.length, 0) + }) } - { - const dest = new NullWriteable(); - const src = new NeverEndReadable(); - dest.on('pipe', common.mustCall()); - dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted')); - src.pipe(dest, { end: false }); + const dest = new NullWriteable() + const src = new NeverEndReadable() + dest.on('pipe', common.mustCall()) + dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted')) + src.pipe(dest, { + end: false + }) setImmediate(() => { - assert.strictEqual(src._readableState.pipes.length, 1); - }); + assert.strictEqual(src._readableState.pipes.length, 1) + }) } - { - const dest = new NullWriteable(); - const src = new NeverEndReadable(); - dest.on('pipe', common.mustCall()); - dest.on('unpipe', common.mustCall()); - src.pipe(dest, { end: false }); - src.unpipe(dest); + const dest = new NullWriteable() + const src = new NeverEndReadable() + dest.on('pipe', common.mustCall()) + dest.on('unpipe', common.mustCall()) + src.pipe(dest, { + end: false + }) + src.unpipe(dest) setImmediate(() => { - assert.strictEqual(src._readableState.pipes.length, 0); - }); + assert.strictEqual(src._readableState.pipes.length, 0) + }) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-unshift-empty-chunk.js b/test/parallel/test-stream-unshift-empty-chunk.js index db44b38f79..89dccd6c9f 100644 --- a/test/parallel/test-stream-unshift-empty-chunk.js +++ b/test/parallel/test-stream-unshift-empty-chunk.js @@ -18,78 +18,80 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); + error() {} +} +require('../common') -// This test verifies that stream.unshift(Buffer.alloc(0)) or +const assert = require('assert') // This test verifies that stream.unshift(Buffer.alloc(0)) or // stream.unshift('') does not set state.reading=false. -const Readable = require('../../lib/ours/index').Readable; -const r = new Readable(); -let nChunks = 10; -const chunk = Buffer.alloc(10, 'x'); +const Readable = require('../../lib/ours/index').Readable -r._read = function(n) { +const r = new Readable() +let nChunks = 10 +const chunk = Buffer.alloc(10, 'x') + +r._read = function (n) { setImmediate(() => { - r.push(--nChunks === 0 ? null : chunk); - }); -}; + r.push(--nChunks === 0 ? 
null : chunk) + }) +} -let readAll = false; -const seen = []; +let readAll = false +const seen = [] r.on('readable', () => { - let chunk; + let chunk + while ((chunk = r.read()) !== null) { - seen.push(chunk.toString()); - // Simulate only reading a certain amount of the data, + seen.push(chunk.toString()) // Simulate only reading a certain amount of the data, // and then putting the rest of the chunk back into the // stream, like a parser might do. We just fill it with // 'y' so that it's easy to see which bits were touched, // and which were not. - const putBack = Buffer.alloc(readAll ? 0 : 5, 'y'); - readAll = !readAll; - r.unshift(putBack); - } -}); - -const expect = - [ 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy', - 'xxxxxxxxxx', - 'yyyyy' ]; + const putBack = Buffer.alloc(readAll ? 0 : 5, 'y') + readAll = !readAll + r.unshift(putBack) + } +}) +const expect = [ + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy', + 'xxxxxxxxxx', + 'yyyyy' +] r.on('end', () => { - assert.deepStrictEqual(seen, expect); - silentConsole.log('ok'); -}); + assert.deepStrictEqual(seen, expect) + silentConsole.log('ok') +}) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-unshift-read-race.js b/test/parallel/test-stream-unshift-read-race.js index 15445d121a..fd09812727 100644 --- a/test/parallel/test-stream-unshift-read-race.js +++ b/test/parallel/test-stream-unshift-read-race.js @@ -18,126 +18,148 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); + error() {} +} +const common = require('../common') -// This test verifies that: +const assert = require('assert') // This test verifies that: // 1. unshift() does not cause colliding _read() calls. // 2. unshift() after the 'end' event is an error, but after the EOF // signalling null, it is ok, and just creates a new readable chunk. // 3. push() after the EOF signaling null is an error. // 4. _read() is not called after pushing the EOF null chunk. 
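// [Editor's note — illustrative sketch, not part of this patch] Points 2 and 3 of the
// checklist above, in isolation: once the EOF null has been pushed, unshift() is still
// allowed (it simply queues another readable chunk, as long as 'end' has not been
// emitted), while push() is an ERR_STREAM_PUSH_AFTER_EOF violation. A minimal sketch,
// assuming the public 'stream' module and made-up names; with default options the
// violation surfaces as an 'error' event (the test below constructs its Readable with
// autoDestroy: false and no 'error' listener, which is what lets it assert a
// synchronous throw instead):
const { Readable: SketchReadable } = require('stream')
const sketchR = new SketchReadable({ read() {} })
sketchR.on('error', (err) => console.log(err.code)) // ERR_STREAM_PUSH_AFTER_EOF
sketchR.push('tail')
sketchR.push(null)      // signal EOF
sketchR.unshift('head') // ok: re-queues data ahead of what is already buffered
sketchR.push('more')    // not ok: push() after the EOF null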
-const stream = require('../../lib/ours/index'); -const hwm = 10; -const r = stream.Readable({ highWaterMark: hwm, autoDestroy: false }); -const chunks = 10; +const stream = require('../../lib/ours/index') + +const hwm = 10 +const r = stream.Readable({ + highWaterMark: hwm, + autoDestroy: false +}) +const chunks = 10 +const data = Buffer.allocUnsafe(chunks * hwm + Math.ceil(hwm / 2)) -const data = Buffer.allocUnsafe(chunks * hwm + Math.ceil(hwm / 2)); for (let i = 0; i < data.length; i++) { - const c = 'asdf'.charCodeAt(i % 4); - data[i] = c; + const c = 'asdf'.charCodeAt(i % 4) + data[i] = c } -let pos = 0; -let pushedNull = false; -r._read = function(n) { - assert(!pushedNull, '_read after null push'); +let pos = 0 +let pushedNull = false + +r._read = function (n) { + assert(!pushedNull, '_read after null push') // Every third chunk is fast - // Every third chunk is fast - push(!(chunks % 3)); + push(!(chunks % 3)) function push(fast) { - assert(!pushedNull, 'push() after null push'); - const c = pos >= data.length ? null : data.slice(pos, pos + n); - pushedNull = c === null; + assert(!pushedNull, 'push() after null push') + const c = pos >= data.length ? null : data.slice(pos, pos + n) + pushedNull = c === null + if (fast) { - pos += n; - r.push(c); - if (c === null) pushError(); + pos += n + r.push(c) + if (c === null) pushError() } else { - setTimeout(function() { - pos += n; - r.push(c); - if (c === null) pushError(); - }, 1); + setTimeout(function () { + pos += n + r.push(c) + if (c === null) pushError() + }, 1) } } -}; +} function pushError() { - r.unshift(Buffer.allocUnsafe(1)); - w.end(); - - assert.throws(() => { - r.push(Buffer.allocUnsafe(1)); - }, { - code: 'ERR_STREAM_PUSH_AFTER_EOF', - name: 'Error', - message: 'stream.push() after EOF' - }); + r.unshift(Buffer.allocUnsafe(1)) + w.end() + assert.throws( + () => { + r.push(Buffer.allocUnsafe(1)) + }, + { + code: 'ERR_STREAM_PUSH_AFTER_EOF', + name: 'Error', + message: 'stream.push() after EOF' + } + ) } +const w = stream.Writable() +const written = [] -const w = stream.Writable(); -const written = []; -w._write = function(chunk, encoding, cb) { - written.push(chunk.toString()); - cb(); -}; +w._write = function (chunk, encoding, cb) { + written.push(chunk.toString()) + cb() +} -r.on('end', common.mustNotCall()); +r.on('end', common.mustNotCall()) +r.on('readable', function () { + let chunk -r.on('readable', function() { - let chunk; while (null !== (chunk = r.read(10))) { - w.write(chunk); - if (chunk.length > 4) - r.unshift(Buffer.from('1234')); + w.write(chunk) + if (chunk.length > 4) r.unshift(Buffer.from('1234')) } -}); - -w.on('finish', common.mustCall(function() { - // Each chunk should start with 1234, and then be asfdasdfasdf... - // The first got pulled out before the first unshift('1234'), so it's - // lacking that piece. - assert.strictEqual(written[0], 'asdfasdfas'); - let asdf = 'd'; - silentConsole.error(`0: ${written[0]}`); - for (let i = 1; i < written.length; i++) { - silentConsole.error(`${i.toString(32)}: ${written[i]}`); - assert.strictEqual(written[i].slice(0, 4), '1234'); - for (let j = 4; j < written[i].length; j++) { - const c = written[i].charAt(j); - assert.strictEqual(c, asdf); - switch (asdf) { - case 'a': asdf = 's'; break; - case 's': asdf = 'd'; break; - case 'd': asdf = 'f'; break; - case 'f': asdf = 'a'; break; +}) +w.on( + 'finish', + common.mustCall(function () { + // Each chunk should start with 1234, and then be asfdasdfasdf... 
+ // The first got pulled out before the first unshift('1234'), so it's + // lacking that piece. + assert.strictEqual(written[0], 'asdfasdfas') + let asdf = 'd' + silentConsole.error(`0: ${written[0]}`) + + for (let i = 1; i < written.length; i++) { + silentConsole.error(`${i.toString(32)}: ${written[i]}`) + assert.strictEqual(written[i].slice(0, 4), '1234') + + for (let j = 4; j < written[i].length; j++) { + const c = written[i].charAt(j) + assert.strictEqual(c, asdf) + + switch (asdf) { + case 'a': + asdf = 's' + break + + case 's': + asdf = 'd' + break + + case 'd': + asdf = 'f' + break + + case 'f': + asdf = 'a' + break + } } } + }) +) +process.on('exit', function () { + assert.strictEqual(written.length, 18) + silentConsole.log('ok') +}) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) } -})); - -process.on('exit', function() { - assert.strictEqual(written.length, 18); - silentConsole.log('ok'); -}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-aborted.js b/test/parallel/test-stream-writable-aborted.js index 68f74e9858..bc82a85c8d 100644 --- a/test/parallel/test-stream-writable-aborted.js +++ b/test/parallel/test-stream-writable-aborted.js @@ -1,41 +1,42 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -require('../common'); -const assert = require('assert'); -const { Writable } = require('../../lib/ours/index'); + error() {} +} +require('../common') + +const assert = require('assert') + +const { Writable } = require('../../lib/ours/index') { const writable = new Writable({ - write() { - } - }); - assert.strictEqual(writable.writableAborted, false); - writable.destroy(); - assert.strictEqual(writable.writableAborted, true); + write() {} + }) + assert.strictEqual(writable.writableAborted, false) + writable.destroy() + assert.strictEqual(writable.writableAborted, true) } - { const writable = new Writable({ - write() { - } - }); - assert.strictEqual(writable.writableAborted, false); - writable.end(); - writable.destroy(); - assert.strictEqual(writable.writableAborted, true); + write() {} + }) + assert.strictEqual(writable.writableAborted, false) + writable.end() + writable.destroy() + assert.strictEqual(writable.writableAborted, true) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-callback-twice.js b/test/parallel/test-stream-writable-callback-twice.js index 819569fa14..e636ca16f4 100644 --- a/test/parallel/test-stream-writable-callback-twice.js +++ b/test/parallel/test-stream-writable-callback-twice.js @@ -1,29 +1,38 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); 
-const { Writable } = require('../../lib/ours/index'); -const stream = new Writable({ - write(chunk, enc, cb) { cb(); cb(); } -}); +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') -stream.on('error', common.expectsError({ - name: 'Error', - message: 'Callback called multiple times', - code: 'ERR_MULTIPLE_CALLBACK' -})); +const { Writable } = require('../../lib/ours/index') -stream.write('foo'); +const stream = new Writable({ + write(chunk, enc, cb) { + cb() + cb() + } +}) +stream.on( + 'error', + common.expectsError({ + name: 'Error', + message: 'Callback called multiple times', + code: 'ERR_MULTIPLE_CALLBACK' + }) +) +stream.write('foo') +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-change-default-encoding.js b/test/parallel/test-stream-writable-change-default-encoding.js index f356d28dec..78e4546458 100644 --- a/test/parallel/test-stream-writable-change-default-encoding.js +++ b/test/parallel/test-stream-writable-change-default-encoding.js @@ -18,76 +18,94 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); + error() {} +} +require('../common') + +const assert = require('assert') -const stream = require('../../lib/ours/index'); +const stream = require('../../lib/ours/index') class MyWritable extends stream.Writable { constructor(fn, options) { - super(options); - this.fn = fn; + super(options) + this.fn = fn } _write(chunk, encoding, callback) { - this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding); - callback(); + this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding) + callback() } } -(function defaultCondingIsUtf8() { - const m = new MyWritable(function(isBuffer, type, enc) { - assert.strictEqual(enc, 'utf8'); - }, { decodeStrings: false }); - m.write('foo'); - m.end(); -}()); - -(function changeDefaultEncodingToAscii() { - const m = new MyWritable(function(isBuffer, type, enc) { - assert.strictEqual(enc, 'ascii'); - }, { decodeStrings: false }); - m.setDefaultEncoding('ascii'); - m.write('bar'); - m.end(); -}()); - -// Change default encoding to invalid value. 
-assert.throws(() => { +;(function defaultCondingIsUtf8() { const m = new MyWritable( - (isBuffer, type, enc) => {}, - { decodeStrings: false }); - m.setDefaultEncoding({}); - m.write('bar'); - m.end(); -}, { - name: 'TypeError', - code: 'ERR_UNKNOWN_ENCODING', - message: 'Unknown encoding: {}' -}); - -(function checkVariableCaseEncoding() { - const m = new MyWritable(function(isBuffer, type, enc) { - assert.strictEqual(enc, 'ascii'); - }, { decodeStrings: false }); - m.setDefaultEncoding('AsCii'); - m.write('bar'); - m.end(); -}()); + function (isBuffer, type, enc) { + assert.strictEqual(enc, 'utf8') + }, + { + decodeStrings: false + } + ) + m.write('foo') + m.end() +})() +;(function changeDefaultEncodingToAscii() { + const m = new MyWritable( + function (isBuffer, type, enc) { + assert.strictEqual(enc, 'ascii') + }, + { + decodeStrings: false + } + ) + m.setDefaultEncoding('ascii') + m.write('bar') + m.end() +})() // Change default encoding to invalid value. - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); +assert.throws( + () => { + const m = new MyWritable((isBuffer, type, enc) => {}, { + decodeStrings: false + }) + m.setDefaultEncoding({}) + m.write('bar') + m.end() + }, + { + name: 'TypeError', + code: 'ERR_UNKNOWN_ENCODING', + message: 'Unknown encoding: {}' + } +) +;(function checkVariableCaseEncoding() { + const m = new MyWritable( + function (isBuffer, type, enc) { + assert.strictEqual(enc, 'ascii') + }, + { + decodeStrings: false } - }); - /* replacement end */ + ) + m.setDefaultEncoding('AsCii') + m.write('bar') + m.end() +})() +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-clear-buffer.js b/test/parallel/test-stream-writable-clear-buffer.js index 7e2a387c23..f1a390c6e3 100644 --- a/test/parallel/test-stream-writable-clear-buffer.js +++ b/test/parallel/test-stream-writable-clear-buffer.js @@ -1,50 +1,54 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -// This test ensures that the _writeableState.bufferedRequestCount and + error() {} +} // This test ensures that the _writeableState.bufferedRequestCount and // the actual buffered request count are the same. -const common = require('../common'); -const Stream = require('../../lib/ours/index'); -const assert = require('assert'); +const common = require('../common') + +const Stream = require('../../lib/ours/index') + +const assert = require('assert') class StreamWritable extends Stream.Writable { constructor() { - super({ objectMode: true }); - } - - // Refs: https://github.com/nodejs/node/issues/6758 + super({ + objectMode: true + }) + } // Refs: https://github.com/nodejs/node/issues/6758 // We need a timer like on the original issue thread. // Otherwise the code will never reach our test case. 
+ _write(chunk, encoding, cb) { - setImmediate(cb); + setImmediate(cb) } } -const testStream = new StreamWritable(); -testStream.cork(); +const testStream = new StreamWritable() +testStream.cork() for (let i = 1; i <= 5; i++) { - testStream.write(i, common.mustCall(() => { - assert.strictEqual( - testStream._writableState.bufferedRequestCount, - testStream._writableState.getBuffer().length - ); - })); + testStream.write( + i, + common.mustCall(() => { + assert.strictEqual(testStream._writableState.bufferedRequestCount, testStream._writableState.getBuffer().length) + }) + ) } -testStream.end(); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +testStream.end() +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-constructor-set-methods.js b/test/parallel/test-stream-writable-constructor-set-methods.js index 0353401b4f..1821dff1a4 100644 --- a/test/parallel/test-stream-writable-constructor-set-methods.js +++ b/test/parallel/test-stream-writable-constructor-set-methods.js @@ -1,56 +1,58 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); +const silentConsole = { + log() {}, -const assert = require('assert'); -const { Writable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') -const bufferBlerg = Buffer.from('blerg'); -const w = new Writable(); +const assert = require('assert') +const { Writable } = require('../../lib/ours/index') + +const bufferBlerg = Buffer.from('blerg') +const w = new Writable() assert.throws( () => { - w.end(bufferBlerg); + w.end(bufferBlerg) }, { name: 'Error', code: 'ERR_METHOD_NOT_IMPLEMENTED', message: 'The _write() method is not implemented' } -); +) const _write = common.mustCall((chunk, _, next) => { - next(); -}); + next() +}) const _writev = common.mustCall((chunks, next) => { - assert.strictEqual(chunks.length, 2); - next(); -}); - -const w2 = new Writable({ write: _write, writev: _writev }); - -assert.strictEqual(w2._write, _write); -assert.strictEqual(w2._writev, _writev); - -w2.write(bufferBlerg); - -w2.cork(); -w2.write(bufferBlerg); -w2.write(bufferBlerg); - -w2.end(); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + assert.strictEqual(chunks.length, 2) + next() +}) + +const w2 = new Writable({ + write: _write, + writev: _writev +}) +assert.strictEqual(w2._write, _write) +assert.strictEqual(w2._writev, _writev) +w2.write(bufferBlerg) +w2.cork() +w2.write(bufferBlerg) +w2.write(bufferBlerg) +w2.end() +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-decoded-encoding.js b/test/parallel/test-stream-writable-decoded-encoding.js index 8180205fc3..5528943b2b 100644 --- a/test/parallel/test-stream-writable-decoded-encoding.js +++ b/test/parallel/test-stream-writable-decoded-encoding.js 
@@ -18,56 +18,68 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); + error() {} +} +require('../common') + +const assert = require('assert') -const stream = require('../../lib/ours/index'); +const stream = require('../../lib/ours/index') class MyWritable extends stream.Writable { constructor(fn, options) { - super(options); - this.fn = fn; + super(options) + this.fn = fn } _write(chunk, encoding, callback) { - this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding); - callback(); + this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding) + callback() } } { - const m = new MyWritable(function(isBuffer, type, enc) { - assert(isBuffer); - assert.strictEqual(type, 'object'); - assert.strictEqual(enc, 'buffer'); - }, { decodeStrings: true }); - m.write('some-text', 'utf8'); - m.end(); + const m = new MyWritable( + function (isBuffer, type, enc) { + assert(isBuffer) + assert.strictEqual(type, 'object') + assert.strictEqual(enc, 'buffer') + }, + { + decodeStrings: true + } + ) + m.write('some-text', 'utf8') + m.end() } - { - const m = new MyWritable(function(isBuffer, type, enc) { - assert(!isBuffer); - assert.strictEqual(type, 'string'); - assert.strictEqual(enc, 'utf8'); - }, { decodeStrings: false }); - m.write('some-text', 'utf8'); - m.end(); + const m = new MyWritable( + function (isBuffer, type, enc) { + assert(!isBuffer) + assert.strictEqual(type, 'string') + assert.strictEqual(enc, 'utf8') + }, + { + decodeStrings: false + } + ) + m.write('some-text', 'utf8') + m.end() } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-destroy.js b/test/parallel/test-stream-writable-destroy.js index 52f11fd2a8..5edecba31a 100644 --- a/test/parallel/test-stream-writable-destroy.js +++ b/test/parallel/test-stream-writable-destroy.js @@ -1,262 +1,264 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Writable, addAbortSignal } = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') -{ - const write = new Writable({ - write(chunk, enc, cb) { cb(); } - }); +const { Writable, addAbortSignal } = require('../../lib/ours/index') - write.on('finish', common.mustNotCall()); - write.on('close', common.mustCall()); +const assert = require('assert') - write.destroy(); - assert.strictEqual(write.destroyed, true); +{ + const write = new Writable({ + write(chunk, enc, cb) { + cb() + } + }) + write.on('finish', common.mustNotCall()) + write.on('close', common.mustCall()) + write.destroy() + assert.strictEqual(write.destroyed, true) } - { const write = new Writable({ write(chunk, enc, cb) { - this.destroy(new 
Error('asd')); - cb(); + this.destroy(new Error('asd')) + cb() } - }); - - write.on('error', common.mustCall()); - write.on('finish', common.mustNotCall()); - write.end('asd'); - assert.strictEqual(write.destroyed, true); + }) + write.on('error', common.mustCall()) + write.on('finish', common.mustNotCall()) + write.end('asd') + assert.strictEqual(write.destroyed, true) } - { const write = new Writable({ - write(chunk, enc, cb) { cb(); } - }); - - const expected = new Error('kaboom'); - - write.on('finish', common.mustNotCall()); - write.on('close', common.mustCall()); - write.on('error', common.mustCall((err) => { - assert.strictEqual(err, expected); - })); - - write.destroy(expected); - assert.strictEqual(write.destroyed, true); + write(chunk, enc, cb) { + cb() + } + }) + const expected = new Error('kaboom') + write.on('finish', common.mustNotCall()) + write.on('close', common.mustCall()) + write.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, expected) + }) + ) + write.destroy(expected) + assert.strictEqual(write.destroyed, true) } - { const write = new Writable({ - write(chunk, enc, cb) { cb(); } - }); - - write._destroy = function(err, cb) { - assert.strictEqual(err, expected); - cb(err); - }; - - const expected = new Error('kaboom'); + write(chunk, enc, cb) { + cb() + } + }) - write.on('finish', common.mustNotCall('no finish event')); - write.on('close', common.mustCall()); - write.on('error', common.mustCall((err) => { - assert.strictEqual(err, expected); - })); + write._destroy = function (err, cb) { + assert.strictEqual(err, expected) + cb(err) + } - write.destroy(expected); - assert.strictEqual(write.destroyed, true); + const expected = new Error('kaboom') + write.on('finish', common.mustNotCall('no finish event')) + write.on('close', common.mustCall()) + write.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, expected) + }) + ) + write.destroy(expected) + assert.strictEqual(write.destroyed, true) } - { const write = new Writable({ - write(chunk, enc, cb) { cb(); }, - destroy: common.mustCall(function(err, cb) { - assert.strictEqual(err, expected); - cb(); - }) - }); - - const expected = new Error('kaboom'); - - write.on('finish', common.mustNotCall('no finish event')); - write.on('close', common.mustCall()); - - // Error is swallowed by the custom _destroy - write.on('error', common.mustNotCall('no error event')); + write(chunk, enc, cb) { + cb() + }, - write.destroy(expected); - assert.strictEqual(write.destroyed, true); + destroy: common.mustCall(function (err, cb) { + assert.strictEqual(err, expected) + cb() + }) + }) + const expected = new Error('kaboom') + write.on('finish', common.mustNotCall('no finish event')) + write.on('close', common.mustCall()) // Error is swallowed by the custom _destroy + + write.on('error', common.mustNotCall('no error event')) + write.destroy(expected) + assert.strictEqual(write.destroyed, true) } - { const write = new Writable({ - write(chunk, enc, cb) { cb(); } - }); - - write._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, null); - cb(); - }); - - write.destroy(); - assert.strictEqual(write.destroyed, true); + write(chunk, enc, cb) { + cb() + } + }) + write._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, null) + cb() + }) + write.destroy() + assert.strictEqual(write.destroyed, true) } - { const write = new Writable({ - write(chunk, enc, cb) { cb(); } - }); - - write._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, null); + 
write(chunk, enc, cb) { + cb() + } + }) + write._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, null) process.nextTick(() => { - this.end(); - cb(); - }); - }); - - const fail = common.mustNotCall('no finish event'); - - write.on('finish', fail); - write.on('close', common.mustCall()); - - write.destroy(); - - assert.strictEqual(write.destroyed, true); + this.end() + cb() + }) + }) + const fail = common.mustNotCall('no finish event') + write.on('finish', fail) + write.on('close', common.mustCall()) + write.destroy() + assert.strictEqual(write.destroyed, true) } - { const write = new Writable({ - write(chunk, enc, cb) { cb(); } - }); - - const expected = new Error('kaboom'); - - write._destroy = common.mustCall(function(err, cb) { - assert.strictEqual(err, null); - cb(expected); - }); - - write.on('close', common.mustCall()); - write.on('finish', common.mustNotCall('no finish event')); - write.on('error', common.mustCall((err) => { - assert.strictEqual(err, expected); - })); - - write.destroy(); - assert.strictEqual(write.destroyed, true); + write(chunk, enc, cb) { + cb() + } + }) + const expected = new Error('kaboom') + write._destroy = common.mustCall(function (err, cb) { + assert.strictEqual(err, null) + cb(expected) + }) + write.on('close', common.mustCall()) + write.on('finish', common.mustNotCall('no finish event')) + write.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, expected) + }) + ) + write.destroy() + assert.strictEqual(write.destroyed, true) } - { // double error case const write = new Writable({ - write(chunk, enc, cb) { cb(); } - }); - - let ticked = false; - write.on('close', common.mustCall(() => { - assert.strictEqual(ticked, true); - })); - write.on('error', common.mustCall((err) => { - assert.strictEqual(ticked, true); - assert.strictEqual(err.message, 'kaboom 1'); - assert.strictEqual(write._writableState.errorEmitted, true); - })); - - const expected = new Error('kaboom 1'); - write.destroy(expected); - write.destroy(new Error('kaboom 2')); - assert.strictEqual(write._writableState.errored, expected); - assert.strictEqual(write._writableState.errorEmitted, false); - assert.strictEqual(write.destroyed, true); - ticked = true; + write(chunk, enc, cb) { + cb() + } + }) + let ticked = false + write.on( + 'close', + common.mustCall(() => { + assert.strictEqual(ticked, true) + }) + ) + write.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(ticked, true) + assert.strictEqual(err.message, 'kaboom 1') + assert.strictEqual(write._writableState.errorEmitted, true) + }) + ) + const expected = new Error('kaboom 1') + write.destroy(expected) + write.destroy(new Error('kaboom 2')) + assert.strictEqual(write._writableState.errored, expected) + assert.strictEqual(write._writableState.errorEmitted, false) + assert.strictEqual(write.destroyed, true) + ticked = true } - { const writable = new Writable({ - destroy: common.mustCall(function(err, cb) { - process.nextTick(cb, new Error('kaboom 1')); + destroy: common.mustCall(function (err, cb) { + process.nextTick(cb, new Error('kaboom 1')) }), + write(chunk, enc, cb) { - cb(); + cb() } - }); - - let ticked = false; - writable.on('close', common.mustCall(() => { - writable.on('error', common.mustNotCall()); - writable.destroy(new Error('hello')); - assert.strictEqual(ticked, true); - assert.strictEqual(writable._writableState.errorEmitted, true); - })); - writable.on('error', common.mustCall((err) => { - assert.strictEqual(ticked, true); - assert.strictEqual(err.message, 
'kaboom 1'); - assert.strictEqual(writable._writableState.errorEmitted, true); - })); - - writable.destroy(); - assert.strictEqual(writable.destroyed, true); - assert.strictEqual(writable._writableState.errored, null); - assert.strictEqual(writable._writableState.errorEmitted, false); - - // Test case where `writable.destroy()` is called again with an error before + }) + let ticked = false + writable.on( + 'close', + common.mustCall(() => { + writable.on('error', common.mustNotCall()) + writable.destroy(new Error('hello')) + assert.strictEqual(ticked, true) + assert.strictEqual(writable._writableState.errorEmitted, true) + }) + ) + writable.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(ticked, true) + assert.strictEqual(err.message, 'kaboom 1') + assert.strictEqual(writable._writableState.errorEmitted, true) + }) + ) + writable.destroy() + assert.strictEqual(writable.destroyed, true) + assert.strictEqual(writable._writableState.errored, null) + assert.strictEqual(writable._writableState.errorEmitted, false) // Test case where `writable.destroy()` is called again with an error before // the `_destroy()` callback is called. - writable.destroy(new Error('kaboom 2')); - assert.strictEqual(writable._writableState.errorEmitted, false); - assert.strictEqual(writable._writableState.errored, null); - ticked = true; + writable.destroy(new Error('kaboom 2')) + assert.strictEqual(writable._writableState.errorEmitted, false) + assert.strictEqual(writable._writableState.errored, null) + ticked = true } - { const write = new Writable({ - write(chunk, enc, cb) { cb(); } - }); - - write.destroyed = true; - assert.strictEqual(write.destroyed, true); + write(chunk, enc, cb) { + cb() + } + }) + write.destroyed = true + assert.strictEqual(write.destroyed, true) // The internal destroy() mechanism should not be triggered - // The internal destroy() mechanism should not be triggered - write.on('close', common.mustNotCall()); - write.destroy(); + write.on('close', common.mustNotCall()) + write.destroy() } - { function MyWritable() { - assert.strictEqual(this.destroyed, false); - this.destroyed = false; - Writable.call(this); + assert.strictEqual(this.destroyed, false) + this.destroyed = false + Writable.call(this) } - Object.setPrototypeOf(MyWritable.prototype, Writable.prototype); - Object.setPrototypeOf(MyWritable, Writable); - - new MyWritable(); + Object.setPrototypeOf(MyWritable.prototype, Writable.prototype) + Object.setPrototypeOf(MyWritable, Writable) + new MyWritable() } - { // Destroy and destroy callback const write = new Writable({ - write(chunk, enc, cb) { cb(); } - }); - - write.destroy(); - - const expected = new Error('kaboom'); - - write.destroy(expected, common.mustCall((err) => { - assert.strictEqual(err, undefined); - })); + write(chunk, enc, cb) { + cb() + } + }) + write.destroy() + const expected = new Error('kaboom') + write.destroy( + expected, + common.mustCall((err) => { + assert.strictEqual(err, undefined) + }) + ) } - { // Checks that `._undestroy()` restores the state so that `final` will be // called again. 
@@ -264,241 +266,300 @@ const assert = require('assert'); write: common.mustNotCall(), final: common.mustCall((cb) => cb(), 2), autoDestroy: true - }); - - write.end(); - write.once('close', common.mustCall(() => { - write._undestroy(); - write.end(); - })); + }) + write.end() + write.once( + 'close', + common.mustCall(() => { + write._undestroy() + + write.end() + }) + ) } - { - const write = new Writable(); - - write.destroy(); - write.on('error', common.mustNotCall()); - write.write('asd', common.expectsError({ - name: 'Error', - code: 'ERR_STREAM_DESTROYED', - message: 'Cannot call write after a stream was destroyed' - })); + const write = new Writable() + write.destroy() + write.on('error', common.mustNotCall()) + write.write( + 'asd', + common.expectsError({ + name: 'Error', + code: 'ERR_STREAM_DESTROYED', + message: 'Cannot call write after a stream was destroyed' + }) + ) } - { const write = new Writable({ - write(chunk, enc, cb) { cb(); } - }); - - write.on('error', common.mustNotCall()); - - write.cork(); - write.write('asd', common.mustCall()); - write.uncork(); - - write.cork(); - write.write('asd', common.expectsError({ - name: 'Error', - code: 'ERR_STREAM_DESTROYED', - message: 'Cannot call write after a stream was destroyed' - })); - write.destroy(); - write.write('asd', common.expectsError({ - name: 'Error', - code: 'ERR_STREAM_DESTROYED', - message: 'Cannot call write after a stream was destroyed' - })); - write.uncork(); + write(chunk, enc, cb) { + cb() + } + }) + write.on('error', common.mustNotCall()) + write.cork() + write.write('asd', common.mustCall()) + write.uncork() + write.cork() + write.write( + 'asd', + common.expectsError({ + name: 'Error', + code: 'ERR_STREAM_DESTROYED', + message: 'Cannot call write after a stream was destroyed' + }) + ) + write.destroy() + write.write( + 'asd', + common.expectsError({ + name: 'Error', + code: 'ERR_STREAM_DESTROYED', + message: 'Cannot call write after a stream was destroyed' + }) + ) + write.uncork() } - { // Call end(cb) after error & destroy - const write = new Writable({ - write(chunk, enc, cb) { cb(new Error('asd')); } - }); - write.on('error', common.mustCall(() => { - write.destroy(); - let ticked = false; - write.end(common.mustCall((err) => { - assert.strictEqual(ticked, true); - assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED'); - })); - ticked = true; - })); - write.write('asd'); + write(chunk, enc, cb) { + cb(new Error('asd')) + } + }) + write.on( + 'error', + common.mustCall(() => { + write.destroy() + let ticked = false + write.end( + common.mustCall((err) => { + assert.strictEqual(ticked, true) + assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED') + }) + ) + ticked = true + }) + ) + write.write('asd') } - { // Call end(cb) after finish & destroy - const write = new Writable({ - write(chunk, enc, cb) { cb(); } - }); - write.on('finish', common.mustCall(() => { - write.destroy(); - let ticked = false; - write.end(common.mustCall((err) => { - assert.strictEqual(ticked, true); - assert.strictEqual(err.code, 'ERR_STREAM_ALREADY_FINISHED'); - })); - ticked = true; - })); - write.end(); + write(chunk, enc, cb) { + cb() + } + }) + write.on( + 'finish', + common.mustCall(() => { + write.destroy() + let ticked = false + write.end( + common.mustCall((err) => { + assert.strictEqual(ticked, true) + assert.strictEqual(err.code, 'ERR_STREAM_ALREADY_FINISHED') + }) + ) + ticked = true + }) + ) + write.end() } - { // Call end(cb) after error & destroy and don't trigger // unhandled exception. 
- const write = new Writable({ - write(chunk, enc, cb) { process.nextTick(cb); } - }); - const _err = new Error('asd'); - write.once('error', common.mustCall((err) => { - assert.strictEqual(err.message, 'asd'); - })); - write.end('asd', common.mustCall((err) => { - assert.strictEqual(err, _err); - })); - write.destroy(_err); -} + write(chunk, enc, cb) { + process.nextTick(cb) + } + }) + + const _err = new Error('asd') + write.once( + 'error', + common.mustCall((err) => { + assert.strictEqual(err.message, 'asd') + }) + ) + write.end( + 'asd', + common.mustCall((err) => { + assert.strictEqual(err, _err) + }) + ) + write.destroy(_err) +} { // Call buffered write callback with error + const _err = new Error('asd') - const _err = new Error('asd'); const write = new Writable({ write(chunk, enc, cb) { - process.nextTick(cb, _err); + process.nextTick(cb, _err) }, + autoDestroy: false - }); - write.cork(); - write.write('asd', common.mustCall((err) => { - assert.strictEqual(err, _err); - })); - write.write('asd', common.mustCall((err) => { - assert.strictEqual(err, _err); - })); - write.on('error', common.mustCall((err) => { - assert.strictEqual(err, _err); - })); - write.uncork(); + }) + write.cork() + write.write( + 'asd', + common.mustCall((err) => { + assert.strictEqual(err, _err) + }) + ) + write.write( + 'asd', + common.mustCall((err) => { + assert.strictEqual(err, _err) + }) + ) + write.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, _err) + }) + ) + write.uncork() } - { // Ensure callback order. - - let state = 0; + let state = 0 const write = new Writable({ write(chunk, enc, cb) { // `setImmediate()` is used on purpose to ensure the callback is called // after `process.nextTick()` callbacks. - setImmediate(cb); + setImmediate(cb) } - }); - write.write('asd', common.mustCall(() => { - assert.strictEqual(state++, 0); - })); - write.write('asd', common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED'); - assert.strictEqual(state++, 1); - })); - write.destroy(); + }) + write.write( + 'asd', + common.mustCall(() => { + assert.strictEqual(state++, 0) + }) + ) + write.write( + 'asd', + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED') + assert.strictEqual(state++, 1) + }) + ) + write.destroy() } - { const write = new Writable({ autoDestroy: false, + write(chunk, enc, cb) { - cb(); - cb(); + cb() + cb() } - }); - - write.on('error', common.mustCall(() => { - assert(write._writableState.errored); - })); - write.write('asd'); + }) + write.on( + 'error', + common.mustCall(() => { + assert(write._writableState.errored) + }) + ) + write.write('asd') } - { - const ac = new AbortController(); - const write = addAbortSignal(ac.signal, new Writable({ - write(chunk, enc, cb) { cb(); } - })); - - write.on('error', common.mustCall((e) => { - assert.strictEqual(e.name, 'AbortError'); - assert.strictEqual(write.destroyed, true); - })); - write.write('asd'); - ac.abort(); + const ac = new AbortController() + const write = addAbortSignal( + ac.signal, + new Writable({ + write(chunk, enc, cb) { + cb() + } + }) + ) + write.on( + 'error', + common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError') + assert.strictEqual(write.destroyed, true) + }) + ) + write.write('asd') + ac.abort() } - { - const ac = new AbortController(); + const ac = new AbortController() const write = new Writable({ signal: ac.signal, - write(chunk, enc, cb) { cb(); } - }); - write.on('error', common.mustCall((e) => { - assert.strictEqual(e.name, 
'AbortError'); - assert.strictEqual(write.destroyed, true); - })); - write.write('asd'); - ac.abort(); + write(chunk, enc, cb) { + cb() + } + }) + write.on( + 'error', + common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError') + assert.strictEqual(write.destroyed, true) + }) + ) + write.write('asd') + ac.abort() } - { - const signal = AbortSignal.abort(); - + const signal = AbortSignal.abort() const write = new Writable({ signal, - write(chunk, enc, cb) { cb(); } - }); - write.on('error', common.mustCall((e) => { - assert.strictEqual(e.name, 'AbortError'); - assert.strictEqual(write.destroyed, true); - })); + write(chunk, enc, cb) { + cb() + } + }) + write.on( + 'error', + common.mustCall((e) => { + assert.strictEqual(e.name, 'AbortError') + assert.strictEqual(write.destroyed, true) + }) + ) } - { // Destroy twice const write = new Writable({ - write(chunk, enc, cb) { cb(); } - }); - - write.end(common.mustCall()); - write.destroy(); - write.destroy(); + write(chunk, enc, cb) { + cb() + } + }) + write.end(common.mustCall()) + write.destroy() + write.destroy() } - { // https://github.com/nodejs/node/issues/39356 const s = new Writable({ final() {} - }); - const _err = new Error('oh no'); - // Remove `callback` and it works - s.end(common.mustCall((err) => { - assert.strictEqual(err, _err); - })); - s.on('error', common.mustCall((err) => { - assert.strictEqual(err, _err); - })); - s.destroy(_err); + }) + + const _err = new Error('oh no') // Remove `callback` and it works + + s.end( + common.mustCall((err) => { + assert.strictEqual(err, _err) + }) + ) + s.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, _err) + }) + ) + s.destroy(_err) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-end-cb-error.js b/test/parallel/test-stream-writable-end-cb-error.js index c69be9a596..c18650055a 100644 --- a/test/parallel/test-stream-writable-end-cb-error.js +++ b/test/parallel/test-stream-writable-end-cb-error.js @@ -1,93 +1,123 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const stream = require('../../lib/ours/index') { // Invoke end callback on failure. 
- const writable = new stream.Writable(); + const writable = new stream.Writable() + + const _err = new Error('kaboom') - const _err = new Error('kaboom'); writable._write = (chunk, encoding, cb) => { - process.nextTick(cb, _err); - }; + process.nextTick(cb, _err) + } - writable.on('error', common.mustCall((err) => { - assert.strictEqual(err, _err); - })); - writable.write('asd'); - writable.end(common.mustCall((err) => { - assert.strictEqual(err, _err); - })); - writable.end(common.mustCall((err) => { - assert.strictEqual(err, _err); - })); + writable.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err, _err) + }) + ) + writable.write('asd') + writable.end( + common.mustCall((err) => { + assert.strictEqual(err, _err) + }) + ) + writable.end( + common.mustCall((err) => { + assert.strictEqual(err, _err) + }) + ) } - { // Don't invoke end callback twice - const writable = new stream.Writable(); + const writable = new stream.Writable() writable._write = (chunk, encoding, cb) => { - process.nextTick(cb); - }; + process.nextTick(cb) + } - let called = false; - writable.end('asd', common.mustCall((err) => { - called = true; - assert.strictEqual(err, undefined); - })); - - writable.on('error', common.mustCall((err) => { - assert.strictEqual(err.message, 'kaboom'); - })); - writable.on('finish', common.mustCall(() => { - assert.strictEqual(called, true); - writable.emit('error', new Error('kaboom')); - })); + let called = false + writable.end( + 'asd', + common.mustCall((err) => { + called = true + assert.strictEqual(err, undefined) + }) + ) + writable.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err.message, 'kaboom') + }) + ) + writable.on( + 'finish', + common.mustCall(() => { + assert.strictEqual(called, true) + writable.emit('error', new Error('kaboom')) + }) + ) } - { const w = new stream.Writable({ write(chunk, encoding, callback) { - setImmediate(callback); + setImmediate(callback) }, + finish(callback) { - setImmediate(callback); + setImmediate(callback) } - }); - w.end('testing ended state', common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); - })); - assert.strictEqual(w.destroyed, false); - assert.strictEqual(w.writableEnded, true); - w.end(common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); - })); - assert.strictEqual(w.destroyed, false); - assert.strictEqual(w.writableEnded, true); - w.end('end', common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); - })); - assert.strictEqual(w.destroyed, true); - w.on('error', common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); - })); - w.on('finish', common.mustNotCall()); + }) + w.end( + 'testing ended state', + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END') + }) + ) + assert.strictEqual(w.destroyed, false) + assert.strictEqual(w.writableEnded, true) + w.end( + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END') + }) + ) + assert.strictEqual(w.destroyed, false) + assert.strictEqual(w.writableEnded, true) + w.end( + 'end', + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END') + }) + ) + assert.strictEqual(w.destroyed, true) + w.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END') + }) + ) + w.on('finish', common.mustNotCall()) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', 
(code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-end-cb-uncaught.js b/test/parallel/test-stream-writable-end-cb-uncaught.js index 222fb52546..802be03982 100644 --- a/test/parallel/test-stream-writable-end-cb-uncaught.js +++ b/test/parallel/test-stream-writable-end-cb-uncaught.js @@ -1,39 +1,49 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') -process.on('uncaughtException', common.mustCall((err) => { - assert.strictEqual(err.message, 'kaboom'); -})); +const assert = require('assert') -const writable = new stream.Writable(); -const _err = new Error('kaboom'); +const stream = require('../../lib/ours/index') + +process.on( + 'uncaughtException', + common.mustCall((err) => { + assert.strictEqual(err.message, 'kaboom') + }) +) +const writable = new stream.Writable() + +const _err = new Error('kaboom') writable._write = (chunk, encoding, cb) => { - cb(); -}; + cb() +} + writable._final = (cb) => { - cb(_err); -}; - -writable.write('asd'); -writable.end(common.mustCall((err) => { - assert.strictEqual(err, _err); -})); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + cb(_err) +} + +writable.write('asd') +writable.end( + common.mustCall((err) => { + assert.strictEqual(err, _err) + }) +) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-end-multiple.js b/test/parallel/test-stream-writable-end-multiple.js index 6b30345a9b..868c79b491 100644 --- a/test/parallel/test-stream-writable-end-multiple.js +++ b/test/parallel/test-stream-writable-end-multiple.js @@ -1,37 +1,46 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); + error() {} +} +const common = require('../common') -const assert = require('assert'); -const stream = require('../../lib/ours/index'); +const assert = require('assert') + +const stream = require('../../lib/ours/index') + +const writable = new stream.Writable() -const writable = new stream.Writable(); writable._write = (chunk, encoding, cb) => { - setTimeout(() => cb(), 10); -}; - -writable.end('testing ended state', common.mustCall()); -writable.end(common.mustCall()); -writable.on('finish', common.mustCall(() => { - let ticked = false; - writable.end(common.mustCall((err) => { - assert.strictEqual(ticked, true); - assert.strictEqual(err.code, 'ERR_STREAM_ALREADY_FINISHED'); - })); - ticked = true; -})); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - 
tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + setTimeout(() => cb(), 10) +} + +writable.end('testing ended state', common.mustCall()) +writable.end(common.mustCall()) +writable.on( + 'finish', + common.mustCall(() => { + let ticked = false + writable.end( + common.mustCall((err) => { + assert.strictEqual(ticked, true) + assert.strictEqual(err.code, 'ERR_STREAM_ALREADY_FINISHED') + }) + ) + ticked = true + }) +) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-ended-state.js b/test/parallel/test-stream-writable-ended-state.js index 2b5a85b06b..f91d66b56e 100644 --- a/test/parallel/test-stream-writable-ended-state.js +++ b/test/parallel/test-stream-writable-ended-state.js @@ -1,47 +1,51 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); + error() {} +} +const common = require('../common') -const assert = require('assert'); -const stream = require('../../lib/ours/index'); +const assert = require('assert') -const writable = new stream.Writable(); +const stream = require('../../lib/ours/index') + +const writable = new stream.Writable() writable._write = (chunk, encoding, cb) => { - assert.strictEqual(writable._writableState.ended, false); - assert.strictEqual(writable._writableState.writable, undefined); - assert.strictEqual(writable.writableEnded, false); - cb(); -}; - -assert.strictEqual(writable._writableState.ended, false); -assert.strictEqual(writable._writableState.writable, undefined); -assert.strictEqual(writable.writable, true); -assert.strictEqual(writable.writableEnded, false); - -writable.end('testing ended state', common.mustCall(() => { - assert.strictEqual(writable._writableState.ended, true); - assert.strictEqual(writable._writableState.writable, undefined); - assert.strictEqual(writable.writable, false); - assert.strictEqual(writable.writableEnded, true); -})); - -assert.strictEqual(writable._writableState.ended, true); -assert.strictEqual(writable._writableState.writable, undefined); -assert.strictEqual(writable.writable, false); -assert.strictEqual(writable.writableEnded, true); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + assert.strictEqual(writable._writableState.ended, false) + assert.strictEqual(writable._writableState.writable, undefined) + assert.strictEqual(writable.writableEnded, false) + cb() +} + +assert.strictEqual(writable._writableState.ended, false) +assert.strictEqual(writable._writableState.writable, undefined) +assert.strictEqual(writable.writable, true) +assert.strictEqual(writable.writableEnded, false) +writable.end( + 'testing ended state', + common.mustCall(() => { + assert.strictEqual(writable._writableState.ended, true) + assert.strictEqual(writable._writableState.writable, undefined) + assert.strictEqual(writable.writable, false) + assert.strictEqual(writable.writableEnded, true) + }) +) +assert.strictEqual(writable._writableState.ended, true) +assert.strictEqual(writable._writableState.writable, undefined) +assert.strictEqual(writable.writable, false) 
+assert.strictEqual(writable.writableEnded, true) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-final-async.js b/test/parallel/test-stream-writable-final-async.js index 37dbd08e6f..f4643db8ee 100644 --- a/test/parallel/test-stream-writable-final-async.js +++ b/test/parallel/test-stream-writable-final-async.js @@ -1,48 +1,48 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { - Duplex, -} = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') - const st = require('timers').setTimeout; +const { Duplex } = require('../../lib/ours/index') - function setTimeout(ms) { - return new Promise(resolve => { - st(resolve, ms); - }); - } - +const st = require('timers').setTimeout + +function setTimeout(ms) { + return new Promise((resolve) => { + st(resolve, ms) + }) +} { class Foo extends Duplex { async _final(callback) { - await setTimeout(common.platformTimeout(1)); - callback(); + await setTimeout(common.platformTimeout(1)) + callback() } _read() {} } - const foo = new Foo(); + const foo = new Foo() foo._write = common.mustCall((chunk, encoding, cb) => { - cb(); - }); - foo.end('test', common.mustCall()); - foo.on('error', common.mustNotCall()); + cb() + }) + foo.end('test', common.mustCall()) + foo.on('error', common.mustNotCall()) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-final-destroy.js b/test/parallel/test-stream-writable-final-destroy.js index 580040d6b1..bb5b529d4f 100644 --- a/test/parallel/test-stream-writable-final-destroy.js +++ b/test/parallel/test-stream-writable-final-destroy.js @@ -1,36 +1,39 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); +const silentConsole = { + log() {}, -const { Writable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const { Writable } = require('../../lib/ours/index') { const w = new Writable({ write(chunk, encoding, callback) { - callback(null); + callback(null) }, + final(callback) { - queueMicrotask(callback); + queueMicrotask(callback) } - }); - w.end(); - w.destroy(); - - w.on('prefinish', common.mustNotCall()); - w.on('finish', common.mustNotCall()); - w.on('close', common.mustCall()); + }) + w.end() + w.destroy() + w.on('prefinish', common.mustNotCall()) + w.on('finish', common.mustNotCall()) + w.on('close', common.mustCall()) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + 
tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-final-throw.js b/test/parallel/test-stream-writable-final-throw.js index 848434544c..1bd88cbbb9 100644 --- a/test/parallel/test-stream-writable-final-throw.js +++ b/test/parallel/test-stream-writable-final-throw.js @@ -1,38 +1,44 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { - Duplex, -} = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const { Duplex } = require('../../lib/ours/index') { class Foo extends Duplex { _final(callback) { - throw new Error('fhqwhgads'); + throw new Error('fhqwhgads') } _read() {} } - const foo = new Foo(); + const foo = new Foo() foo._write = common.mustCall((chunk, encoding, cb) => { - cb(); - }); - foo.end('test', common.expectsError({ message: 'fhqwhgads' })); - foo.on('error', common.mustCall()); + cb() + }) + foo.end( + 'test', + common.expectsError({ + message: 'fhqwhgads' + }) + ) + foo.on('error', common.mustCall()) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-finish-destroyed.js b/test/parallel/test-stream-writable-finish-destroyed.js index ec2ed6ba4f..e18a70f224 100644 --- a/test/parallel/test-stream-writable-finish-destroyed.js +++ b/test/parallel/test-stream-writable-finish-destroyed.js @@ -1,58 +1,62 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Writable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const { Writable } = require('../../lib/ours/index') { const w = new Writable({ write: common.mustCall((chunk, encoding, cb) => { - w.on('close', common.mustCall(() => { - cb(); - })); + w.on( + 'close', + common.mustCall(() => { + cb() + }) + ) }) - }); - - w.on('finish', common.mustNotCall()); - w.end('asd'); - w.destroy(); + }) + w.on('finish', common.mustNotCall()) + w.end('asd') + w.destroy() } - { const w = new Writable({ write: common.mustCall((chunk, encoding, cb) => { - w.on('close', common.mustCall(() => { - cb(); - w.end(); - })); + w.on( + 'close', + common.mustCall(() => { + cb() + w.end() + }) + ) }) - }); - - w.on('finish', common.mustNotCall()); - w.write('asd'); - w.destroy(); + }) + w.on('finish', common.mustNotCall()) + w.write('asd') + w.destroy() } - { const w = new Writable({ - write() { - } - }); - w.on('finish', common.mustNotCall()); - w.end(); - w.destroy(); + write() {} + }) + w.on('finish', common.mustNotCall()) + w.end() + w.destroy() } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test 
succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-finished-state.js b/test/parallel/test-stream-writable-finished-state.js index 5f477ce296..d4fefe06f1 100644 --- a/test/parallel/test-stream-writable-finished-state.js +++ b/test/parallel/test-stream-writable-finished-state.js @@ -1,37 +1,45 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); + error() {} +} +const common = require('../common') -const assert = require('assert'); -const stream = require('../../lib/ours/index'); +const assert = require('assert') -const writable = new stream.Writable(); +const stream = require('../../lib/ours/index') + +const writable = new stream.Writable() writable._write = (chunk, encoding, cb) => { // The state finished should start in false. - assert.strictEqual(writable._writableState.finished, false); - cb(); -}; - -writable.on('finish', common.mustCall(() => { - assert.strictEqual(writable._writableState.finished, true); -})); - -writable.end('testing finished state', common.mustCall(() => { - assert.strictEqual(writable._writableState.finished, true); -})); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + assert.strictEqual(writable._writableState.finished, false) + cb() +} + +writable.on( + 'finish', + common.mustCall(() => { + assert.strictEqual(writable._writableState.finished, true) + }) +) +writable.end( + 'testing finished state', + common.mustCall(() => { + assert.strictEqual(writable._writableState.finished, true) + }) +) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-finished.js b/test/parallel/test-stream-writable-finished.js index 5f4a93df5e..8d31f4fea2 100644 --- a/test/parallel/test-stream-writable-finished.js +++ b/test/parallel/test-stream-writable-finished.js @@ -1,114 +1,116 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const { Writable } = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const { Writable } = require('../../lib/ours/index') + +const assert = require('assert') // basic -// basic { // Find it on Writable.prototype - assert(Reflect.has(Writable.prototype, 'writableFinished')); -} + assert(Reflect.has(Writable.prototype, 'writableFinished')) +} // event -// event { - const writable = new Writable(); + const writable = new Writable() writable._write = (chunk, encoding, cb) => { // The state finished should start in false. 
- assert.strictEqual(writable.writableFinished, false); - cb(); - }; - - writable.on('finish', common.mustCall(() => { - assert.strictEqual(writable.writableFinished, true); - })); - - writable.end('testing finished state', common.mustCall(() => { - assert.strictEqual(writable.writableFinished, true); - })); + assert.strictEqual(writable.writableFinished, false) + cb() + } + + writable.on( + 'finish', + common.mustCall(() => { + assert.strictEqual(writable.writableFinished, true) + }) + ) + writable.end( + 'testing finished state', + common.mustCall(() => { + assert.strictEqual(writable.writableFinished, true) + }) + ) } - { // Emit finish asynchronously. - const w = new Writable({ write(chunk, encoding, cb) { - cb(); + cb() } - }); - - w.end(); - w.on('finish', common.mustCall()); + }) + w.end() + w.on('finish', common.mustCall()) } - { // Emit prefinish synchronously. - const w = new Writable({ write(chunk, encoding, cb) { - cb(); + cb() } - }); - - let sync = true; - w.on('prefinish', common.mustCall(() => { - assert.strictEqual(sync, true); - })); - w.end(); - sync = false; + }) + let sync = true + w.on( + 'prefinish', + common.mustCall(() => { + assert.strictEqual(sync, true) + }) + ) + w.end() + sync = false } - { // Emit prefinish synchronously w/ final. - const w = new Writable({ write(chunk, encoding, cb) { - cb(); + cb() }, + final(cb) { - cb(); + cb() } - }); - - let sync = true; - w.on('prefinish', common.mustCall(() => { - assert.strictEqual(sync, true); - })); - w.end(); - sync = false; + }) + let sync = true + w.on( + 'prefinish', + common.mustCall(() => { + assert.strictEqual(sync, true) + }) + ) + w.end() + sync = false } - - { // Call _final synchronously. - - let sync = true; + let sync = true const w = new Writable({ write(chunk, encoding, cb) { - cb(); + cb() }, + final: common.mustCall((cb) => { - assert.strictEqual(sync, true); - cb(); + assert.strictEqual(sync, true) + cb() }) - }); - - w.end(); - sync = false; + }) + w.end() + sync = false } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-invalid-chunk.js b/test/parallel/test-stream-writable-invalid-chunk.js index 6db696bec0..4290581165 100644 --- a/test/parallel/test-stream-writable-invalid-chunk.js +++ b/test/parallel/test-stream-writable-invalid-chunk.js @@ -1,51 +1,60 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const stream = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const stream = require('../../lib/ours/index') + +const assert = require('assert') function testWriteType(val, objectMode, code) { const writable = new stream.Writable({ objectMode, write: () => {} - }); - writable.on('error', common.mustNotCall()); + }) + writable.on('error', common.mustNotCall()) + if (code) { - assert.throws(() => { - writable.write(val); - }, { code }); + assert.throws( + () => { + writable.write(val) + }, + { + code + } + ) } else { - writable.write(val); + 
writable.write(val) } } -testWriteType([], false, 'ERR_INVALID_ARG_TYPE'); -testWriteType({}, false, 'ERR_INVALID_ARG_TYPE'); -testWriteType(0, false, 'ERR_INVALID_ARG_TYPE'); -testWriteType(true, false, 'ERR_INVALID_ARG_TYPE'); -testWriteType(0.0, false, 'ERR_INVALID_ARG_TYPE'); -testWriteType(undefined, false, 'ERR_INVALID_ARG_TYPE'); -testWriteType(null, false, 'ERR_STREAM_NULL_VALUES'); - -testWriteType([], true); -testWriteType({}, true); -testWriteType(0, true); -testWriteType(true, true); -testWriteType(0.0, true); -testWriteType(undefined, true); -testWriteType(null, true, 'ERR_STREAM_NULL_VALUES'); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +testWriteType([], false, 'ERR_INVALID_ARG_TYPE') +testWriteType({}, false, 'ERR_INVALID_ARG_TYPE') +testWriteType(0, false, 'ERR_INVALID_ARG_TYPE') +testWriteType(true, false, 'ERR_INVALID_ARG_TYPE') +testWriteType(0.0, false, 'ERR_INVALID_ARG_TYPE') +testWriteType(undefined, false, 'ERR_INVALID_ARG_TYPE') +testWriteType(null, false, 'ERR_STREAM_NULL_VALUES') +testWriteType([], true) +testWriteType({}, true) +testWriteType(0, true) +testWriteType(true, true) +testWriteType(0.0, true) +testWriteType(undefined, true) +testWriteType(null, true, 'ERR_STREAM_NULL_VALUES') +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-needdrain-state.js b/test/parallel/test-stream-writable-needdrain-state.js index 8a6c12e0f4..0876c3742c 100644 --- a/test/parallel/test-stream-writable-needdrain-state.js +++ b/test/parallel/test-stream-writable-needdrain-state.js @@ -1,40 +1,45 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const stream = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const stream = require('../../lib/ours/index') + +const assert = require('assert') const transform = new stream.Transform({ transform: _transform, highWaterMark: 1 -}); +}) function _transform(chunk, encoding, cb) { process.nextTick(() => { - assert.strictEqual(transform._writableState.needDrain, true); - cb(); - }); + assert.strictEqual(transform._writableState.needDrain, true) + cb() + }) } -assert.strictEqual(transform._writableState.needDrain, false); - -transform.write('asdasd', common.mustCall(() => { - assert.strictEqual(transform._writableState.needDrain, false); -})); - -assert.strictEqual(transform._writableState.needDrain, true); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +assert.strictEqual(transform._writableState.needDrain, false) +transform.write( + 'asdasd', + common.mustCall(() => { + assert.strictEqual(transform._writableState.needDrain, false) + }) +) +assert.strictEqual(transform._writableState.needDrain, true) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* 
replacement end */ diff --git a/test/parallel/test-stream-writable-null.js b/test/parallel/test-stream-writable-null.js index f9a8783f30..e60d895a52 100644 --- a/test/parallel/test-stream-writable-null.js +++ b/test/parallel/test-stream-writable-null.js @@ -1,62 +1,81 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); +const silentConsole = { + log() {}, -const stream = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const stream = require('../../lib/ours/index') class MyWritable extends stream.Writable { constructor(options) { - super({ autoDestroy: false, ...options }); + super({ + autoDestroy: false, + ...options + }) } + _write(chunk, encoding, callback) { - assert.notStrictEqual(chunk, null); - callback(); + assert.notStrictEqual(chunk, null) + callback() } } { - const m = new MyWritable({ objectMode: true }); - m.on('error', common.mustNotCall()); - assert.throws(() => { - m.write(null); - }, { - code: 'ERR_STREAM_NULL_VALUES' - }); + const m = new MyWritable({ + objectMode: true + }) + m.on('error', common.mustNotCall()) + assert.throws( + () => { + m.write(null) + }, + { + code: 'ERR_STREAM_NULL_VALUES' + } + ) } - { - const m = new MyWritable(); - m.on('error', common.mustNotCall()); - assert.throws(() => { - m.write(false); - }, { - code: 'ERR_INVALID_ARG_TYPE' - }); + const m = new MyWritable() + m.on('error', common.mustNotCall()) + assert.throws( + () => { + m.write(false) + }, + { + code: 'ERR_INVALID_ARG_TYPE' + } + ) } - -{ // Should not throw. - const m = new MyWritable({ objectMode: true }); - m.write(false, assert.ifError); +{ + // Should not throw. + const m = new MyWritable({ + objectMode: true + }) + m.write(false, assert.ifError) } - -{ // Should not throw. - const m = new MyWritable({ objectMode: true }).on('error', (e) => { - assert.ifError(e || new Error('should not get here')); - }); - m.write(false, assert.ifError); +{ + // Should not throw. 
+ const m = new MyWritable({ + objectMode: true + }).on('error', (e) => { + assert.ifError(e || new Error('should not get here')) + }) + m.write(false, assert.ifError) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-properties.js b/test/parallel/test-stream-writable-properties.js index c2b5ce177a..80ed31463e 100644 --- a/test/parallel/test-stream-writable-properties.js +++ b/test/parallel/test-stream-writable-properties.js @@ -1,37 +1,41 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); +const silentConsole = { + log() {}, -const { Writable } = require('../../lib/ours/index'); + error() {} +} +require('../common') + +const assert = require('assert') + +const { Writable } = require('../../lib/ours/index') { - const w = new Writable(); - assert.strictEqual(w.writableCorked, 0); - w.uncork(); - assert.strictEqual(w.writableCorked, 0); - w.cork(); - assert.strictEqual(w.writableCorked, 1); - w.cork(); - assert.strictEqual(w.writableCorked, 2); - w.uncork(); - assert.strictEqual(w.writableCorked, 1); - w.uncork(); - assert.strictEqual(w.writableCorked, 0); - w.uncork(); - assert.strictEqual(w.writableCorked, 0); + const w = new Writable() + assert.strictEqual(w.writableCorked, 0) + w.uncork() + assert.strictEqual(w.writableCorked, 0) + w.cork() + assert.strictEqual(w.writableCorked, 1) + w.cork() + assert.strictEqual(w.writableCorked, 2) + w.uncork() + assert.strictEqual(w.writableCorked, 1) + w.uncork() + assert.strictEqual(w.writableCorked, 0) + w.uncork() + assert.strictEqual(w.writableCorked, 0) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-samecb-singletick.js b/test/parallel/test-stream-writable-samecb-singletick.js index 6f72e25db4..bb3cf482fd 100644 --- a/test/parallel/test-stream-writable-samecb-singletick.js +++ b/test/parallel/test-stream-writable-samecb-singletick.js @@ -1,15 +1,19 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const { Console } = require('console'); -const { Writable } = require('../../lib/ours/index'); -const async_hooks = require('async_hooks'); +const silentConsole = { + log() {}, -// Make sure that repeated calls to silentConsole.log(), and by extension + error() {} +} +const common = require('../common') + +const { Console } = require('console') + +const { Writable } = require('../../lib/ours/index') + +const async_hooks = require('async_hooks') // Make sure that repeated calls to silentConsole.log(), and by extension // stream.write() for the underlying stream, 
allocate exactly 1 tick object. // At the time of writing, that is enough to ensure a flat memory profile // from repeated silentConsole.log() calls, rather than having callbacks pile up @@ -17,35 +21,37 @@ const async_hooks = require('async_hooks'); // Refs: https://github.com/nodejs/node/issues/18013 // Refs: https://github.com/nodejs/node/issues/18367 -const checkTickCreated = common.mustCall(); - -const hook = async_hooks.createHook({ - init(id, type, triggerId, resoure) { - if (type === 'TickObject') checkTickCreated(); - } -}).enable(); - -const console = new Console(new Writable({ - write: common.mustCall((chunk, encoding, cb) => { - cb(); - }, 100) -})); - -for (let i = 0; i < 100; i++) - console.log(i); - - /* replacement start */ - process.on('beforeExit', (code) => { - hook.disable(); - }); - /* replacement end */ - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); +const checkTickCreated = common.mustCall() +const hook = async_hooks + .createHook({ + init(id, type, triggerId, resoure) { + if (type === 'TickObject') checkTickCreated() } - }); - /* replacement end */ + }) + .enable() +const console = new Console( + new Writable({ + write: common.mustCall((chunk, encoding, cb) => { + cb() + }, 100) + }) +) + +for (let i = 0; i < 100; i++) console.log(i) +/* replacement start */ + +process.on('beforeExit', (code) => { + hook.disable() +}) +/* replacement end */ + +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-writable.js b/test/parallel/test-stream-writable-writable.js index 7236b0862c..d5918c242f 100644 --- a/test/parallel/test-stream-writable-writable.js +++ b/test/parallel/test-stream-writable-writable.js @@ -1,63 +1,64 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); +const silentConsole = { + log() {}, -const { Writable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Writable } = require('../../lib/ours/index') { const w = new Writable({ write() {} - }); - assert.strictEqual(w.writable, true); - w.destroy(); - assert.strictEqual(w.writable, false); + }) + assert.strictEqual(w.writable, true) + w.destroy() + assert.strictEqual(w.writable, false) } - { const w = new Writable({ write: common.mustCall((chunk, encoding, callback) => { - callback(new Error()); + callback(new Error()) }) - }); - assert.strictEqual(w.writable, true); - w.write('asd'); - assert.strictEqual(w.writable, false); - w.on('error', common.mustCall()); + }) + assert.strictEqual(w.writable, true) + w.write('asd') + assert.strictEqual(w.writable, false) + w.on('error', common.mustCall()) } - { const w = new Writable({ write: common.mustCall((chunk, encoding, callback) => { process.nextTick(() => { - callback(new Error()); - assert.strictEqual(w.writable, false); - }); + callback(new Error()) + assert.strictEqual(w.writable, false) + }) }) - }); - w.write('asd'); - w.on('error', common.mustCall()); + }) + w.write('asd') + w.on('error', common.mustCall()) } - { const w = new Writable({ write: common.mustNotCall() - }); - 
assert.strictEqual(w.writable, true); - w.end(); - assert.strictEqual(w.writable, false); + }) + assert.strictEqual(w.writable, true) + w.end() + assert.strictEqual(w.writable, false) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-write-cb-error.js b/test/parallel/test-stream-writable-write-cb-error.js index 26e998f600..9d505f159d 100644 --- a/test/parallel/test-stream-writable-write-cb-error.js +++ b/test/parallel/test-stream-writable-write-cb-error.js @@ -1,73 +1,83 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const { Writable } = require('../../lib/ours/index'); -const assert = require('assert'); +const silentConsole = { + log() {}, -// Ensure callback is always invoked before + error() {} +} +const common = require('../common') + +const { Writable } = require('../../lib/ours/index') + +const assert = require('assert') // Ensure callback is always invoked before // error is emitted. Regardless if error was // sync or async. { - let callbackCalled = false; - // Sync Error + let callbackCalled = false // Sync Error + const writable = new Writable({ write: common.mustCall((buf, enc, cb) => { - cb(new Error()); + cb(new Error()) + }) + }) + writable.on( + 'error', + common.mustCall(() => { + assert.strictEqual(callbackCalled, true) + }) + ) + writable.write( + 'hi', + common.mustCall(() => { + callbackCalled = true }) - }); - writable.on('error', common.mustCall(() => { - assert.strictEqual(callbackCalled, true); - })); - writable.write('hi', common.mustCall(() => { - callbackCalled = true; - })); + ) } - { - let callbackCalled = false; - // Async Error + let callbackCalled = false // Async Error + const writable = new Writable({ write: common.mustCall((buf, enc, cb) => { - process.nextTick(cb, new Error()); + process.nextTick(cb, new Error()) + }) + }) + writable.on( + 'error', + common.mustCall(() => { + assert.strictEqual(callbackCalled, true) }) - }); - writable.on('error', common.mustCall(() => { - assert.strictEqual(callbackCalled, true); - })); - writable.write('hi', common.mustCall(() => { - callbackCalled = true; - })); + ) + writable.write( + 'hi', + common.mustCall(() => { + callbackCalled = true + }) + ) } - { // Sync Error const writable = new Writable({ write: common.mustCall((buf, enc, cb) => { - cb(new Error()); + cb(new Error()) }) - }); - - writable.on('error', common.mustCall()); + }) + writable.on('error', common.mustCall()) + let cnt = 0 // Ensure we don't live lock on sync error - let cnt = 0; - // Ensure we don't live lock on sync error - while (writable.write('a')) - cnt++; + while (writable.write('a')) cnt++ - assert.strictEqual(cnt, 0); + assert.strictEqual(cnt, 0) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited 
code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-write-cb-twice.js b/test/parallel/test-stream-writable-write-cb-twice.js index 988206e697..b66f28153b 100644 --- a/test/parallel/test-stream-writable-write-cb-twice.js +++ b/test/parallel/test-stream-writable-write-cb-twice.js @@ -1,67 +1,78 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const { Writable } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const { Writable } = require('../../lib/ours/index') { // Sync + Sync const writable = new Writable({ write: common.mustCall((buf, enc, cb) => { - cb(); - cb(); + cb() + cb() + }) + }) + writable.write('hi') + writable.on( + 'error', + common.expectsError({ + code: 'ERR_MULTIPLE_CALLBACK', + name: 'Error' }) - }); - writable.write('hi'); - writable.on('error', common.expectsError({ - code: 'ERR_MULTIPLE_CALLBACK', - name: 'Error' - })); + ) } - { // Sync + Async const writable = new Writable({ write: common.mustCall((buf, enc, cb) => { - cb(); + cb() process.nextTick(() => { - cb(); - }); + cb() + }) + }) + }) + writable.write('hi') + writable.on( + 'error', + common.expectsError({ + code: 'ERR_MULTIPLE_CALLBACK', + name: 'Error' }) - }); - writable.write('hi'); - writable.on('error', common.expectsError({ - code: 'ERR_MULTIPLE_CALLBACK', - name: 'Error' - })); + ) } - { // Async + Async const writable = new Writable({ write: common.mustCall((buf, enc, cb) => { - process.nextTick(cb); + process.nextTick(cb) process.nextTick(() => { - cb(); - }); + cb() + }) + }) + }) + writable.write('hi') + writable.on( + 'error', + common.expectsError({ + code: 'ERR_MULTIPLE_CALLBACK', + name: 'Error' }) - }); - writable.write('hi'); - writable.on('error', common.expectsError({ - code: 'ERR_MULTIPLE_CALLBACK', - name: 'Error' - })); + ) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-write-error.js b/test/parallel/test-stream-writable-write-error.js index 92af5af549..248a790969 100644 --- a/test/parallel/test-stream-writable-write-error.js +++ b/test/parallel/test-stream-writable-write-error.js @@ -1,34 +1,46 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); +const silentConsole = { + log() {}, -const { Writable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Writable } = require('../../lib/ours/index') function expectError(w, args, code, sync) { if (sync) { if (code) { - assert.throws(() => w.write(...args), { code }); + assert.throws(() => w.write(...args), { + code + }) } else { - w.write(...args); + w.write(...args) } } else { - let errorCalled = false; - let ticked = false; - w.write(...args, common.mustCall((err) => { - assert.strictEqual(ticked, true); - assert.strictEqual(errorCalled, 
false); - assert.strictEqual(err.code, code); - })); - ticked = true; - w.on('error', common.mustCall((err) => { - errorCalled = true; - assert.strictEqual(err.code, code); - })); + let errorCalled = false + let ticked = false + w.write( + ...args, + common.mustCall((err) => { + assert.strictEqual(ticked, true) + assert.strictEqual(errorCalled, false) + assert.strictEqual(err.code, code) + }) + ) + ticked = true + w.on( + 'error', + common.mustCall((err) => { + errorCalled = true + assert.strictEqual(err.code, code) + }) + ) } } @@ -36,55 +48,56 @@ function test(autoDestroy) { { const w = new Writable({ autoDestroy, + _write() {} - }); - w.end(); - expectError(w, ['asd'], 'ERR_STREAM_WRITE_AFTER_END'); + }) + w.end() + expectError(w, ['asd'], 'ERR_STREAM_WRITE_AFTER_END') } - { const w = new Writable({ autoDestroy, + _write() {} - }); - w.destroy(); + }) + w.destroy() } - { const w = new Writable({ autoDestroy, + _write() {} - }); - expectError(w, [null], 'ERR_STREAM_NULL_VALUES', true); + }) + expectError(w, [null], 'ERR_STREAM_NULL_VALUES', true) } - { const w = new Writable({ autoDestroy, + _write() {} - }); - expectError(w, [{}], 'ERR_INVALID_ARG_TYPE', true); + }) + expectError(w, [{}], 'ERR_INVALID_ARG_TYPE', true) } - { const w = new Writable({ decodeStrings: false, autoDestroy, + _write() {} - }); - expectError(w, ['asd', 'noencoding'], 'ERR_UNKNOWN_ENCODING', true); + }) + expectError(w, ['asd', 'noencoding'], 'ERR_UNKNOWN_ENCODING', true) } } -test(false); -test(true); +test(false) +test(true) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writable-write-writev-finish.js b/test/parallel/test-stream-writable-write-writev-finish.js index 043b9bf07c..d6cd55a394 100644 --- a/test/parallel/test-stream-writable-write-writev-finish.js +++ b/test/parallel/test-stream-writable-write-writev-finish.js @@ -1,167 +1,173 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') -// Ensure consistency between the finish event when using cork() +const stream = require('../../lib/ours/index') // Ensure consistency between the finish event when using cork() // and writev and when not using them { - const writable = new stream.Writable(); + const writable = new stream.Writable() writable._write = (chunks, encoding, cb) => { - cb(new Error('write test error')); - }; - - writable.on('finish', common.mustNotCall()); - writable.on('prefinish', common.mustNotCall()); - writable.on('error', common.mustCall((er) => { - assert.strictEqual(er.message, 'write test error'); - })); - - writable.end('test'); + cb(new Error('write test error')) + } + + writable.on('finish', common.mustNotCall()) + writable.on('prefinish', common.mustNotCall()) + writable.on( + 'error', + common.mustCall((er) => { + assert.strictEqual(er.message, 'write test error') + }) + ) + writable.end('test') } - { - 
const writable = new stream.Writable(); + const writable = new stream.Writable() writable._write = (chunks, encoding, cb) => { - setImmediate(cb, new Error('write test error')); - }; - - writable.on('finish', common.mustNotCall()); - writable.on('prefinish', common.mustNotCall()); - writable.on('error', common.mustCall((er) => { - assert.strictEqual(er.message, 'write test error'); - })); - - writable.end('test'); + setImmediate(cb, new Error('write test error')) + } + + writable.on('finish', common.mustNotCall()) + writable.on('prefinish', common.mustNotCall()) + writable.on( + 'error', + common.mustCall((er) => { + assert.strictEqual(er.message, 'write test error') + }) + ) + writable.end('test') } - { - const writable = new stream.Writable(); + const writable = new stream.Writable() writable._write = (chunks, encoding, cb) => { - cb(new Error('write test error')); - }; + cb(new Error('write test error')) + } writable._writev = (chunks, cb) => { - cb(new Error('writev test error')); - }; - - writable.on('finish', common.mustNotCall()); - writable.on('prefinish', common.mustNotCall()); - writable.on('error', common.mustCall((er) => { - assert.strictEqual(er.message, 'writev test error'); - })); - - writable.cork(); - writable.write('test'); - - setImmediate(function() { - writable.end('test'); - }); + cb(new Error('writev test error')) + } + + writable.on('finish', common.mustNotCall()) + writable.on('prefinish', common.mustNotCall()) + writable.on( + 'error', + common.mustCall((er) => { + assert.strictEqual(er.message, 'writev test error') + }) + ) + writable.cork() + writable.write('test') + setImmediate(function () { + writable.end('test') + }) } - { - const writable = new stream.Writable(); + const writable = new stream.Writable() writable._write = (chunks, encoding, cb) => { - setImmediate(cb, new Error('write test error')); - }; + setImmediate(cb, new Error('write test error')) + } writable._writev = (chunks, cb) => { - setImmediate(cb, new Error('writev test error')); - }; - - writable.on('finish', common.mustNotCall()); - writable.on('prefinish', common.mustNotCall()); - writable.on('error', common.mustCall((er) => { - assert.strictEqual(er.message, 'writev test error'); - })); - - writable.cork(); - writable.write('test'); - - setImmediate(function() { - writable.end('test'); - }); -} - -// Regression test for + setImmediate(cb, new Error('writev test error')) + } + + writable.on('finish', common.mustNotCall()) + writable.on('prefinish', common.mustNotCall()) + writable.on( + 'error', + common.mustCall((er) => { + assert.strictEqual(er.message, 'writev test error') + }) + ) + writable.cork() + writable.write('test') + setImmediate(function () { + writable.end('test') + }) +} // Regression test for // https://github.com/nodejs/node/issues/13812 { - const rs = new stream.Readable(); - rs.push('ok'); - rs.push(null); - rs._read = () => {}; + const rs = new stream.Readable() + rs.push('ok') + rs.push(null) - const ws = new stream.Writable(); + rs._read = () => {} - ws.on('finish', common.mustNotCall()); - ws.on('error', common.mustCall()); + const ws = new stream.Writable() + ws.on('finish', common.mustNotCall()) + ws.on('error', common.mustCall()) ws._write = (chunk, encoding, done) => { - setImmediate(done, new Error()); - }; - rs.pipe(ws); -} + setImmediate(done, new Error()) + } + rs.pipe(ws) +} { - const rs = new stream.Readable(); - rs.push('ok'); - rs.push(null); - rs._read = () => {}; + const rs = new stream.Readable() + rs.push('ok') + rs.push(null) - const ws = new 
stream.Writable(); + rs._read = () => {} - ws.on('finish', common.mustNotCall()); - ws.on('error', common.mustCall()); + const ws = new stream.Writable() + ws.on('finish', common.mustNotCall()) + ws.on('error', common.mustCall()) ws._write = (chunk, encoding, done) => { - done(new Error()); - }; - rs.pipe(ws); -} + done(new Error()) + } + rs.pipe(ws) +} { - const w = new stream.Writable(); + const w = new stream.Writable() + w._write = (chunk, encoding, cb) => { - process.nextTick(cb); - }; - w.on('error', common.mustCall()); - w.on('finish', common.mustNotCall()); + process.nextTick(cb) + } + + w.on('error', common.mustCall()) + w.on('finish', common.mustNotCall()) w.on('prefinish', () => { - w.write("shouldn't write in prefinish listener"); - }); - w.end(); + w.write("shouldn't write in prefinish listener") + }) + w.end() } - { - const w = new stream.Writable(); + const w = new stream.Writable() + w._write = (chunk, encoding, cb) => { - process.nextTick(cb); - }; - w.on('error', common.mustCall()); + process.nextTick(cb) + } + + w.on('error', common.mustCall()) w.on('finish', () => { - w.write("shouldn't write in finish listener"); - }); - w.end(); + w.write("shouldn't write in finish listener") + }) + w.end() } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writableState-ending.js b/test/parallel/test-stream-writableState-ending.js index 29d9427e80..6eaac1958c 100644 --- a/test/parallel/test-stream-writableState-ending.js +++ b/test/parallel/test-stream-writableState-ending.js @@ -1,52 +1,52 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -require('../common'); + error() {} +} +require('../common') + +const assert = require('assert') -const assert = require('assert'); -const stream = require('../../lib/ours/index'); +const stream = require('../../lib/ours/index') -const writable = new stream.Writable(); +const writable = new stream.Writable() function testStates(ending, finished, ended) { - assert.strictEqual(writable._writableState.ending, ending); - assert.strictEqual(writable._writableState.finished, finished); - assert.strictEqual(writable._writableState.ended, ended); + assert.strictEqual(writable._writableState.ending, ending) + assert.strictEqual(writable._writableState.finished, finished) + assert.strictEqual(writable._writableState.ended, ended) } writable._write = (chunk, encoding, cb) => { // Ending, finished, ended start in false. - testStates(false, false, false); - cb(); -}; + testStates(false, false, false) + cb() +} writable.on('finish', () => { // Ending, finished, ended = true. - testStates(true, true, true); -}); - + testStates(true, true, true) +}) const result = writable.end('testing function end()', () => { // Ending, finished, ended = true. - testStates(true, true, true); -}); - -// End returns the writable instance -assert.strictEqual(result, writable); + testStates(true, true, true) +}) // End returns the writable instance -// Ending, ended = true. +assert.strictEqual(result, writable) // Ending, ended = true. // finished = false. 
-testStates(true, false, true); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + +testStates(true, false, true) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js b/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js index 05edabd29d..99b3a46c73 100644 --- a/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js +++ b/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js @@ -1,72 +1,66 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; +const silentConsole = { + log() {}, -const common = require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') -const writable = new stream.Writable(); +const assert = require('assert') -writable._writev = common.mustCall((chunks, cb) => { - assert.strictEqual(chunks.length, 2); - cb(); -}, 1); +const stream = require('../../lib/ours/index') +const writable = new stream.Writable() +writable._writev = common.mustCall((chunks, cb) => { + assert.strictEqual(chunks.length, 2) + cb() +}, 1) writable._write = common.mustCall((chunk, encoding, cb) => { - cb(); -}, 1); + cb() +}, 1) // first cork -// first cork -writable.cork(); -assert.strictEqual(writable._writableState.corked, 1); -assert.strictEqual(writable._writableState.bufferedRequestCount, 0); +writable.cork() +assert.strictEqual(writable._writableState.corked, 1) +assert.strictEqual(writable._writableState.bufferedRequestCount, 0) // cork again -// cork again -writable.cork(); -assert.strictEqual(writable._writableState.corked, 2); +writable.cork() +assert.strictEqual(writable._writableState.corked, 2) // The first chunk is buffered -// The first chunk is buffered -writable.write('first chunk'); -assert.strictEqual(writable._writableState.bufferedRequestCount, 1); +writable.write('first chunk') +assert.strictEqual(writable._writableState.bufferedRequestCount, 1) // First uncork does nothing -// First uncork does nothing -writable.uncork(); -assert.strictEqual(writable._writableState.corked, 1); -assert.strictEqual(writable._writableState.bufferedRequestCount, 1); +writable.uncork() +assert.strictEqual(writable._writableState.corked, 1) +assert.strictEqual(writable._writableState.bufferedRequestCount, 1) +process.nextTick(uncork) // The second chunk is buffered, because we uncork at the end of tick -process.nextTick(uncork); - -// The second chunk is buffered, because we uncork at the end of tick -writable.write('second chunk'); -assert.strictEqual(writable._writableState.corked, 1); -assert.strictEqual(writable._writableState.bufferedRequestCount, 2); +writable.write('second chunk') +assert.strictEqual(writable._writableState.corked, 1) +assert.strictEqual(writable._writableState.bufferedRequestCount, 2) function uncork() { // Second uncork flushes the buffer - writable.uncork(); - assert.strictEqual(writable._writableState.corked, 0); - assert.strictEqual(writable._writableState.bufferedRequestCount, 0); + writable.uncork() + 
assert.strictEqual(writable._writableState.corked, 0) + assert.strictEqual(writable._writableState.bufferedRequestCount, 0) // Verify that end() uncorks correctly - // Verify that end() uncorks correctly - writable.cork(); - writable.write('third chunk'); - writable.end(); + writable.cork() + writable.write('third chunk') + writable.end() // End causes an uncork() as well - // End causes an uncork() as well - assert.strictEqual(writable._writableState.corked, 0); - assert.strictEqual(writable._writableState.bufferedRequestCount, 0); + assert.strictEqual(writable._writableState.corked, 0) + assert.strictEqual(writable._writableState.bufferedRequestCount, 0) } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-write-destroy.js b/test/parallel/test-stream-write-destroy.js index 9915c989a3..031e4bfdf6 100644 --- a/test/parallel/test-stream-write-destroy.js +++ b/test/parallel/test-stream-write-destroy.js @@ -1,77 +1,80 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); -const { Writable } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -// Test interaction between calling .destroy() on a writable and pending -// writes. + error() {} +} +require('../common') -for (const withPendingData of [ false, true ]) { - for (const useEnd of [ false, true ]) { - const callbacks = []; +const assert = require('assert') +const { Writable } = require('../../lib/ours/index') // Test interaction between calling .destroy() on a writable and pending +// writes. + +for (const withPendingData of [false, true]) { + for (const useEnd of [false, true]) { + const callbacks = [] const w = new Writable({ write(data, enc, cb) { - callbacks.push(cb); + callbacks.push(cb) }, + // Effectively disable the HWM to observe 'drain' events more easily. highWaterMark: 1 - }); - - let chunksWritten = 0; - let drains = 0; - w.on('drain', () => drains++); + }) + let chunksWritten = 0 + let drains = 0 + w.on('drain', () => drains++) function onWrite(err) { if (err) { - assert.strictEqual(w.destroyed, true); - assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED'); + assert.strictEqual(w.destroyed, true) + assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED') } else { - chunksWritten++; + chunksWritten++ } } - w.write('abc', onWrite); - assert.strictEqual(chunksWritten, 0); - assert.strictEqual(drains, 0); - callbacks.shift()(); - assert.strictEqual(chunksWritten, 1); - assert.strictEqual(drains, 1); + w.write('abc', onWrite) + assert.strictEqual(chunksWritten, 0) + assert.strictEqual(drains, 0) + callbacks.shift()() + assert.strictEqual(chunksWritten, 1) + assert.strictEqual(drains, 1) if (withPendingData) { // Test 2 cases: There either is or is not data still in the write queue. // (The second write will never actually get executed either way.) - w.write('def', onWrite); + w.write('def', onWrite) } + if (useEnd) { // Again, test 2 cases: Either we indicate that we want to end the // writable or not. 
- w.end('ghi', onWrite); + w.end('ghi', onWrite) } else { - w.write('ghi', onWrite); + w.write('ghi', onWrite) } - assert.strictEqual(chunksWritten, 1); - w.destroy(); - assert.strictEqual(chunksWritten, 1); - callbacks.shift()(); - assert.strictEqual(chunksWritten, useEnd && !withPendingData ? 1 : 2); - assert.strictEqual(callbacks.length, 0); - assert.strictEqual(drains, 1); + assert.strictEqual(chunksWritten, 1) + w.destroy() + assert.strictEqual(chunksWritten, 1) + callbacks.shift()() + assert.strictEqual(chunksWritten, useEnd && !withPendingData ? 1 : 2) + assert.strictEqual(callbacks.length, 0) + assert.strictEqual(drains, 1) } } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-write-drain.js b/test/parallel/test-stream-write-drain.js index c0bed561f1..dffbb18271 100644 --- a/test/parallel/test-stream-write-drain.js +++ b/test/parallel/test-stream-write-drain.js @@ -1,31 +1,33 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const { Writable } = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -// Don't emit 'drain' if ended + error() {} +} +const common = require('../common') + +const { Writable } = require('../../lib/ours/index') // Don't emit 'drain' if ended const w = new Writable({ write(data, enc, cb) { - process.nextTick(cb); + process.nextTick(cb) }, - highWaterMark: 1 -}); -w.on('drain', common.mustNotCall()); -w.write('asd'); -w.end(); + highWaterMark: 1 +}) +w.on('drain', common.mustNotCall()) +w.write('asd') +w.end() +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-write-final.js b/test/parallel/test-stream-write-final.js index cc802acca1..0b21343a9c 100644 --- a/test/parallel/test-stream-write-final.js +++ b/test/parallel/test-stream-write-final.js @@ -1,39 +1,46 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); +const silentConsole = { + log() {}, -const stream = require('../../lib/ours/index'); -let shutdown = false; + error() {} +} +const common = require('../common') +const assert = require('assert') + +const stream = require('../../lib/ours/index') + +let shutdown = false const w = new stream.Writable({ - final: common.mustCall(function(cb) { - assert.strictEqual(this, w); - setTimeout(function() { - shutdown = true; - cb(); - }, 100); + final: common.mustCall(function (cb) { + assert.strictEqual(this, w) + setTimeout(function () { + shutdown = true + cb() + }, 100) }), - write: function(chunk, e, cb) { - process.nextTick(cb); + write: function (chunk, e, cb) { + 
process.nextTick(cb) } -}); -w.on('finish', common.mustCall(function() { - assert(shutdown); -})); -w.write(Buffer.allocUnsafe(1)); -w.end(Buffer.allocUnsafe(0)); +}) +w.on( + 'finish', + common.mustCall(function () { + assert(shutdown) + }) +) +w.write(Buffer.allocUnsafe(1)) +w.end(Buffer.allocUnsafe(0)) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream-writev.js b/test/parallel/test-stream-writev.js index eaae012f52..17b91aa474 100644 --- a/test/parallel/test-stream-writev.js +++ b/test/parallel/test-stream-writev.js @@ -18,128 +18,148 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const stream = require('../../lib/ours/index') -const stream = require('../../lib/ours/index'); +const queue = [] -const queue = []; for (let decode = 0; decode < 2; decode++) { for (let uncork = 0; uncork < 2; uncork++) { for (let multi = 0; multi < 2; multi++) { - queue.push([!!decode, !!uncork, !!multi]); + queue.push([!!decode, !!uncork, !!multi]) } } } -run(); +run() function run() { - const t = queue.pop(); - if (t) - test(t[0], t[1], t[2], run); - else - silentConsole.log('ok'); + const t = queue.pop() + if (t) test(t[0], t[1], t[2], run) + else silentConsole.log('ok') } function test(decode, uncork, multi, next) { - silentConsole.log(`# decode=${decode} uncork=${uncork} multi=${multi}`); - let counter = 0; - let expectCount = 0; + silentConsole.log(`# decode=${decode} uncork=${uncork} multi=${multi}`) + let counter = 0 + let expectCount = 0 + function cnt(msg) { - expectCount++; - const expect = expectCount; - return function(er) { - assert.ifError(er); - counter++; - assert.strictEqual(counter, expect); - }; + expectCount++ + const expect = expectCount + return function (er) { + assert.ifError(er) + counter++ + assert.strictEqual(counter, expect) + } } - const w = new stream.Writable({ decodeStrings: decode }); - w._write = common.mustNotCall('Should not call _write'); - - const expectChunks = decode ? [ - { encoding: 'buffer', - chunk: [104, 101, 108, 108, 111, 44, 32] }, - { encoding: 'buffer', - chunk: [119, 111, 114, 108, 100] }, - { encoding: 'buffer', - chunk: [33] }, - { encoding: 'buffer', - chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] }, - { encoding: 'buffer', - chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173] }, - ] : [ - { encoding: 'ascii', chunk: 'hello, ' }, - { encoding: 'utf8', chunk: 'world' }, - { encoding: 'buffer', chunk: [33] }, - { encoding: 'latin1', chunk: '\nand then...' 
}, - { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' }, - ]; - - let actualChunks; - w._writev = function(chunks, cb) { - actualChunks = chunks.map(function(chunk) { + const w = new stream.Writable({ + decodeStrings: decode + }) + w._write = common.mustNotCall('Should not call _write') + const expectChunks = decode + ? [ + { + encoding: 'buffer', + chunk: [104, 101, 108, 108, 111, 44, 32] + }, + { + encoding: 'buffer', + chunk: [119, 111, 114, 108, 100] + }, + { + encoding: 'buffer', + chunk: [33] + }, + { + encoding: 'buffer', + chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] + }, + { + encoding: 'buffer', + chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173] + } + ] + : [ + { + encoding: 'ascii', + chunk: 'hello, ' + }, + { + encoding: 'utf8', + chunk: 'world' + }, + { + encoding: 'buffer', + chunk: [33] + }, + { + encoding: 'latin1', + chunk: '\nand then...' + }, + { + encoding: 'hex', + chunk: 'facebea7deadbeefdecafbad' + } + ] + let actualChunks + + w._writev = function (chunks, cb) { + actualChunks = chunks.map(function (chunk) { return { encoding: chunk.encoding, - chunk: Buffer.isBuffer(chunk.chunk) ? - Array.prototype.slice.call(chunk.chunk) : chunk.chunk - }; - }); - cb(); - }; - - w.cork(); - w.write('hello, ', 'ascii', cnt('hello')); - w.write('world', 'utf8', cnt('world')); - - if (multi) - w.cork(); - - w.write(Buffer.from('!'), 'buffer', cnt('!')); - w.write('\nand then...', 'latin1', cnt('and then')); - - if (multi) - w.uncork(); - - w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex')); - - if (uncork) - w.uncork(); - - w.end(cnt('end')); + chunk: Buffer.isBuffer(chunk.chunk) ? Array.prototype.slice.call(chunk.chunk) : chunk.chunk + } + }) + cb() + } - w.on('finish', function() { + w.cork() + w.write('hello, ', 'ascii', cnt('hello')) + w.write('world', 'utf8', cnt('world')) + if (multi) w.cork() + w.write(Buffer.from('!'), 'buffer', cnt('!')) + w.write('\nand then...', 'latin1', cnt('and then')) + if (multi) w.uncork() + w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex')) + if (uncork) w.uncork() + w.end(cnt('end')) + w.on('finish', function () { // Make sure finish comes after all the write cb - cnt('finish')(); - assert.deepStrictEqual(actualChunks, expectChunks); - next(); - }); + cnt('finish')() + assert.deepStrictEqual(actualChunks, expectChunks) + next() + }) } { const w = new stream.Writable({ - writev: common.mustCall(function(chunks, cb) { - cb(); + writev: common.mustCall(function (chunks, cb) { + cb() }) - }); - w.write('asd', common.mustCall()); + }) + w.write('asd', common.mustCall()) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-base64-single-char-read-end.js b/test/parallel/test-stream2-base64-single-char-read-end.js index b11e27e674..afab641d37 100644 --- a/test/parallel/test-stream2-base64-single-char-read-end.js +++ b/test/parallel/test-stream2-base64-single-char-read-end.js @@ -18,54 +18,58 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
+'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const { Readable: R, Writable: W } = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +require('../common') -const src = new R({ encoding: 'base64' }); -const dst = new W(); -let hasRead = false; -const accum = []; +const { Readable: R, Writable: W } = require('../../lib/ours/index') -src._read = function(n) { - if (!hasRead) { - hasRead = true; - process.nextTick(function() { - src.push(Buffer.from('1')); - src.push(null); - }); - } -}; +const assert = require('assert') -dst._write = function(chunk, enc, cb) { - accum.push(chunk); - cb(); -}; +const src = new R({ + encoding: 'base64' +}) +const dst = new W() +let hasRead = false +const accum = [] -src.on('end', function() { - assert.strictEqual(String(Buffer.concat(accum)), 'MQ=='); - clearTimeout(timeout); -}); +src._read = function (n) { + if (!hasRead) { + hasRead = true + process.nextTick(function () { + src.push(Buffer.from('1')) + src.push(null) + }) + } +} -src.pipe(dst); +dst._write = function (chunk, enc, cb) { + accum.push(chunk) + cb() +} -const timeout = setTimeout(function() { - assert.fail('timed out waiting for _write'); -}, 100); +src.on('end', function () { + assert.strictEqual(String(Buffer.concat(accum)), 'MQ==') + clearTimeout(timeout) +}) +src.pipe(dst) +const timeout = setTimeout(function () { + assert.fail('timed out waiting for _write') +}, 100) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-basic.js b/test/parallel/test-stream2-basic.js index b75f95a100..81ef6080a7 100644 --- a/test/parallel/test-stream2-basic.js +++ b/test/parallel/test-stream2-basic.js @@ -18,443 +18,401 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; + error() {} +} +const common = require('../common') + +const { Readable: R, Writable: W } = require('../../lib/ours/index') -const common = require('../common'); -const { Readable: R, Writable: W } = require('../../lib/ours/index'); -const assert = require('assert'); +const assert = require('assert') -const EE = require('events').EventEmitter; +const EE = require('events').EventEmitter class TestReader extends R { constructor(n) { - super(); - this._buffer = Buffer.alloc(n || 100, 'x'); - this._pos = 0; - this._bufs = 10; + super() + this._buffer = Buffer.alloc(n || 100, 'x') + this._pos = 0 + this._bufs = 10 } _read(n) { - const max = this._buffer.length - this._pos; - n = Math.max(n, 0); - const toRead = Math.min(n, max); + const max = this._buffer.length - this._pos + n = Math.max(n, 0) + const toRead = Math.min(n, max) + if (toRead === 0) { // Simulate the read buffer filling up with some more bytes some time // in the future. 
setTimeout(() => { - this._pos = 0; - this._bufs -= 1; + this._pos = 0 + this._bufs -= 1 + if (this._bufs <= 0) { // read them all! - if (!this.ended) - this.push(null); + if (!this.ended) this.push(null) } else { // now we have more. // kinda cheating by calling _read, but whatever, // it's just fake anyway. - this._read(n); + this._read(n) } - }, 10); - return; + }, 10) + return } - const ret = this._buffer.slice(this._pos, this._pos + toRead); - this._pos += toRead; - this.push(ret); + const ret = this._buffer.slice(this._pos, this._pos + toRead) + + this._pos += toRead + this.push(ret) } } class TestWriter extends EE { constructor() { - super(); - this.received = []; - this.flush = false; + super() + this.received = [] + this.flush = false } write(c) { - this.received.push(c.toString()); - this.emit('write', c); - return true; + this.received.push(c.toString()) + this.emit('write', c) + return true } end(c) { - if (c) this.write(c); - this.emit('end', this.received); + if (c) this.write(c) + this.emit('end', this.received) } } { // Test basic functionality - const r = new TestReader(20); - - const reads = []; - const expect = [ 'x', - 'xx', - 'xxx', - 'xxxx', - 'xxxxx', - 'xxxxxxxxx', - 'xxxxxxxxxx', - 'xxxxxxxxxxxx', - 'xxxxxxxxxxxxx', - 'xxxxxxxxxxxxxxx', - 'xxxxxxxxxxxxxxxxx', - 'xxxxxxxxxxxxxxxxxxx', - 'xxxxxxxxxxxxxxxxxxxxx', - 'xxxxxxxxxxxxxxxxxxxxxxx', - 'xxxxxxxxxxxxxxxxxxxxxxxxx', - 'xxxxxxxxxxxxxxxxxxxxx' ]; - - r.on('end', common.mustCall(function() { - assert.deepStrictEqual(reads, expect); - })); - - let readSize = 1; + const r = new TestReader(20) + const reads = [] + const expect = [ + 'x', + 'xx', + 'xxx', + 'xxxx', + 'xxxxx', + 'xxxxxxxxx', + 'xxxxxxxxxx', + 'xxxxxxxxxxxx', + 'xxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxxxxxxxxxxxx', + 'xxxxxxxxxxxxxxxxxxxxx' + ] + r.on( + 'end', + common.mustCall(function () { + assert.deepStrictEqual(reads, expect) + }) + ) + let readSize = 1 + function flow() { - let res; + let res + while (null !== (res = r.read(readSize++))) { - reads.push(res.toString()); + reads.push(res.toString()) } - r.once('readable', flow); + + r.once('readable', flow) } - flow(); + flow() } - { // Verify pipe - const r = new TestReader(5); - - const expect = [ 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx' ]; - - const w = new TestWriter(); - - w.on('end', common.mustCall(function(received) { - assert.deepStrictEqual(received, expect); - })); - - r.pipe(w); + const r = new TestReader(5) + const expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx'] + const w = new TestWriter() + w.on( + 'end', + common.mustCall(function (received) { + assert.deepStrictEqual(received, expect) + }) + ) + r.pipe(w) } - - -[1, 2, 3, 4, 5, 6, 7, 8, 9].forEach(function(SPLIT) { +;[1, 2, 3, 4, 5, 6, 7, 8, 9].forEach(function (SPLIT) { // Verify unpipe - const r = new TestReader(5); - - // Unpipe after 3 writes, then write to another stream instead. - let expect = [ 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx' ]; - expect = [ expect.slice(0, SPLIT), expect.slice(SPLIT) ]; - - const w = [ new TestWriter(), new TestWriter() ]; - - let writes = SPLIT; - w[0].on('write', function() { + const r = new TestReader(5) // Unpipe after 3 writes, then write to another stream instead. 
+ + let expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx'] + expect = [expect.slice(0, SPLIT), expect.slice(SPLIT)] + const w = [new TestWriter(), new TestWriter()] + let writes = SPLIT + w[0].on('write', function () { if (--writes === 0) { - r.unpipe(); - assert.deepStrictEqual(r._readableState.pipes, []); - w[0].end(); - r.pipe(w[1]); - assert.deepStrictEqual(r._readableState.pipes, [w[1]]); + r.unpipe() + assert.deepStrictEqual(r._readableState.pipes, []) + w[0].end() + r.pipe(w[1]) + assert.deepStrictEqual(r._readableState.pipes, [w[1]]) } - }); - - let ended = 0; - - w[0].on('end', common.mustCall(function(results) { - ended++; - assert.strictEqual(ended, 1); - assert.deepStrictEqual(results, expect[0]); - })); - - w[1].on('end', common.mustCall(function(results) { - ended++; - assert.strictEqual(ended, 2); - assert.deepStrictEqual(results, expect[1]); - })); - - r.pipe(w[0]); -}); - - + }) + let ended = 0 + w[0].on( + 'end', + common.mustCall(function (results) { + ended++ + assert.strictEqual(ended, 1) + assert.deepStrictEqual(results, expect[0]) + }) + ) + w[1].on( + 'end', + common.mustCall(function (results) { + ended++ + assert.strictEqual(ended, 2) + assert.deepStrictEqual(results, expect[1]) + }) + ) + r.pipe(w[0]) +}) { // Verify both writers get the same data when piping to destinations - const r = new TestReader(5); - const w = [ new TestWriter(), new TestWriter() ]; - - const expect = [ 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx' ]; - - w[0].on('end', common.mustCall(function(received) { - assert.deepStrictEqual(received, expect); - })); - w[1].on('end', common.mustCall(function(received) { - assert.deepStrictEqual(received, expect); - })); - - r.pipe(w[0]); - r.pipe(w[1]); + const r = new TestReader(5) + const w = [new TestWriter(), new TestWriter()] + const expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx'] + w[0].on( + 'end', + common.mustCall(function (received) { + assert.deepStrictEqual(received, expect) + }) + ) + w[1].on( + 'end', + common.mustCall(function (received) { + assert.deepStrictEqual(received, expect) + }) + ) + r.pipe(w[0]) + r.pipe(w[1]) } - - -[1, 2, 3, 4, 5, 6, 7, 8, 9].forEach(function(SPLIT) { +;[1, 2, 3, 4, 5, 6, 7, 8, 9].forEach(function (SPLIT) { // Verify multi-unpipe - const r = new TestReader(5); - - // Unpipe after 3 writes, then write to another stream instead. - let expect = [ 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx', - 'xxxxx' ]; - expect = [ expect.slice(0, SPLIT), expect.slice(SPLIT) ]; - - const w = [ new TestWriter(), new TestWriter(), new TestWriter() ]; - - let writes = SPLIT; - w[0].on('write', function() { + const r = new TestReader(5) // Unpipe after 3 writes, then write to another stream instead. 
+ + let expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx'] + expect = [expect.slice(0, SPLIT), expect.slice(SPLIT)] + const w = [new TestWriter(), new TestWriter(), new TestWriter()] + let writes = SPLIT + w[0].on('write', function () { if (--writes === 0) { - r.unpipe(); - w[0].end(); - r.pipe(w[1]); + r.unpipe() + w[0].end() + r.pipe(w[1]) } - }); - - let ended = 0; - - w[0].on('end', common.mustCall(function(results) { - ended++; - assert.strictEqual(ended, 1); - assert.deepStrictEqual(results, expect[0]); - })); - - w[1].on('end', common.mustCall(function(results) { - ended++; - assert.strictEqual(ended, 2); - assert.deepStrictEqual(results, expect[1]); - })); - - r.pipe(w[0]); - r.pipe(w[2]); -}); - + }) + let ended = 0 + w[0].on( + 'end', + common.mustCall(function (results) { + ended++ + assert.strictEqual(ended, 1) + assert.deepStrictEqual(results, expect[0]) + }) + ) + w[1].on( + 'end', + common.mustCall(function (results) { + ended++ + assert.strictEqual(ended, 2) + assert.deepStrictEqual(results, expect[1]) + }) + ) + r.pipe(w[0]) + r.pipe(w[2]) +}) { // Verify that back pressure is respected - const r = new R({ objectMode: true }); - r._read = common.mustNotCall(); - let counter = 0; - r.push(['one']); - r.push(['two']); - r.push(['three']); - r.push(['four']); - r.push(null); - - const w1 = new R(); - w1.write = function(chunk) { - assert.strictEqual(chunk[0], 'one'); - w1.emit('close'); - process.nextTick(function() { - r.pipe(w2); - r.pipe(w3); - }); - }; - w1.end = common.mustNotCall(); - - r.pipe(w1); - - const expected = ['two', 'two', 'three', 'three', 'four', 'four']; - - const w2 = new R(); - w2.write = function(chunk) { - assert.strictEqual(chunk[0], expected.shift()); - assert.strictEqual(counter, 0); - - counter++; + const r = new R({ + objectMode: true + }) + r._read = common.mustNotCall() + let counter = 0 + r.push(['one']) + r.push(['two']) + r.push(['three']) + r.push(['four']) + r.push(null) + const w1 = new R() + + w1.write = function (chunk) { + assert.strictEqual(chunk[0], 'one') + w1.emit('close') + process.nextTick(function () { + r.pipe(w2) + r.pipe(w3) + }) + } + + w1.end = common.mustNotCall() + r.pipe(w1) + const expected = ['two', 'two', 'three', 'three', 'four', 'four'] + const w2 = new R() + + w2.write = function (chunk) { + assert.strictEqual(chunk[0], expected.shift()) + assert.strictEqual(counter, 0) + counter++ if (chunk[0] === 'four') { - return true; + return true } - setTimeout(function() { - counter--; - w2.emit('drain'); - }, 10); - - return false; - }; - w2.end = common.mustCall(); + setTimeout(function () { + counter-- + w2.emit('drain') + }, 10) + return false + } - const w3 = new R(); - w3.write = function(chunk) { - assert.strictEqual(chunk[0], expected.shift()); - assert.strictEqual(counter, 1); + w2.end = common.mustCall() + const w3 = new R() - counter++; + w3.write = function (chunk) { + assert.strictEqual(chunk[0], expected.shift()) + assert.strictEqual(counter, 1) + counter++ if (chunk[0] === 'four') { - return true; + return true } - setTimeout(function() { - counter--; - w3.emit('drain'); - }, 50); - - return false; - }; - w3.end = common.mustCall(function() { - assert.strictEqual(counter, 2); - assert.strictEqual(expected.length, 0); - }); -} + setTimeout(function () { + counter-- + w3.emit('drain') + }, 50) + return false + } + w3.end = common.mustCall(function () { + assert.strictEqual(counter, 2) + assert.strictEqual(expected.length, 0) + }) +} { // Verify read(0) behavior 
for ended streams - const r = new R(); - let written = false; - let ended = false; - r._read = common.mustNotCall(); - - r.push(Buffer.from('foo')); - r.push(null); - - const v = r.read(0); - - assert.strictEqual(v, null); - - const w = new R(); - w.write = function(buffer) { - written = true; - assert.strictEqual(ended, false); - assert.strictEqual(buffer.toString(), 'foo'); - }; - - w.end = common.mustCall(function() { - ended = true; - assert.strictEqual(written, true); - }); + const r = new R() + let written = false + let ended = false + r._read = common.mustNotCall() + r.push(Buffer.from('foo')) + r.push(null) + const v = r.read(0) + assert.strictEqual(v, null) + const w = new R() + + w.write = function (buffer) { + written = true + assert.strictEqual(ended, false) + assert.strictEqual(buffer.toString(), 'foo') + } - r.pipe(w); + w.end = common.mustCall(function () { + ended = true + assert.strictEqual(written, true) + }) + r.pipe(w) } - { // Verify synchronous _read ending - const r = new R(); - let called = false; - r._read = function(n) { - r.push(null); - }; - - r.once('end', function() { - // Verify that this is called before the next tick - called = true; - }); + const r = new R() + let called = false - r.read(); + r._read = function (n) { + r.push(null) + } - process.nextTick(function() { - assert.strictEqual(called, true); - }); + r.once('end', function () { + // Verify that this is called before the next tick + called = true + }) + r.read() + process.nextTick(function () { + assert.strictEqual(called, true) + }) } - { // Verify that adding readable listeners trigger data flow - const r = new R({ highWaterMark: 5 }); - let onReadable = false; - let readCalled = 0; - - r._read = function(n) { - if (readCalled++ === 2) - r.push(null); - else - r.push(Buffer.from('asdf')); - }; - - r.on('readable', function() { - onReadable = true; - r.read(); - }); - - r.on('end', common.mustCall(function() { - assert.strictEqual(readCalled, 3); - assert.ok(onReadable); - })); -} + const r = new R({ + highWaterMark: 5 + }) + let onReadable = false + let readCalled = 0 + + r._read = function (n) { + if (readCalled++ === 2) r.push(null) + else r.push(Buffer.from('asdf')) + } + r.on('readable', function () { + onReadable = true + r.read() + }) + r.on( + 'end', + common.mustCall(function () { + assert.strictEqual(readCalled, 3) + assert.ok(onReadable) + }) + ) +} { // Verify that streams are chainable - const r = new R(); - r._read = common.mustCall(); - const r2 = r.setEncoding('utf8').pause().resume().pause(); - assert.strictEqual(r, r2); + const r = new R() + r._read = common.mustCall() + const r2 = r.setEncoding('utf8').pause().resume().pause() + assert.strictEqual(r, r2) } - { // Verify readableEncoding property - assert(Reflect.has(R.prototype, 'readableEncoding')); - - const r = new R({ encoding: 'utf8' }); - assert.strictEqual(r.readableEncoding, 'utf8'); + assert(Reflect.has(R.prototype, 'readableEncoding')) + const r = new R({ + encoding: 'utf8' + }) + assert.strictEqual(r.readableEncoding, 'utf8') } - { // Verify readableObjectMode property - assert(Reflect.has(R.prototype, 'readableObjectMode')); - - const r = new R({ objectMode: true }); - assert.strictEqual(r.readableObjectMode, true); + assert(Reflect.has(R.prototype, 'readableObjectMode')) + const r = new R({ + objectMode: true + }) + assert.strictEqual(r.readableObjectMode, true) } - { // Verify writableObjectMode property - assert(Reflect.has(W.prototype, 'writableObjectMode')); - - const w = new W({ objectMode: true }); - 
assert.strictEqual(w.writableObjectMode, true); + assert(Reflect.has(W.prototype, 'writableObjectMode')) + const w = new W({ + objectMode: true + }) + assert.strictEqual(w.writableObjectMode, true) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-compatibility.js b/test/parallel/test-stream2-compatibility.js index 3ba2b9393c..c6bbfdb077 100644 --- a/test/parallel/test-stream2-compatibility.js +++ b/test/parallel/test-stream2-compatibility.js @@ -18,68 +18,70 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const { Readable: R, Writable: W } = require('../../lib/ours/index'); -const assert = require('assert'); + error() {} +} +require('../common') + +const { Readable: R, Writable: W } = require('../../lib/ours/index') + +const assert = require('assert') -let ondataCalled = 0; +let ondataCalled = 0 class TestReader extends R { constructor() { - super(); - this._buffer = Buffer.alloc(100, 'x'); - + super() + this._buffer = Buffer.alloc(100, 'x') this.on('data', () => { - ondataCalled++; - }); + ondataCalled++ + }) } _read(n) { - this.push(this._buffer); - this._buffer = Buffer.alloc(0); + this.push(this._buffer) + this._buffer = Buffer.alloc(0) } } -const reader = new TestReader(); -setImmediate(function() { - assert.strictEqual(ondataCalled, 1); - silentConsole.log('ok'); - reader.push(null); -}); +const reader = new TestReader() +setImmediate(function () { + assert.strictEqual(ondataCalled, 1) + silentConsole.log('ok') + reader.push(null) +}) class TestWriter extends W { constructor() { - super(); - this.write('foo'); - this.end(); + super() + this.write('foo') + this.end() } _write(chunk, enc, cb) { - cb(); + cb() } } -const writer = new TestWriter(); - -process.on('exit', function() { - assert.strictEqual(reader.readable, false); - assert.strictEqual(writer.writable, false); - silentConsole.log('ok'); -}); +const writer = new TestWriter() +process.on('exit', function () { + assert.strictEqual(reader.readable, false) + assert.strictEqual(writer.writable, false) + silentConsole.log('ok') +}) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-decode-partial.js b/test/parallel/test-stream2-decode-partial.js index 1eba765509..3616e3f7a4 100644 --- a/test/parallel/test-stream2-decode-partial.js +++ b/test/parallel/test-stream2-decode-partial.js @@ -1,38 +1,42 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const 
silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const { Readable } = require('../../lib/ours/index'); -const assert = require('assert'); +const silentConsole = { + log() {}, -let buf = ''; -const euro = Buffer.from([0xE2, 0x82, 0xAC]); -const cent = Buffer.from([0xC2, 0xA2]); -const source = Buffer.concat([euro, cent]); + error() {} +} +require('../common') -const readable = Readable({ encoding: 'utf8' }); -readable.push(source.slice(0, 2)); -readable.push(source.slice(2, 4)); -readable.push(source.slice(4, 6)); -readable.push(null); +const { Readable } = require('../../lib/ours/index') -readable.on('data', function(data) { - buf += data; -}); +const assert = require('assert') -process.on('exit', function() { - assert.strictEqual(buf, '€¢'); -}); +let buf = '' +const euro = Buffer.from([0xe2, 0x82, 0xac]) +const cent = Buffer.from([0xc2, 0xa2]) +const source = Buffer.concat([euro, cent]) +const readable = Readable({ + encoding: 'utf8' +}) +readable.push(source.slice(0, 2)) +readable.push(source.slice(2, 4)) +readable.push(source.slice(4, 6)) +readable.push(null) +readable.on('data', function (data) { + buf += data +}) +process.on('exit', function () { + assert.strictEqual(buf, '€¢') +}) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-finish-pipe-error.js b/test/parallel/test-stream2-finish-pipe-error.js index 1872b9cd8e..622f7151bc 100644 --- a/test/parallel/test-stream2-finish-pipe-error.js +++ b/test/parallel/test-stream2-finish-pipe-error.js @@ -1,35 +1,39 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const stream = require('../../lib/ours/index'); +const silentConsole = { + log() {}, -process.on('uncaughtException', common.mustCall()); + error() {} +} +const common = require('../common') -const r = new stream.Readable(); -r._read = function(size) { - r.push(Buffer.allocUnsafe(size)); -}; +const stream = require('../../lib/ours/index') -const w = new stream.Writable(); -w._write = function(data, encoding, cb) { - cb(null); -}; +process.on('uncaughtException', common.mustCall()) +const r = new stream.Readable() -r.pipe(w); +r._read = function (size) { + r.push(Buffer.allocUnsafe(size)) +} -// end() after pipe should cause unhandled exception -w.end(); +const w = new stream.Writable() - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +w._write = function (data, encoding, cb) { + cb(null) +} + +r.pipe(w) // end() after pipe should cause unhandled exception + +w.end() +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-finish-pipe.js b/test/parallel/test-stream2-finish-pipe.js index 5e97159e3e..afb5e5a899 100644 --- a/test/parallel/test-stream2-finish-pipe.js +++ 
b/test/parallel/test-stream2-finish-pipe.js @@ -18,42 +18,46 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const stream = require('../../lib/ours/index'); + error() {} +} +require('../common') -const r = new stream.Readable(); -r._read = function(size) { - r.push(Buffer.allocUnsafe(size)); -}; +const stream = require('../../lib/ours/index') -const w = new stream.Writable(); -w._write = function(data, encoding, cb) { - process.nextTick(cb, null); -}; +const r = new stream.Readable() -r.pipe(w); +r._read = function (size) { + r.push(Buffer.allocUnsafe(size)) +} + +const w = new stream.Writable() + +w._write = function (data, encoding, cb) { + process.nextTick(cb, null) +} + +r.pipe(w) // end() must be called in nextTick or a WRITE_AFTER_END error occurs. -// end() must be called in nextTick or a WRITE_AFTER_END error occurs. process.nextTick(() => { // This might sound unrealistic, but it happens in net.js. When // socket.allowHalfOpen === false, EOF will cause .destroySoon() call which // ends the writable side of net.Socket. - w.end(); -}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + w.end() +}) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-httpclient-response-end.js b/test/parallel/test-stream2-httpclient-response-end.js index b4d96aaedb..fbee5912d6 100644 --- a/test/parallel/test-stream2-httpclient-response-end.js +++ b/test/parallel/test-stream2-httpclient-response-end.js @@ -1,40 +1,62 @@ +'use strict' - 'use strict' - - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const http = require('http'); -const msg = 'Hello'; -const server = http.createServer(function(req, res) { - res.writeHead(200, { 'Content-Type': 'text/plain' }); - res.end(msg); -}).listen(0, function() { - http.get({ port: this.address().port }, function(res) { - let data = ''; - res.on('readable', common.mustCall(function() { - silentConsole.log('readable event'); - let chunk; - while ((chunk = res.read()) !== null) { - data += chunk; +const tap = require('tap') + +const silentConsole = { + log() {}, + + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const http = require('http') + +const msg = 'Hello' +const server = http + .createServer(function (req, res) { + res.writeHead(200, { + 'Content-Type': 'text/plain' + }) + res.end(msg) + }) + .listen(0, function () { + http.get( + { + port: this.address().port + }, + function (res) { + let data = '' + res.on( + 'readable', + common.mustCall(function () { + silentConsole.log('readable event') + let chunk + + while ((chunk = res.read()) !== null) { + data += chunk + } + }) + ) + res.on( + 'end', + common.mustCall(function () { + silentConsole.log('end event') + assert.strictEqual(msg, data) + server.close() + }) + ) } - 
})); - res.on('end', common.mustCall(function() { - silentConsole.log('end event'); - assert.strictEqual(msg, data); - server.close(); - })); - }); -}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + ) + }) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-large-read-stall.js b/test/parallel/test-stream2-large-read-stall.js index 87db2aca76..e76dedaa73 100644 --- a/test/parallel/test-stream2-large-read-stall.js +++ b/test/parallel/test-stream2-large-read-stall.js @@ -18,72 +18,70 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); + error() {} +} +const common = require('../common') -// If everything aligns so that you do a read(n) of exactly the +const assert = require('assert') // If everything aligns so that you do a read(n) of exactly the // remaining buffer, then make sure that 'end' still emits. -const READSIZE = 100; -const PUSHSIZE = 20; -const PUSHCOUNT = 1000; -const HWM = 50; +const READSIZE = 100 +const PUSHSIZE = 20 +const PUSHCOUNT = 1000 +const HWM = 50 + +const Readable = require('../../lib/ours/index').Readable -const Readable = require('../../lib/ours/index').Readable; const r = new Readable({ highWaterMark: HWM -}); -const rs = r._readableState; - -r._read = push; +}) +const rs = r._readableState +r._read = push +r.on('readable', function () { + silentConsole.error('>> readable') + let ret -r.on('readable', function() { - silentConsole.error('>> readable'); - let ret; do { - silentConsole.error(` > read(${READSIZE})`); - ret = r.read(READSIZE); - silentConsole.error(` < ${ret && ret.length} (${rs.length} remain)`); - } while (ret && ret.length === READSIZE); + silentConsole.error(` > read(${READSIZE})`) + ret = r.read(READSIZE) + silentConsole.error(` < ${ret && ret.length} (${rs.length} remain)`) + } while (ret && ret.length === READSIZE) - silentConsole.error('<< after read()', - ret && ret.length, - rs.needReadable, - rs.length); -}); + silentConsole.error('<< after read()', ret && ret.length, rs.needReadable, rs.length) +}) +r.on( + 'end', + common.mustCall(function () { + assert.strictEqual(pushes, PUSHCOUNT + 1) + }) +) +let pushes = 0 -r.on('end', common.mustCall(function() { - assert.strictEqual(pushes, PUSHCOUNT + 1); -})); - -let pushes = 0; function push() { - if (pushes > PUSHCOUNT) - return; + if (pushes > PUSHCOUNT) return if (pushes++ === PUSHCOUNT) { - silentConsole.error(' push(EOF)'); - return r.push(null); + silentConsole.error(' push(EOF)') + return r.push(null) } - silentConsole.error(` push #${pushes}`); - if (r.push(Buffer.allocUnsafe(PUSHSIZE))) - setTimeout(push, 1); + silentConsole.error(` push #${pushes}`) + if (r.push(Buffer.allocUnsafe(PUSHSIZE))) setTimeout(push, 1) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test 
succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-objects.js b/test/parallel/test-stream2-objects.js index 260c2349ae..d6fcf14403 100644 --- a/test/parallel/test-stream2-objects.js +++ b/test/parallel/test-stream2-objects.js @@ -18,295 +18,371 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; + error() {} +} +const common = require('../common') + +const { Readable, Writable } = require('../../lib/ours/index') -const common = require('../common'); -const { Readable, Writable } = require('../../lib/ours/index'); -const assert = require('assert'); +const assert = require('assert') function toArray(callback) { - const stream = new Writable({ objectMode: true }); - const list = []; - stream.write = function(chunk) { - list.push(chunk); - }; + const stream = new Writable({ + objectMode: true + }) + const list = [] - stream.end = common.mustCall(function() { - callback(list); - }); + stream.write = function (chunk) { + list.push(chunk) + } - return stream; + stream.end = common.mustCall(function () { + callback(list) + }) + return stream } function fromArray(list) { - const r = new Readable({ objectMode: true }); - r._read = common.mustNotCall(); - list.forEach(function(chunk) { - r.push(chunk); - }); - r.push(null); - - return r; + const r = new Readable({ + objectMode: true + }) + r._read = common.mustNotCall() + list.forEach(function (chunk) { + r.push(chunk) + }) + r.push(null) + return r } { // Verify that objects can be read from the stream - const r = fromArray([{ one: '1' }, { two: '2' }]); - - const v1 = r.read(); - const v2 = r.read(); - const v3 = r.read(); - - assert.deepStrictEqual(v1, { one: '1' }); - assert.deepStrictEqual(v2, { two: '2' }); - assert.strictEqual(v3, null); + const r = fromArray([ + { + one: '1' + }, + { + two: '2' + } + ]) + const v1 = r.read() + const v2 = r.read() + const v3 = r.read() + assert.deepStrictEqual(v1, { + one: '1' + }) + assert.deepStrictEqual(v2, { + two: '2' + }) + assert.strictEqual(v3, null) } - { // Verify that objects can be piped into the stream - const r = fromArray([{ one: '1' }, { two: '2' }]); - - r.pipe(toArray(common.mustCall(function(list) { - assert.deepStrictEqual(list, [ - { one: '1' }, - { two: '2' }, - ]); - }))); + const r = fromArray([ + { + one: '1' + }, + { + two: '2' + } + ]) + r.pipe( + toArray( + common.mustCall(function (list) { + assert.deepStrictEqual(list, [ + { + one: '1' + }, + { + two: '2' + } + ]) + }) + ) + ) } - { // Verify that read(n) is ignored - const r = fromArray([{ one: '1' }, { two: '2' }]); - const value = r.read(2); - - assert.deepStrictEqual(value, { one: '1' }); + const r = fromArray([ + { + one: '1' + }, + { + two: '2' + } + ]) + const value = r.read(2) + assert.deepStrictEqual(value, { + one: '1' + }) } - { // Verify that objects can be synchronously read - const r = new Readable({ objectMode: true }); - const list = [{ one: '1' }, { two: '2' }]; - r._read = function(n) { - const item = list.shift(); - r.push(item 
|| null); - }; + const r = new Readable({ + objectMode: true + }) + const list = [ + { + one: '1' + }, + { + two: '2' + } + ] - r.pipe(toArray(common.mustCall(function(list) { - assert.deepStrictEqual(list, [ - { one: '1' }, - { two: '2' }, - ]); - }))); -} + r._read = function (n) { + const item = list.shift() + r.push(item || null) + } + r.pipe( + toArray( + common.mustCall(function (list) { + assert.deepStrictEqual(list, [ + { + one: '1' + }, + { + two: '2' + } + ]) + }) + ) + ) +} { // Verify that objects can be asynchronously read - const r = new Readable({ objectMode: true }); - const list = [{ one: '1' }, { two: '2' }]; - r._read = function(n) { - const item = list.shift(); - process.nextTick(function() { - r.push(item || null); - }); - }; + const r = new Readable({ + objectMode: true + }) + const list = [ + { + one: '1' + }, + { + two: '2' + } + ] - r.pipe(toArray(common.mustCall(function(list) { - assert.deepStrictEqual(list, [ - { one: '1' }, - { two: '2' }, - ]); - }))); -} + r._read = function (n) { + const item = list.shift() + process.nextTick(function () { + r.push(item || null) + }) + } + r.pipe( + toArray( + common.mustCall(function (list) { + assert.deepStrictEqual(list, [ + { + one: '1' + }, + { + two: '2' + } + ]) + }) + ) + ) +} { // Verify that strings can be read as objects const r = new Readable({ objectMode: true - }); - r._read = common.mustNotCall(); - const list = ['one', 'two', 'three']; - list.forEach(function(str) { - r.push(str); - }); - r.push(null); - - r.pipe(toArray(common.mustCall(function(array) { - assert.deepStrictEqual(array, list); - }))); + }) + r._read = common.mustNotCall() + const list = ['one', 'two', 'three'] + list.forEach(function (str) { + r.push(str) + }) + r.push(null) + r.pipe( + toArray( + common.mustCall(function (array) { + assert.deepStrictEqual(array, list) + }) + ) + ) } - { // Verify read(0) behavior for object streams const r = new Readable({ objectMode: true - }); - r._read = common.mustNotCall(); - - r.push('foobar'); - r.push(null); - - r.pipe(toArray(common.mustCall(function(array) { - assert.deepStrictEqual(array, ['foobar']); - }))); + }) + r._read = common.mustNotCall() + r.push('foobar') + r.push(null) + r.pipe( + toArray( + common.mustCall(function (array) { + assert.deepStrictEqual(array, ['foobar']) + }) + ) + ) } - { // Verify the behavior of pushing falsey values const r = new Readable({ objectMode: true - }); - r._read = common.mustNotCall(); - - r.push(false); - r.push(0); - r.push(''); - r.push(null); - - r.pipe(toArray(common.mustCall(function(array) { - assert.deepStrictEqual(array, [false, 0, '']); - }))); + }) + r._read = common.mustNotCall() + r.push(false) + r.push(0) + r.push('') + r.push(null) + r.pipe( + toArray( + common.mustCall(function (array) { + assert.deepStrictEqual(array, [false, 0, '']) + }) + ) + ) } - { // Verify high watermark _read() behavior const r = new Readable({ highWaterMark: 6, objectMode: true - }); - let calls = 0; - const list = ['1', '2', '3', '4', '5', '6', '7', '8']; - - r._read = function(n) { - calls++; - }; - - list.forEach(function(c) { - r.push(c); - }); - - const v = r.read(); + }) + let calls = 0 + const list = ['1', '2', '3', '4', '5', '6', '7', '8'] - assert.strictEqual(calls, 0); - assert.strictEqual(v, '1'); - - const v2 = r.read(); - assert.strictEqual(v2, '2'); - - const v3 = r.read(); - assert.strictEqual(v3, '3'); + r._read = function (n) { + calls++ + } - assert.strictEqual(calls, 1); + list.forEach(function (c) { + r.push(c) + }) + const v = r.read() + 
assert.strictEqual(calls, 0) + assert.strictEqual(v, '1') + const v2 = r.read() + assert.strictEqual(v2, '2') + const v3 = r.read() + assert.strictEqual(v3, '3') + assert.strictEqual(calls, 1) } - { // Verify high watermark push behavior const r = new Readable({ highWaterMark: 6, objectMode: true - }); - r._read = common.mustNotCall(); + }) + r._read = common.mustNotCall() + for (let i = 0; i < 6; i++) { - const bool = r.push(i); - assert.strictEqual(bool, i !== 5); + const bool = r.push(i) + assert.strictEqual(bool, i !== 5) } } - { // Verify that objects can be written to stream - const w = new Writable({ objectMode: true }); + const w = new Writable({ + objectMode: true + }) - w._write = function(chunk, encoding, cb) { - assert.deepStrictEqual(chunk, { foo: 'bar' }); - cb(); - }; + w._write = function (chunk, encoding, cb) { + assert.deepStrictEqual(chunk, { + foo: 'bar' + }) + cb() + } - w.on('finish', common.mustCall()); - w.write({ foo: 'bar' }); - w.end(); + w.on('finish', common.mustCall()) + w.write({ + foo: 'bar' + }) + w.end() } - { // Verify that multiple objects can be written to stream - const w = new Writable({ objectMode: true }); - const list = []; - - w._write = function(chunk, encoding, cb) { - list.push(chunk); - cb(); - }; + const w = new Writable({ + objectMode: true + }) + const list = [] - w.on('finish', common.mustCall(function() { - assert.deepStrictEqual(list, [0, 1, 2, 3, 4]); - })); + w._write = function (chunk, encoding, cb) { + list.push(chunk) + cb() + } - w.write(0); - w.write(1); - w.write(2); - w.write(3); - w.write(4); - w.end(); + w.on( + 'finish', + common.mustCall(function () { + assert.deepStrictEqual(list, [0, 1, 2, 3, 4]) + }) + ) + w.write(0) + w.write(1) + w.write(2) + w.write(3) + w.write(4) + w.end() } - { // Verify that strings can be written as objects const w = new Writable({ objectMode: true - }); - const list = []; - - w._write = function(chunk, encoding, cb) { - list.push(chunk); - process.nextTick(cb); - }; + }) + const list = [] - w.on('finish', common.mustCall(function() { - assert.deepStrictEqual(list, ['0', '1', '2', '3', '4']); - })); + w._write = function (chunk, encoding, cb) { + list.push(chunk) + process.nextTick(cb) + } - w.write('0'); - w.write('1'); - w.write('2'); - w.write('3'); - w.write('4'); - w.end(); + w.on( + 'finish', + common.mustCall(function () { + assert.deepStrictEqual(list, ['0', '1', '2', '3', '4']) + }) + ) + w.write('0') + w.write('1') + w.write('2') + w.write('3') + w.write('4') + w.end() } - { // Verify that stream buffers finish until callback is called const w = new Writable({ objectMode: true - }); - let called = false; - - w._write = function(chunk, encoding, cb) { - assert.strictEqual(chunk, 'foo'); - - process.nextTick(function() { - called = true; - cb(); - }); - }; - - w.on('finish', common.mustCall(function() { - assert.strictEqual(called, true); - })); + }) + let called = false + + w._write = function (chunk, encoding, cb) { + assert.strictEqual(chunk, 'foo') + process.nextTick(function () { + called = true + cb() + }) + } - w.write('foo'); - w.end(); + w.on( + 'finish', + common.mustCall(function () { + assert.strictEqual(called, true) + }) + ) + w.write('foo') + w.end() } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else 
{ + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-pipe-error-handling.js b/test/parallel/test-stream2-pipe-error-handling.js index 824626e5d5..f6fc74204b 100644 --- a/test/parallel/test-stream2-pipe-error-handling.js +++ b/test/parallel/test-stream2-pipe-error-handling.js @@ -18,104 +18,109 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); + error() {} +} +require('../common') + +const assert = require('assert') + +const stream = require('../../lib/ours/index') { - let count = 1000; - - const source = new stream.Readable(); - source._read = function(n) { - n = Math.min(count, n); - count -= n; - source.push(Buffer.allocUnsafe(n)); - }; - - let unpipedDest; - source.unpipe = function(dest) { - unpipedDest = dest; - stream.Readable.prototype.unpipe.call(this, dest); - }; - - const dest = new stream.Writable(); - dest._write = function(chunk, encoding, cb) { - cb(); - }; - - source.pipe(dest); - - let gotErr = null; - dest.on('error', function(err) { - gotErr = err; - }); - - let unpipedSource; - dest.on('unpipe', function(src) { - unpipedSource = src; - }); - - const err = new Error('This stream turned into bacon.'); - dest.emit('error', err); - assert.strictEqual(gotErr, err); - assert.strictEqual(unpipedSource, source); - assert.strictEqual(unpipedDest, dest); -} + let count = 1000 + const source = new stream.Readable() + + source._read = function (n) { + n = Math.min(count, n) + count -= n + source.push(Buffer.allocUnsafe(n)) + } + + let unpipedDest + + source.unpipe = function (dest) { + unpipedDest = dest + stream.Readable.prototype.unpipe.call(this, dest) + } + const dest = new stream.Writable() + + dest._write = function (chunk, encoding, cb) { + cb() + } + + source.pipe(dest) + let gotErr = null + dest.on('error', function (err) { + gotErr = err + }) + let unpipedSource + dest.on('unpipe', function (src) { + unpipedSource = src + }) + const err = new Error('This stream turned into bacon.') + dest.emit('error', err) + assert.strictEqual(gotErr, err) + assert.strictEqual(unpipedSource, source) + assert.strictEqual(unpipedDest, dest) +} { - let count = 1000; + let count = 1000 + const source = new stream.Readable() - const source = new stream.Readable(); - source._read = function(n) { - n = Math.min(count, n); - count -= n; - source.push(Buffer.allocUnsafe(n)); - }; + source._read = function (n) { + n = Math.min(count, n) + count -= n + source.push(Buffer.allocUnsafe(n)) + } - let unpipedDest; - source.unpipe = function(dest) { - unpipedDest = dest; - stream.Readable.prototype.unpipe.call(this, dest); - }; + let unpipedDest - const dest = new stream.Writable({ autoDestroy: false }); - dest._write = function(chunk, encoding, cb) { - cb(); - }; + source.unpipe = function (dest) { + unpipedDest = dest + stream.Readable.prototype.unpipe.call(this, dest) + } - source.pipe(dest); + const dest = new stream.Writable({ + autoDestroy: false + }) - let unpipedSource; - dest.on('unpipe', function(src) { - unpipedSource = src; - }); + dest._write = function (chunk, encoding, cb) { + cb() + } - const err = 
new Error('This stream turned into bacon.'); + source.pipe(dest) + let unpipedSource + dest.on('unpipe', function (src) { + unpipedSource = src + }) + const err = new Error('This stream turned into bacon.') + let gotErr = null - let gotErr = null; try { - dest.emit('error', err); + dest.emit('error', err) } catch (e) { - gotErr = e; + gotErr = e } - assert.strictEqual(gotErr, err); - assert.strictEqual(unpipedSource, source); - assert.strictEqual(unpipedDest, dest); + + assert.strictEqual(gotErr, err) + assert.strictEqual(unpipedSource, source) + assert.strictEqual(unpipedDest, dest) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-pipe-error-once-listener.js b/test/parallel/test-stream2-pipe-error-once-listener.js index 9ea1f60850..0158b0544f 100644 --- a/test/parallel/test-stream2-pipe-error-once-listener.js +++ b/test/parallel/test-stream2-pipe-error-once-listener.js @@ -18,51 +18,50 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; + error() {} +} +require('../common') -require('../common'); -const stream = require('../../lib/ours/index'); +const stream = require('../../lib/ours/index') class Read extends stream.Readable { _read(size) { - this.push('x'); - this.push(null); + this.push('x') + this.push(null) } } class Write extends stream.Writable { _write(buffer, encoding, cb) { - this.emit('error', new Error('boom')); - this.emit('alldone'); + this.emit('error', new Error('boom')) + this.emit('alldone') } } -const read = new Read(); -const write = new Write(); - -write.once('error', () => {}); -write.once('alldone', function(err) { - silentConsole.log('ok'); -}); - -process.on('exit', function(c) { - silentConsole.error('error thrown even with listener'); -}); +const read = new Read() +const write = new Write() +write.once('error', () => {}) +write.once('alldone', function (err) { + silentConsole.log('ok') +}) +process.on('exit', function (c) { + silentConsole.error('error thrown even with listener') +}) +read.pipe(write) +/* replacement start */ -read.pipe(write); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-push.js b/test/parallel/test-stream2-push.js index 13271d8cf0..4c78fe445a 100644 --- a/test/parallel/test-stream2-push.js +++ b/test/parallel/test-stream2-push.js @@ -18,134 +18,123 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
+'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); -const { Readable, Writable } = require('../../lib/ours/index'); + error() {} +} +require('../common') -const EE = require('events').EventEmitter; +const assert = require('assert') +const { Readable, Writable } = require('../../lib/ours/index') -// A mock thing a bit like the net.Socket/tcp_wrap.handle interaction +const EE = require('events').EventEmitter // A mock thing a bit like the net.Socket/tcp_wrap.handle interaction const stream = new Readable({ highWaterMark: 16, encoding: 'utf8' -}); - -const source = new EE(); - -stream._read = function() { - silentConsole.error('stream._read'); - readStart(); -}; - -let ended = false; -stream.on('end', function() { - ended = true; -}); +}) +const source = new EE() -source.on('data', function(chunk) { - const ret = stream.push(chunk); - silentConsole.error('data', stream.readableLength); - if (!ret) - readStop(); -}); - -source.on('end', function() { - stream.push(null); -}); +stream._read = function () { + silentConsole.error('stream._read') + readStart() +} -let reading = false; +let ended = false +stream.on('end', function () { + ended = true +}) +source.on('data', function (chunk) { + const ret = stream.push(chunk) + silentConsole.error('data', stream.readableLength) + if (!ret) readStop() +}) +source.on('end', function () { + stream.push(null) +}) +let reading = false function readStart() { - silentConsole.error('readStart'); - reading = true; + silentConsole.error('readStart') + reading = true } function readStop() { - silentConsole.error('readStop'); - reading = false; - process.nextTick(function() { - const r = stream.read(); - if (r !== null) - writer.write(r); - }); + silentConsole.error('readStop') + reading = false + process.nextTick(function () { + const r = stream.read() + if (r !== null) writer.write(r) + }) } const writer = new Writable({ decodeStrings: false -}); - -const written = []; - -const expectWritten = - [ 'asdfgasdfgasdfgasdfg', - 'asdfgasdfgasdfgasdfg', - 'asdfgasdfgasdfgasdfg', - 'asdfgasdfgasdfgasdfg', - 'asdfgasdfgasdfgasdfg', - 'asdfgasdfgasdfgasdfg' ]; - -writer._write = function(chunk, encoding, cb) { - silentConsole.error(`WRITE ${chunk}`); - written.push(chunk); - process.nextTick(cb); -}; - -writer.on('finish', finish); - +}) +const written = [] +const expectWritten = [ + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg', + 'asdfgasdfgasdfgasdfg' +] + +writer._write = function (chunk, encoding, cb) { + silentConsole.error(`WRITE ${chunk}`) + written.push(chunk) + process.nextTick(cb) +} -// Now emit some chunks. +writer.on('finish', finish) // Now emit some chunks. 
-const chunk = 'asdfg'; +const chunk = 'asdfg' +let set = 0 +readStart() +data() -let set = 0; -readStart(); -data(); function data() { - assert(reading); - source.emit('data', chunk); - assert(reading); - source.emit('data', chunk); - assert(reading); - source.emit('data', chunk); - assert(reading); - source.emit('data', chunk); - assert(!reading); - if (set++ < 5) - setTimeout(data, 10); - else - end(); + assert(reading) + source.emit('data', chunk) + assert(reading) + source.emit('data', chunk) + assert(reading) + source.emit('data', chunk) + assert(reading) + source.emit('data', chunk) + assert(!reading) + if (set++ < 5) setTimeout(data, 10) + else end() } function finish() { - silentConsole.error('finish'); - assert.deepStrictEqual(written, expectWritten); - silentConsole.log('ok'); + silentConsole.error('finish') + assert.deepStrictEqual(written, expectWritten) + silentConsole.log('ok') } function end() { - source.emit('end'); - assert(!reading); - writer.end(stream.read()); - setImmediate(function() { - assert(ended); - }); + source.emit('end') + assert(!reading) + writer.end(stream.read()) + setImmediate(function () { + assert(ended) + }) } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-read-sync-stack.js b/test/parallel/test-stream2-read-sync-stack.js index 30aac079c5..099a4729b7 100644 --- a/test/parallel/test-stream2-read-sync-stack.js +++ b/test/parallel/test-stream2-read-sync-stack.js @@ -18,44 +18,42 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const Readable = require('../../lib/ours/index').Readable; + error() {} +} +const common = require('../common') -// This tests synchronous read callbacks and verifies that even if they nest +const Readable = require('../../lib/ours/index').Readable // This tests synchronous read callbacks and verifies that even if they nest // heavily the process handles it without an error -const r = new Readable(); -const N = 256 * 1024; +const r = new Readable() +const N = 256 * 1024 +let reads = 0 -let reads = 0; -r._read = function(n) { - const chunk = reads++ === N ? null : Buffer.allocUnsafe(1); - r.push(chunk); -}; +r._read = function (n) { + const chunk = reads++ === N ? 
null : Buffer.allocUnsafe(1) + r.push(chunk) +} r.on('readable', function onReadable() { - if (!(r.readableLength % 256)) - silentConsole.error('readable', r.readableLength); - r.read(N * 2); -}); - -r.on('end', common.mustCall()); - -r.read(0); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + if (!(r.readableLength % 256)) silentConsole.error('readable', r.readableLength) + r.read(N * 2) +}) +r.on('end', common.mustCall()) +r.read(0) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js index 4ce5f9cfeb..eda7613712 100644 --- a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js +++ b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js @@ -18,25 +18,26 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); + error() {} +} +require('../common') -const Readable = require('../../lib/ours/index').Readable; +const assert = require('assert') -test1(); -test2(); +const Readable = require('../../lib/ours/index').Readable -function test1() { - const r = new Readable(); +test1() +test2() - // Should not end when we get a Buffer.alloc(0) or '' as the _read +function test1() { + const r = new Readable() // Should not end when we get a Buffer.alloc(0) or '' as the _read // result that just means that there is *temporarily* no data, but to // go ahead and try again later. // @@ -46,87 +47,101 @@ function test1() { // r.read(0) again later, otherwise there is no more work being done // and the process just exits. - const buf = Buffer.alloc(5, 'x'); - let reads = 5; - r._read = function(n) { + const buf = Buffer.alloc(5, 'x') + let reads = 5 + + r._read = function (n) { switch (reads--) { case 5: return setImmediate(() => { - return r.push(buf); - }); + return r.push(buf) + }) + case 4: setImmediate(() => { - return r.push(Buffer.alloc(0)); - }); - return setImmediate(r.read.bind(r, 0)); + return r.push(Buffer.alloc(0)) + }) + return setImmediate(r.read.bind(r, 0)) + case 3: - setImmediate(r.read.bind(r, 0)); + setImmediate(r.read.bind(r, 0)) return process.nextTick(() => { - return r.push(Buffer.alloc(0)); - }); + return r.push(Buffer.alloc(0)) + }) + case 2: - setImmediate(r.read.bind(r, 0)); - return r.push(Buffer.alloc(0)); // Not-EOF! + setImmediate(r.read.bind(r, 0)) + return r.push(Buffer.alloc(0)) + // Not-EOF! 
+ case 1: - return r.push(buf); + return r.push(buf) + case 0: - return r.push(null); // EOF + return r.push(null) + // EOF + default: - throw new Error('unreachable'); + throw new Error('unreachable') } - }; + } + + const results = [] - const results = []; function flow() { - let chunk; - while (null !== (chunk = r.read())) - results.push(String(chunk)); + let chunk + + while (null !== (chunk = r.read())) results.push(String(chunk)) } - r.on('readable', flow); - r.on('end', () => { - results.push('EOF'); - }); - flow(); + r.on('readable', flow) + r.on('end', () => { + results.push('EOF') + }) + flow() process.on('exit', () => { - assert.deepStrictEqual(results, [ 'xxxxx', 'xxxxx', 'EOF' ]); - silentConsole.log('ok'); - }); + assert.deepStrictEqual(results, ['xxxxx', 'xxxxx', 'EOF']) + silentConsole.log('ok') + }) } function test2() { - const r = new Readable({ encoding: 'base64' }); - let reads = 5; - r._read = function(n) { - if (!reads--) - return r.push(null); // EOF - return r.push(Buffer.from('x')); - }; - - const results = []; + const r = new Readable({ + encoding: 'base64' + }) + let reads = 5 + + r._read = function (n) { + if (!reads--) return r.push(null) // EOF + + return r.push(Buffer.from('x')) + } + + const results = [] + function flow() { - let chunk; - while (null !== (chunk = r.read())) - results.push(String(chunk)); + let chunk + + while (null !== (chunk = r.read())) results.push(String(chunk)) } - r.on('readable', flow); - r.on('end', () => { - results.push('EOF'); - }); - flow(); + r.on('readable', flow) + r.on('end', () => { + results.push('EOF') + }) + flow() process.on('exit', () => { - assert.deepStrictEqual(results, [ 'eHh4', 'eHg=', 'EOF' ]); - silentConsole.log('ok'); - }); + assert.deepStrictEqual(results, ['eHh4', 'eHg=', 'EOF']) + silentConsole.log('ok') + }) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-readable-from-list.js b/test/parallel/test-stream2-readable-from-list.js index 119faea1e5..c883c05d29 100644 --- a/test/parallel/test-stream2-readable-from-list.js +++ b/test/parallel/test-stream2-readable-from-list.js @@ -18,95 +18,110 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
- // Flags: --expose-internals +'use strict' + +const tap = require('tap') + +const silentConsole = { + log() {}, + + error() {} +} +require('../common') + +const assert = require('assert') - 'use strict' +const fromList = require('../../lib/ours/index').Readable._fromList - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); -const fromList = require('../../lib/ours/index').Readable._fromList; -const BufferList = require('../../lib/internal/streams/buffer_list'); -const util = require('util'); +const BufferList = require('../../lib/internal/streams/buffer_list') + +const util = require('util') function bufferListFromArray(arr) { - const bl = new BufferList(); - for (let i = 0; i < arr.length; ++i) - bl.push(arr[i]); - return bl; + const bl = new BufferList() + + for (let i = 0; i < arr.length; ++i) bl.push(arr[i]) + + return bl } { // Verify behavior with buffers - let list = [ Buffer.from('foog'), - Buffer.from('bark'), - Buffer.from('bazy'), - Buffer.from('kuel') ]; - list = bufferListFromArray(list); - - - assert.strictEqual(typeof list.head, 'object'); - assert.strictEqual(typeof list.tail, 'object'); - assert.strictEqual(list.length, 4); - - - // Read more than the first element. - let ret = fromList(6, { buffer: list, length: 16 }); - assert.strictEqual(ret.toString(), 'foogba'); - - // Read exactly the first element. - ret = fromList(2, { buffer: list, length: 10 }); - assert.strictEqual(ret.toString(), 'rk'); - - // Read less than the first element. - ret = fromList(2, { buffer: list, length: 8 }); - assert.strictEqual(ret.toString(), 'ba'); - - // Read more than we have. - ret = fromList(100, { buffer: list, length: 6 }); - assert.strictEqual(ret.toString(), 'zykuel'); - - // all consumed. - assert.deepStrictEqual(list, new BufferList()); + let list = [Buffer.from('foog'), Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')] + list = bufferListFromArray(list) + assert.strictEqual(typeof list.head, 'object') + assert.strictEqual(typeof list.tail, 'object') + assert.strictEqual(list.length, 4) // Read more than the first element. + + let ret = fromList(6, { + buffer: list, + length: 16 + }) + assert.strictEqual(ret.toString(), 'foogba') // Read exactly the first element. + + ret = fromList(2, { + buffer: list, + length: 10 + }) + assert.strictEqual(ret.toString(), 'rk') // Read less than the first element. + + ret = fromList(2, { + buffer: list, + length: 8 + }) + assert.strictEqual(ret.toString(), 'ba') // Read more than we have. + + ret = fromList(100, { + buffer: list, + length: 6 + }) + assert.strictEqual(ret.toString(), 'zykuel') // all consumed. + + assert.deepStrictEqual(list, new BufferList()) } - { // Verify behavior with strings - let list = [ 'foog', - 'bark', - 'bazy', - 'kuel' ]; - list = bufferListFromArray(list); - - // Read more than the first element. - let ret = fromList(6, { buffer: list, length: 16, decoder: true }); - assert.strictEqual(ret, 'foogba'); - - // Read exactly the first element. - ret = fromList(2, { buffer: list, length: 10, decoder: true }); - assert.strictEqual(ret, 'rk'); - - // Read less than the first element. - ret = fromList(2, { buffer: list, length: 8, decoder: true }); - assert.strictEqual(ret, 'ba'); - - // Read more than we have. - ret = fromList(100, { buffer: list, length: 6, decoder: true }); - assert.strictEqual(ret, 'zykuel'); - - // all consumed. 
- assert.deepStrictEqual(list, new BufferList()); + let list = ['foog', 'bark', 'bazy', 'kuel'] + list = bufferListFromArray(list) // Read more than the first element. + + let ret = fromList(6, { + buffer: list, + length: 16, + decoder: true + }) + assert.strictEqual(ret, 'foogba') // Read exactly the first element. + + ret = fromList(2, { + buffer: list, + length: 10, + decoder: true + }) + assert.strictEqual(ret, 'rk') // Read less than the first element. + + ret = fromList(2, { + buffer: list, + length: 8, + decoder: true + }) + assert.strictEqual(ret, 'ba') // Read more than we have. + + ret = fromList(100, { + buffer: list, + length: 6, + decoder: true + }) + assert.strictEqual(ret, 'zykuel') // all consumed. + + assert.deepStrictEqual(list, new BufferList()) } - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-readable-legacy-drain.js b/test/parallel/test-stream2-readable-legacy-drain.js index ced1a808c9..a90325aa34 100644 --- a/test/parallel/test-stream2-readable-legacy-drain.js +++ b/test/parallel/test-stream2-readable-legacy-drain.js @@ -18,53 +18,56 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); + error() {} +} +const common = require('../common') -const Stream = require('../../lib/ours/index'); -const Readable = Stream.Readable; +const assert = require('assert') -const r = new Readable(); -const N = 256; -let reads = 0; -r._read = function(n) { - return r.push(++reads === N ? null : Buffer.allocUnsafe(1)); -}; +const Stream = require('../../lib/ours/index') -r.on('end', common.mustCall()); +const Readable = Stream.Readable +const r = new Readable() +const N = 256 +let reads = 0 -const w = new Stream(); -w.writable = true; -let buffered = 0; -w.write = function(c) { - buffered += c.length; - process.nextTick(drain); - return false; -}; +r._read = function (n) { + return r.push(++reads === N ? 
null : Buffer.allocUnsafe(1)) +} -function drain() { - assert(buffered <= 3); - buffered = 0; - w.emit('drain'); +r.on('end', common.mustCall()) +const w = new Stream() +w.writable = true +let buffered = 0 + +w.write = function (c) { + buffered += c.length + process.nextTick(drain) + return false } -w.end = common.mustCall(); +function drain() { + assert(buffered <= 3) + buffered = 0 + w.emit('drain') +} -r.pipe(w); +w.end = common.mustCall() +r.pipe(w) +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-readable-non-empty-end.js b/test/parallel/test-stream2-readable-non-empty-end.js index 051da2eea7..bf4245d5fd 100644 --- a/test/parallel/test-stream2-readable-non-empty-end.js +++ b/test/parallel/test-stream2-readable-non-empty-end.js @@ -18,70 +18,78 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { Readable } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Readable } = require('../../lib/ours/index') + +let len = 0 +const chunks = new Array(10) -let len = 0; -const chunks = new Array(10); for (let i = 1; i <= 10; i++) { - chunks[i - 1] = Buffer.allocUnsafe(i); - len += i; + chunks[i - 1] = Buffer.allocUnsafe(i) + len += i } -const test = new Readable(); -let n = 0; -test._read = function(size) { - const chunk = chunks[n++]; - setTimeout(function() { - test.push(chunk === undefined ? null : chunk); - }, 1); -}; +const test = new Readable() +let n = 0 + +test._read = function (size) { + const chunk = chunks[n++] + setTimeout(function () { + test.push(chunk === undefined ? 
null : chunk) + }, 1) +} + +test.on('end', thrower) -test.on('end', thrower); function thrower() { - throw new Error('this should not happen!'); + throw new Error('this should not happen!') } -let bytesread = 0; -test.on('readable', function() { - const b = len - bytesread - 1; - const res = test.read(b); +let bytesread = 0 +test.on('readable', function () { + const b = len - bytesread - 1 + const res = test.read(b) + if (res) { - bytesread += res.length; - silentConsole.error(`br=${bytesread} len=${len}`); - setTimeout(next, 1); + bytesread += res.length + silentConsole.error(`br=${bytesread} len=${len}`) + setTimeout(next, 1) } - test.read(0); -}); -test.read(0); + + test.read(0) +}) +test.read(0) function next() { // Now let's make 'end' happen - test.removeListener('end', thrower); - test.on('end', common.mustCall()); - - // One to get the last byte - let r = test.read(); - assert(r); - assert.strictEqual(r.length, 1); - r = test.read(); - assert.strictEqual(r, null); + test.removeListener('end', thrower) + test.on('end', common.mustCall()) // One to get the last byte + + let r = test.read() + assert(r) + assert.strictEqual(r.length, 1) + r = test.read() + assert.strictEqual(r, null) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-readable-wrap-destroy.js b/test/parallel/test-stream2-readable-wrap-destroy.js index 77dd8e34f0..b2c615b65d 100644 --- a/test/parallel/test-stream2-readable-wrap-destroy.js +++ b/test/parallel/test-stream2-readable-wrap-destroy.js @@ -1,42 +1,45 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); +const silentConsole = { + log() {}, -const { Readable } = require('../../lib/ours/index'); -const EE = require('events').EventEmitter; + error() {} +} +const common = require('../common') + +const { Readable } = require('../../lib/ours/index') + +const EE = require('events').EventEmitter -const oldStream = new EE(); -oldStream.pause = () => {}; -oldStream.resume = () => {}; +const oldStream = new EE() + +oldStream.pause = () => {} + +oldStream.resume = () => {} { new Readable({ autoDestroy: false, destroy: common.mustCall() - }) - .wrap(oldStream); - oldStream.emit('destroy'); + }).wrap(oldStream) + oldStream.emit('destroy') } - { new Readable({ autoDestroy: false, destroy: common.mustCall() - }) - .wrap(oldStream); - oldStream.emit('close'); + }).wrap(oldStream) + oldStream.emit('close') } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-readable-wrap-empty.js b/test/parallel/test-stream2-readable-wrap-empty.js index 1b05e77cb2..3b23b719a4 100644 --- a/test/parallel/test-stream2-readable-wrap-empty.js +++ 
b/test/parallel/test-stream2-readable-wrap-empty.js @@ -18,36 +18,37 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); + error() {} +} +const common = require('../common') -const { Readable } = require('../../lib/ours/index'); -const EE = require('events').EventEmitter; +const { Readable } = require('../../lib/ours/index') -const oldStream = new EE(); -oldStream.pause = () => {}; -oldStream.resume = () => {}; +const EE = require('events').EventEmitter -const newStream = new Readable().wrap(oldStream); +const oldStream = new EE() -newStream - .on('readable', () => {}) - .on('end', common.mustCall()); +oldStream.pause = () => {} -oldStream.emit('end'); +oldStream.resume = () => {} - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +const newStream = new Readable().wrap(oldStream) +newStream.on('readable', () => {}).on('end', common.mustCall()) +oldStream.emit('end') +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-readable-wrap-error.js b/test/parallel/test-stream2-readable-wrap-error.js index ecf6d28a0d..80c4e37c0e 100644 --- a/test/parallel/test-stream2-readable-wrap-error.js +++ b/test/parallel/test-stream2-readable-wrap-error.js @@ -1,52 +1,67 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); +const silentConsole = { + log() {}, -const { Readable } = require('../../lib/ours/index'); -const EE = require('events').EventEmitter; + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Readable } = require('../../lib/ours/index') + +const EE = require('events').EventEmitter class LegacyStream extends EE { pause() {} + resume() {} } { - const err = new Error(); - const oldStream = new LegacyStream(); - const r = new Readable({ autoDestroy: true }) + const err = new Error() + const oldStream = new LegacyStream() + const r = new Readable({ + autoDestroy: true + }) .wrap(oldStream) - .on('error', common.mustCall(() => { - assert.strictEqual(r._readableState.errorEmitted, true); - assert.strictEqual(r._readableState.errored, err); - assert.strictEqual(r.destroyed, true); - })); - oldStream.emit('error', err); + .on( + 'error', + common.mustCall(() => { + assert.strictEqual(r._readableState.errorEmitted, true) + assert.strictEqual(r._readableState.errored, err) + assert.strictEqual(r.destroyed, true) + }) + ) + oldStream.emit('error', err) } - { - const err = new Error(); - const oldStream = new LegacyStream(); - const r = new Readable({ autoDestroy: false }) + const err = new Error() + const oldStream = new LegacyStream() + const r = new Readable({ + autoDestroy: false + }) .wrap(oldStream) - .on('error', common.mustCall(() => { - assert.strictEqual(r._readableState.errorEmitted, true); - 
assert.strictEqual(r._readableState.errored, err); - assert.strictEqual(r.destroyed, false); - })); - oldStream.emit('error', err); + .on( + 'error', + common.mustCall(() => { + assert.strictEqual(r._readableState.errorEmitted, true) + assert.strictEqual(r._readableState.errored, err) + assert.strictEqual(r.destroyed, false) + }) + ) + oldStream.emit('error', err) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-readable-wrap.js b/test/parallel/test-stream2-readable-wrap.js index 2d127c5104..b56629e39d 100644 --- a/test/parallel/test-stream2-readable-wrap.js +++ b/test/parallel/test-stream2-readable-wrap.js @@ -18,98 +18,131 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { Readable, Writable } = require('../../lib/ours/index'); -const EE = require('events').EventEmitter; + error() {} +} +const common = require('../common') -function runTest(highWaterMark, objectMode, produce) { +const assert = require('assert') - const old = new EE(); - const r = new Readable({ highWaterMark, objectMode }); - assert.strictEqual(r, r.wrap(old)); +const { Readable, Writable } = require('../../lib/ours/index') - r.on('end', common.mustCall()); +const EE = require('events').EventEmitter - old.pause = function() { - old.emit('pause'); - flowing = false; - }; +function runTest(highWaterMark, objectMode, produce) { + const old = new EE() + const r = new Readable({ + highWaterMark, + objectMode + }) + assert.strictEqual(r, r.wrap(old)) + r.on('end', common.mustCall()) + + old.pause = function () { + old.emit('pause') + flowing = false + } - old.resume = function() { - old.emit('resume'); - flow(); - }; + old.resume = function () { + old.emit('resume') + flow() + } // Make sure pause is only emitted once. - // Make sure pause is only emitted once. 
- let pausing = false; + let pausing = false r.on('pause', () => { - assert.strictEqual(pausing, false); - pausing = true; + assert.strictEqual(pausing, false) + pausing = true process.nextTick(() => { - pausing = false; - }); - }); - - let flowing; - let chunks = 10; - let oldEnded = false; - const expected = []; + pausing = false + }) + }) + let flowing + let chunks = 10 + let oldEnded = false + const expected = [] + function flow() { - flowing = true; + flowing = true + while (flowing && chunks-- > 0) { - const item = produce(); - expected.push(item); - old.emit('data', item); + const item = produce() + expected.push(item) + old.emit('data', item) } + if (chunks <= 0) { - oldEnded = true; - old.emit('end'); + oldEnded = true + old.emit('end') } } - const w = new Writable({ highWaterMark: highWaterMark * 2, - objectMode }); - const written = []; - w._write = function(chunk, encoding, cb) { - written.push(chunk); - setTimeout(cb, 1); - }; + const w = new Writable({ + highWaterMark: highWaterMark * 2, + objectMode + }) + const written = [] - w.on('finish', common.mustCall(function() { - performAsserts(); - })); - - r.pipe(w); + w._write = function (chunk, encoding, cb) { + written.push(chunk) + setTimeout(cb, 1) + } - flow(); + w.on( + 'finish', + common.mustCall(function () { + performAsserts() + }) + ) + r.pipe(w) + flow() function performAsserts() { - assert(oldEnded); - assert.deepStrictEqual(written, expected); + assert(oldEnded) + assert.deepStrictEqual(written, expected) } } -runTest(100, false, function() { return Buffer.allocUnsafe(100); }); -runTest(10, false, function() { return Buffer.from('xxxxxxxxxx'); }); -runTest(1, true, function() { return { foo: 'bar' }; }); - -const objectChunks = [ 5, 'a', false, 0, '', 'xyz', { x: 4 }, 7, [], 555 ]; -runTest(1, true, function() { return objectChunks.shift(); }); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +runTest(100, false, function () { + return Buffer.allocUnsafe(100) +}) +runTest(10, false, function () { + return Buffer.from('xxxxxxxxxx') +}) +runTest(1, true, function () { + return { + foo: 'bar' + } +}) +const objectChunks = [ + 5, + 'a', + false, + 0, + '', + 'xyz', + { + x: 4 + }, + 7, + [], + 555 +] +runTest(1, true, function () { + return objectChunks.shift() +}) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-set-encoding.js b/test/parallel/test-stream2-set-encoding.js index 046791a5c8..59194e4bda 100644 --- a/test/parallel/test-stream2-set-encoding.js +++ b/test/parallel/test-stream2-set-encoding.js @@ -18,321 +18,339 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
+'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { Readable: R } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { Readable: R } = require('../../lib/ours/index') class TestReader extends R { constructor(n, opts) { - super(opts); - this.pos = 0; - this.len = n || 100; + super(opts) + this.pos = 0 + this.len = n || 100 } _read(n) { setTimeout(() => { if (this.pos >= this.len) { // Double push(null) to test eos handling - this.push(null); - return this.push(null); + this.push(null) + return this.push(null) } - n = Math.min(n, this.len - this.pos); + n = Math.min(n, this.len - this.pos) + if (n <= 0) { // Double push(null) to test eos handling - this.push(null); - return this.push(null); + this.push(null) + return this.push(null) } - this.pos += n; - const ret = Buffer.alloc(n, 'a'); - - return this.push(ret); - }, 1); + this.pos += n + const ret = Buffer.alloc(n, 'a') + return this.push(ret) + }, 1) } } { // Verify utf8 encoding - const tr = new TestReader(100); - tr.setEncoding('utf8'); - const out = []; - const expect = - [ 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa' ]; - + const tr = new TestReader(100) + tr.setEncoding('utf8') + const out = [] + const expect = [ + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa' + ] tr.on('readable', function flow() { - let chunk; - while (null !== (chunk = tr.read(10))) - out.push(chunk); - }); - - tr.on('end', common.mustCall(function() { - assert.deepStrictEqual(out, expect); - })); + let chunk + + while (null !== (chunk = tr.read(10))) out.push(chunk) + }) + tr.on( + 'end', + common.mustCall(function () { + assert.deepStrictEqual(out, expect) + }) + ) } - - { // Verify hex encoding - const tr = new TestReader(100); - tr.setEncoding('hex'); - const out = []; - const expect = - [ '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161' ]; - + const tr = new TestReader(100) + tr.setEncoding('hex') + const out = [] + const expect = [ + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161' + ] tr.on('readable', function flow() { - let chunk; - while (null !== (chunk = tr.read(10))) - out.push(chunk); - }); - - tr.on('end', common.mustCall(function() { - assert.deepStrictEqual(out, expect); - })); + let chunk + + while (null !== (chunk = tr.read(10))) out.push(chunk) + }) + tr.on( + 'end', + common.mustCall(function () { + assert.deepStrictEqual(out, expect) + }) + ) } - { // Verify hex encoding with read(13) - const tr = new TestReader(100); - tr.setEncoding('hex'); - const out = []; - const expect = - [ '6161616161616', - 
'1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '16161' ]; - + const tr = new TestReader(100) + tr.setEncoding('hex') + const out = [] + const expect = [ + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '16161' + ] tr.on('readable', function flow() { - let chunk; - while (null !== (chunk = tr.read(13))) - out.push(chunk); - }); - - tr.on('end', common.mustCall(function() { - assert.deepStrictEqual(out, expect); - })); + let chunk + + while (null !== (chunk = tr.read(13))) out.push(chunk) + }) + tr.on( + 'end', + common.mustCall(function () { + assert.deepStrictEqual(out, expect) + }) + ) } - { // Verify base64 encoding - const tr = new TestReader(100); - tr.setEncoding('base64'); - const out = []; - const expect = - [ 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYQ==' ]; - + const tr = new TestReader(100) + tr.setEncoding('base64') + const out = [] + const expect = [ + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYQ==' + ] tr.on('readable', function flow() { - let chunk; - while (null !== (chunk = tr.read(10))) - out.push(chunk); - }); - - tr.on('end', common.mustCall(function() { - assert.deepStrictEqual(out, expect); - })); + let chunk + + while (null !== (chunk = tr.read(10))) out.push(chunk) + }) + tr.on( + 'end', + common.mustCall(function () { + assert.deepStrictEqual(out, expect) + }) + ) } - { // Verify utf8 encoding - const tr = new TestReader(100, { encoding: 'utf8' }); - const out = []; - const expect = - [ 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa' ]; - + const tr = new TestReader(100, { + encoding: 'utf8' + }) + const out = [] + const expect = [ + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa' + ] tr.on('readable', function flow() { - let chunk; - while (null !== (chunk = tr.read(10))) - out.push(chunk); - }); - - tr.on('end', common.mustCall(function() { - assert.deepStrictEqual(out, expect); - })); + let chunk + + while (null !== (chunk = tr.read(10))) out.push(chunk) + }) + tr.on( + 'end', + common.mustCall(function () { + assert.deepStrictEqual(out, expect) + }) + ) } - - { // Verify hex encoding - const tr = new TestReader(100, { encoding: 'hex' }); - const out = []; - const expect = - [ '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161' ]; - + const tr = new TestReader(100, { + encoding: 'hex' + }) + const out = [] + 
const expect = [ + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161' + ] tr.on('readable', function flow() { - let chunk; - while (null !== (chunk = tr.read(10))) - out.push(chunk); - }); - - tr.on('end', common.mustCall(function() { - assert.deepStrictEqual(out, expect); - })); + let chunk + + while (null !== (chunk = tr.read(10))) out.push(chunk) + }) + tr.on( + 'end', + common.mustCall(function () { + assert.deepStrictEqual(out, expect) + }) + ) } - { // Verify hex encoding with read(13) - const tr = new TestReader(100, { encoding: 'hex' }); - const out = []; - const expect = - [ '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '16161' ]; - + const tr = new TestReader(100, { + encoding: 'hex' + }) + const out = [] + const expect = [ + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '16161' + ] tr.on('readable', function flow() { - let chunk; - while (null !== (chunk = tr.read(13))) - out.push(chunk); - }); - - tr.on('end', common.mustCall(function() { - assert.deepStrictEqual(out, expect); - })); + let chunk + + while (null !== (chunk = tr.read(13))) out.push(chunk) + }) + tr.on( + 'end', + common.mustCall(function () { + assert.deepStrictEqual(out, expect) + }) + ) } - { // Verify base64 encoding - const tr = new TestReader(100, { encoding: 'base64' }); - const out = []; - const expect = - [ 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYQ==' ]; - + const tr = new TestReader(100, { + encoding: 'base64' + }) + const out = [] + const expect = [ + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYQ==' + ] tr.on('readable', function flow() { - let chunk; - while (null !== (chunk = tr.read(10))) - out.push(chunk); - }); - - tr.on('end', common.mustCall(function() { - assert.deepStrictEqual(out, expect); - })); + let chunk + + while (null !== (chunk = tr.read(10))) out.push(chunk) + }) + tr.on( + 'end', + common.mustCall(function () { + assert.deepStrictEqual(out, expect) + }) + ) } - { // Verify chaining behavior - const tr = new TestReader(100); - assert.deepStrictEqual(tr.setEncoding('utf8'), tr); + const tr = new TestReader(100) + assert.deepStrictEqual(tr.setEncoding('utf8'), tr) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited 
code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-transform.js b/test/parallel/test-stream2-transform.js index 872daf5062..ac91527cdd 100644 --- a/test/parallel/test-stream2-transform.js +++ b/test/parallel/test-stream2-transform.js @@ -18,468 +18,496 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); -const assert = require('assert'); -const { PassThrough, Transform } = require('../../lib/ours/index'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const { PassThrough, Transform } = require('../../lib/ours/index') { // Verify writable side consumption const tx = new Transform({ highWaterMark: 10 - }); + }) + let transformed = 0 - let transformed = 0; - tx._transform = function(chunk, encoding, cb) { - transformed += chunk.length; - tx.push(chunk); - cb(); - }; + tx._transform = function (chunk, encoding, cb) { + transformed += chunk.length + tx.push(chunk) + cb() + } for (let i = 1; i <= 10; i++) { - tx.write(Buffer.allocUnsafe(i)); + tx.write(Buffer.allocUnsafe(i)) } - tx.end(); - assert.strictEqual(tx.readableLength, 10); - assert.strictEqual(transformed, 10); - assert.deepStrictEqual(tx.writableBuffer.map(function(c) { - return c.chunk.length; - }), [5, 6, 7, 8, 9, 10]); + tx.end() + assert.strictEqual(tx.readableLength, 10) + assert.strictEqual(transformed, 10) + assert.deepStrictEqual( + tx.writableBuffer.map(function (c) { + return c.chunk.length + }), + [5, 6, 7, 8, 9, 10] + ) } - { // Verify passthrough behavior - const pt = new PassThrough(); - - pt.write(Buffer.from('foog')); - pt.write(Buffer.from('bark')); - pt.write(Buffer.from('bazy')); - pt.write(Buffer.from('kuel')); - pt.end(); - - assert.strictEqual(pt.read(5).toString(), 'foogb'); - assert.strictEqual(pt.read(5).toString(), 'arkba'); - assert.strictEqual(pt.read(5).toString(), 'zykue'); - assert.strictEqual(pt.read(5).toString(), 'l'); + const pt = new PassThrough() + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + assert.strictEqual(pt.read(5).toString(), 'foogb') + assert.strictEqual(pt.read(5).toString(), 'arkba') + assert.strictEqual(pt.read(5).toString(), 'zykue') + assert.strictEqual(pt.read(5).toString(), 'l') } - { // Verify object passthrough behavior - const pt = new PassThrough({ objectMode: true }); - - pt.write(1); - pt.write(true); - pt.write(false); - pt.write(0); - pt.write('foo'); - pt.write(''); - pt.write({ a: 'b' }); - pt.end(); - - assert.strictEqual(pt.read(), 1); - assert.strictEqual(pt.read(), true); - assert.strictEqual(pt.read(), false); - assert.strictEqual(pt.read(), 0); - assert.strictEqual(pt.read(), 'foo'); - assert.strictEqual(pt.read(), ''); - assert.deepStrictEqual(pt.read(), { a: 'b' }); + const pt = new PassThrough({ + objectMode: true + }) + pt.write(1) + pt.write(true) + pt.write(false) + pt.write(0) + pt.write('foo') + pt.write('') + pt.write({ + a: 'b' + }) + pt.end() + assert.strictEqual(pt.read(), 1) + assert.strictEqual(pt.read(), true) + assert.strictEqual(pt.read(), false) + assert.strictEqual(pt.read(), 0) + assert.strictEqual(pt.read(), 'foo') + 
assert.strictEqual(pt.read(), '') + assert.deepStrictEqual(pt.read(), { + a: 'b' + }) } - { // Verify passthrough constructor behavior - const pt = PassThrough(); - - assert(pt instanceof PassThrough); + const pt = PassThrough() + assert(pt instanceof PassThrough) } - { // Verify transform constructor behavior - const pt = Transform(); - - assert(pt instanceof Transform); + const pt = Transform() + assert(pt instanceof Transform) } - { // Perform a simple transform - const pt = new Transform(); - pt._transform = function(c, e, cb) { - const ret = Buffer.alloc(c.length, 'x'); - pt.push(ret); - cb(); - }; - - pt.write(Buffer.from('foog')); - pt.write(Buffer.from('bark')); - pt.write(Buffer.from('bazy')); - pt.write(Buffer.from('kuel')); - pt.end(); - - assert.strictEqual(pt.read(5).toString(), 'xxxxx'); - assert.strictEqual(pt.read(5).toString(), 'xxxxx'); - assert.strictEqual(pt.read(5).toString(), 'xxxxx'); - assert.strictEqual(pt.read(5).toString(), 'x'); -} + const pt = new Transform() + + pt._transform = function (c, e, cb) { + const ret = Buffer.alloc(c.length, 'x') + pt.push(ret) + cb() + } + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + assert.strictEqual(pt.read(5).toString(), 'xxxxx') + assert.strictEqual(pt.read(5).toString(), 'xxxxx') + assert.strictEqual(pt.read(5).toString(), 'xxxxx') + assert.strictEqual(pt.read(5).toString(), 'x') +} { // Verify simple object transform - const pt = new Transform({ objectMode: true }); - pt._transform = function(c, e, cb) { - pt.push(JSON.stringify(c)); - cb(); - }; - - pt.write(1); - pt.write(true); - pt.write(false); - pt.write(0); - pt.write('foo'); - pt.write(''); - pt.write({ a: 'b' }); - pt.end(); - - assert.strictEqual(pt.read(), '1'); - assert.strictEqual(pt.read(), 'true'); - assert.strictEqual(pt.read(), 'false'); - assert.strictEqual(pt.read(), '0'); - assert.strictEqual(pt.read(), '"foo"'); - assert.strictEqual(pt.read(), '""'); - assert.strictEqual(pt.read(), '{"a":"b"}'); -} + const pt = new Transform({ + objectMode: true + }) + + pt._transform = function (c, e, cb) { + pt.push(JSON.stringify(c)) + cb() + } + pt.write(1) + pt.write(true) + pt.write(false) + pt.write(0) + pt.write('foo') + pt.write('') + pt.write({ + a: 'b' + }) + pt.end() + assert.strictEqual(pt.read(), '1') + assert.strictEqual(pt.read(), 'true') + assert.strictEqual(pt.read(), 'false') + assert.strictEqual(pt.read(), '0') + assert.strictEqual(pt.read(), '"foo"') + assert.strictEqual(pt.read(), '""') + assert.strictEqual(pt.read(), '{"a":"b"}') +} { // Verify async passthrough - const pt = new Transform(); - pt._transform = function(chunk, encoding, cb) { - setTimeout(function() { - pt.push(chunk); - cb(); - }, 10); - }; - - pt.write(Buffer.from('foog')); - pt.write(Buffer.from('bark')); - pt.write(Buffer.from('bazy')); - pt.write(Buffer.from('kuel')); - pt.end(); - - pt.on('finish', common.mustCall(function() { - assert.strictEqual(pt.read(5).toString(), 'foogb'); - assert.strictEqual(pt.read(5).toString(), 'arkba'); - assert.strictEqual(pt.read(5).toString(), 'zykue'); - assert.strictEqual(pt.read(5).toString(), 'l'); - })); -} + const pt = new Transform() + pt._transform = function (chunk, encoding, cb) { + setTimeout(function () { + pt.push(chunk) + cb() + }, 10) + } + + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + pt.on( + 'finish', + common.mustCall(function () { + 
assert.strictEqual(pt.read(5).toString(), 'foogb') + assert.strictEqual(pt.read(5).toString(), 'arkba') + assert.strictEqual(pt.read(5).toString(), 'zykue') + assert.strictEqual(pt.read(5).toString(), 'l') + }) + ) +} { // Verify asymmetric transform (expand) - const pt = new Transform(); - - // Emit each chunk 2 times. - pt._transform = function(chunk, encoding, cb) { - setTimeout(function() { - pt.push(chunk); - setTimeout(function() { - pt.push(chunk); - cb(); - }, 10); - }, 10); - }; - - pt.write(Buffer.from('foog')); - pt.write(Buffer.from('bark')); - pt.write(Buffer.from('bazy')); - pt.write(Buffer.from('kuel')); - pt.end(); - - pt.on('finish', common.mustCall(function() { - assert.strictEqual(pt.read(5).toString(), 'foogf'); - assert.strictEqual(pt.read(5).toString(), 'oogba'); - assert.strictEqual(pt.read(5).toString(), 'rkbar'); - assert.strictEqual(pt.read(5).toString(), 'kbazy'); - assert.strictEqual(pt.read(5).toString(), 'bazyk'); - assert.strictEqual(pt.read(5).toString(), 'uelku'); - assert.strictEqual(pt.read(5).toString(), 'el'); - })); -} + const pt = new Transform() // Emit each chunk 2 times. + + pt._transform = function (chunk, encoding, cb) { + setTimeout(function () { + pt.push(chunk) + setTimeout(function () { + pt.push(chunk) + cb() + }, 10) + }, 10) + } + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + pt.on( + 'finish', + common.mustCall(function () { + assert.strictEqual(pt.read(5).toString(), 'foogf') + assert.strictEqual(pt.read(5).toString(), 'oogba') + assert.strictEqual(pt.read(5).toString(), 'rkbar') + assert.strictEqual(pt.read(5).toString(), 'kbazy') + assert.strictEqual(pt.read(5).toString(), 'bazyk') + assert.strictEqual(pt.read(5).toString(), 'uelku') + assert.strictEqual(pt.read(5).toString(), 'el') + }) + ) +} { // Verify asymmetric transform (compress) - const pt = new Transform(); - - // Each output is the first char of 3 consecutive chunks, + const pt = new Transform() // Each output is the first char of 3 consecutive chunks, // or whatever's left. - pt.state = ''; - pt._transform = function(chunk, encoding, cb) { - if (!chunk) - chunk = ''; - const s = chunk.toString(); + pt.state = '' + + pt._transform = function (chunk, encoding, cb) { + if (!chunk) chunk = '' + const s = chunk.toString() setTimeout(() => { - this.state += s.charAt(0); + this.state += s.charAt(0) + if (this.state.length === 3) { - pt.push(Buffer.from(this.state)); - this.state = ''; + pt.push(Buffer.from(this.state)) + this.state = '' } - cb(); - }, 10); - }; - pt._flush = function(cb) { + cb() + }, 10) + } + + pt._flush = function (cb) { // Just output whatever we have. 
- pt.push(Buffer.from(this.state)); - this.state = ''; - cb(); - }; - - pt.write(Buffer.from('aaaa')); - pt.write(Buffer.from('bbbb')); - pt.write(Buffer.from('cccc')); - pt.write(Buffer.from('dddd')); - pt.write(Buffer.from('eeee')); - pt.write(Buffer.from('aaaa')); - pt.write(Buffer.from('bbbb')); - pt.write(Buffer.from('cccc')); - pt.write(Buffer.from('dddd')); - pt.write(Buffer.from('eeee')); - pt.write(Buffer.from('aaaa')); - pt.write(Buffer.from('bbbb')); - pt.write(Buffer.from('cccc')); - pt.write(Buffer.from('dddd')); - pt.end(); - - // 'abcdeabcdeabcd' - pt.on('finish', common.mustCall(function() { - assert.strictEqual(pt.read(5).toString(), 'abcde'); - assert.strictEqual(pt.read(5).toString(), 'abcde'); - assert.strictEqual(pt.read(5).toString(), 'abcd'); - })); -} + pt.push(Buffer.from(this.state)) + this.state = '' + cb() + } -// This tests for a stall when data is written to a full stream + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.write(Buffer.from('eeee')) + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.write(Buffer.from('eeee')) + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.end() // 'abcdeabcdeabcd' + + pt.on( + 'finish', + common.mustCall(function () { + assert.strictEqual(pt.read(5).toString(), 'abcde') + assert.strictEqual(pt.read(5).toString(), 'abcde') + assert.strictEqual(pt.read(5).toString(), 'abcd') + }) + ) +} // This tests for a stall when data is written to a full stream // that has empty transforms. + { // Verify complex transform behavior - let count = 0; - let saved = null; - const pt = new Transform({ highWaterMark: 3 }); - pt._transform = function(c, e, cb) { - if (count++ === 1) - saved = c; + let count = 0 + let saved = null + const pt = new Transform({ + highWaterMark: 3 + }) + + pt._transform = function (c, e, cb) { + if (count++ === 1) saved = c else { if (saved) { - pt.push(saved); - saved = null; + pt.push(saved) + saved = null } - pt.push(c); + + pt.push(c) } + cb() + } - cb(); - }; - - pt.once('readable', function() { - process.nextTick(function() { - pt.write(Buffer.from('d')); - pt.write(Buffer.from('ef'), common.mustCall(function() { - pt.end(); - })); - assert.strictEqual(pt.read().toString(), 'abcdef'); - assert.strictEqual(pt.read(), null); - }); - }); - - pt.write(Buffer.from('abc')); + pt.once('readable', function () { + process.nextTick(function () { + pt.write(Buffer.from('d')) + pt.write( + Buffer.from('ef'), + common.mustCall(function () { + pt.end() + }) + ) + assert.strictEqual(pt.read().toString(), 'abcdef') + assert.strictEqual(pt.read(), null) + }) + }) + pt.write(Buffer.from('abc')) } - - { // Verify passthrough event emission - const pt = new PassThrough(); - let emits = 0; - pt.on('readable', function() { - emits++; - }); - - pt.write(Buffer.from('foog')); - pt.write(Buffer.from('bark')); - - assert.strictEqual(emits, 0); - assert.strictEqual(pt.read(5).toString(), 'foogb'); - assert.strictEqual(String(pt.read(5)), 'null'); - assert.strictEqual(emits, 0); - - pt.write(Buffer.from('bazy')); - pt.write(Buffer.from('kuel')); - - assert.strictEqual(emits, 0); - assert.strictEqual(pt.read(5).toString(), 'arkba'); - assert.strictEqual(pt.read(5).toString(), 'zykue'); - assert.strictEqual(pt.read(5), null); - - pt.end(); - - assert.strictEqual(emits, 1); - 
assert.strictEqual(pt.read(5).toString(), 'l'); - assert.strictEqual(pt.read(5), null); - assert.strictEqual(emits, 1); + const pt = new PassThrough() + let emits = 0 + pt.on('readable', function () { + emits++ + }) + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + assert.strictEqual(emits, 0) + assert.strictEqual(pt.read(5).toString(), 'foogb') + assert.strictEqual(String(pt.read(5)), 'null') + assert.strictEqual(emits, 0) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + assert.strictEqual(emits, 0) + assert.strictEqual(pt.read(5).toString(), 'arkba') + assert.strictEqual(pt.read(5).toString(), 'zykue') + assert.strictEqual(pt.read(5), null) + pt.end() + assert.strictEqual(emits, 1) + assert.strictEqual(pt.read(5).toString(), 'l') + assert.strictEqual(pt.read(5), null) + assert.strictEqual(emits, 1) } - { // Verify passthrough event emission reordering - const pt = new PassThrough(); - let emits = 0; - pt.on('readable', function() { - emits++; - }); - - pt.write(Buffer.from('foog')); - pt.write(Buffer.from('bark')); - - assert.strictEqual(emits, 0); - assert.strictEqual(pt.read(5).toString(), 'foogb'); - assert.strictEqual(pt.read(5), null); - - pt.once('readable', common.mustCall(function() { - assert.strictEqual(pt.read(5).toString(), 'arkba'); - assert.strictEqual(pt.read(5), null); - - pt.once('readable', common.mustCall(function() { - assert.strictEqual(pt.read(5).toString(), 'zykue'); - assert.strictEqual(pt.read(5), null); - pt.once('readable', common.mustCall(function() { - assert.strictEqual(pt.read(5).toString(), 'l'); - assert.strictEqual(pt.read(5), null); - assert.strictEqual(emits, 3); - })); - pt.end(); - })); - pt.write(Buffer.from('kuel')); - })); - - pt.write(Buffer.from('bazy')); + const pt = new PassThrough() + let emits = 0 + pt.on('readable', function () { + emits++ + }) + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + assert.strictEqual(emits, 0) + assert.strictEqual(pt.read(5).toString(), 'foogb') + assert.strictEqual(pt.read(5), null) + pt.once( + 'readable', + common.mustCall(function () { + assert.strictEqual(pt.read(5).toString(), 'arkba') + assert.strictEqual(pt.read(5), null) + pt.once( + 'readable', + common.mustCall(function () { + assert.strictEqual(pt.read(5).toString(), 'zykue') + assert.strictEqual(pt.read(5), null) + pt.once( + 'readable', + common.mustCall(function () { + assert.strictEqual(pt.read(5).toString(), 'l') + assert.strictEqual(pt.read(5), null) + assert.strictEqual(emits, 3) + }) + ) + pt.end() + }) + ) + pt.write(Buffer.from('kuel')) + }) + ) + pt.write(Buffer.from('bazy')) } - { // Verify passthrough facade - const pt = new PassThrough(); - const datas = []; - pt.on('data', function(chunk) { - datas.push(chunk.toString()); - }); - - pt.on('end', common.mustCall(function() { - assert.deepStrictEqual(datas, ['foog', 'bark', 'bazy', 'kuel']); - })); - - pt.write(Buffer.from('foog')); - setTimeout(function() { - pt.write(Buffer.from('bark')); - setTimeout(function() { - pt.write(Buffer.from('bazy')); - setTimeout(function() { - pt.write(Buffer.from('kuel')); - setTimeout(function() { - pt.end(); - }, 10); - }, 10); - }, 10); - }, 10); + const pt = new PassThrough() + const datas = [] + pt.on('data', function (chunk) { + datas.push(chunk.toString()) + }) + pt.on( + 'end', + common.mustCall(function () { + assert.deepStrictEqual(datas, ['foog', 'bark', 'bazy', 'kuel']) + }) + ) + pt.write(Buffer.from('foog')) + setTimeout(function () { + pt.write(Buffer.from('bark')) + setTimeout(function () { 
+ pt.write(Buffer.from('bazy')) + setTimeout(function () { + pt.write(Buffer.from('kuel')) + setTimeout(function () { + pt.end() + }, 10) + }, 10) + }, 10) + }, 10) } - { // Verify object transform (JSON parse) - const jp = new Transform({ objectMode: true }); - jp._transform = function(data, encoding, cb) { + const jp = new Transform({ + objectMode: true + }) + + jp._transform = function (data, encoding, cb) { try { - jp.push(JSON.parse(data)); - cb(); + jp.push(JSON.parse(data)) + cb() } catch (er) { - cb(er); + cb(er) } - }; - - // Anything except null/undefined is fine. + } // Anything except null/undefined is fine. // those are "magic" in the stream API, because they signal EOF. + const objects = [ - { foo: 'bar' }, + { + foo: 'bar' + }, 100, 'string', - { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }, - ]; - - let ended = false; - jp.on('end', function() { - ended = true; - }); - - objects.forEach(function(obj) { - jp.write(JSON.stringify(obj)); - const res = jp.read(); - assert.deepStrictEqual(res, obj); - }); - - jp.end(); - // Read one more time to get the 'end' event - jp.read(); - - process.nextTick(common.mustCall(function() { - assert.strictEqual(ended, true); - })); + { + nested: { + things: [ + { + foo: 'bar' + }, + 100, + 'string' + ] + } + } + ] + let ended = false + jp.on('end', function () { + ended = true + }) + objects.forEach(function (obj) { + jp.write(JSON.stringify(obj)) + const res = jp.read() + assert.deepStrictEqual(res, obj) + }) + jp.end() // Read one more time to get the 'end' event + + jp.read() + process.nextTick( + common.mustCall(function () { + assert.strictEqual(ended, true) + }) + ) } - { // Verify object transform (JSON stringify) - const js = new Transform({ objectMode: true }); - js._transform = function(data, encoding, cb) { + const js = new Transform({ + objectMode: true + }) + + js._transform = function (data, encoding, cb) { try { - js.push(JSON.stringify(data)); - cb(); + js.push(JSON.stringify(data)) + cb() } catch (er) { - cb(er); + cb(er) } - }; - - // Anything except null/undefined is fine. + } // Anything except null/undefined is fine. // those are "magic" in the stream API, because they signal EOF. 
+ const objects = [ - { foo: 'bar' }, + { + foo: 'bar' + }, 100, 'string', - { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }, - ]; - - let ended = false; - js.on('end', function() { - ended = true; - }); - - objects.forEach(function(obj) { - js.write(obj); - const res = js.read(); - assert.strictEqual(res, JSON.stringify(obj)); - }); - - js.end(); - // Read one more time to get the 'end' event - js.read(); - - process.nextTick(common.mustCall(function() { - assert.strictEqual(ended, true); - })); + { + nested: { + things: [ + { + foo: 'bar' + }, + 100, + 'string' + ] + } + } + ] + let ended = false + js.on('end', function () { + ended = true + }) + objects.forEach(function (obj) { + js.write(obj) + const res = js.read() + assert.strictEqual(res, JSON.stringify(obj)) + }) + js.end() // Read one more time to get the 'end' event + + js.read() + process.nextTick( + common.mustCall(function () { + assert.strictEqual(ended, true) + }) + ) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-unpipe-drain.js b/test/parallel/test-stream2-unpipe-drain.js index 53f0a32197..1d2c64e3a5 100644 --- a/test/parallel/test-stream2-unpipe-drain.js +++ b/test/parallel/test-stream2-unpipe-drain.js @@ -18,70 +18,65 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
+'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); + error() {} +} +require('../common') + +const assert = require('assert') -const stream = require('../../lib/ours/index'); +const stream = require('../../lib/ours/index') class TestWriter extends stream.Writable { _write(buffer, encoding, callback) { - silentConsole.log('write called'); - // Super slow write stream (callback never called) + silentConsole.log('write called') // Super slow write stream (callback never called) } } -const dest = new TestWriter(); +const dest = new TestWriter() class TestReader extends stream.Readable { constructor() { - super(); - this.reads = 0; + super() + this.reads = 0 } _read(size) { - this.reads += 1; - this.push(Buffer.alloc(size)); + this.reads += 1 + this.push(Buffer.alloc(size)) } } -const src1 = new TestReader(); -const src2 = new TestReader(); - -src1.pipe(dest); - +const src1 = new TestReader() +const src2 = new TestReader() +src1.pipe(dest) src1.once('readable', () => { process.nextTick(() => { - - src2.pipe(dest); - + src2.pipe(dest) src2.once('readable', () => { process.nextTick(() => { - - src1.unpipe(dest); - }); - }); - }); -}); - - + src1.unpipe(dest) + }) + }) + }) +}) process.on('exit', () => { - assert.strictEqual(src1.reads, 2); - assert.strictEqual(src2.reads, 2); -}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + assert.strictEqual(src1.reads, 2) + assert.strictEqual(src2.reads, 2) +}) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-unpipe-leak.js b/test/parallel/test-stream2-unpipe-leak.js index f4c6ac8ed1..629c75592b 100644 --- a/test/parallel/test-stream2-unpipe-leak.js +++ b/test/parallel/test-stream2-unpipe-leak.js @@ -18,71 +18,72 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); + error() {} +} +require('../common') + +const assert = require('assert') -const chunk = Buffer.from('hallo'); +const stream = require('../../lib/ours/index') + +const chunk = Buffer.from('hallo') class TestWriter extends stream.Writable { _write(buffer, encoding, callback) { - callback(null); + callback(null) } } -const dest = new TestWriter(); - -// Set this high so that we'd trigger a nextTick warning +const dest = new TestWriter() // Set this high so that we'd trigger a nextTick warning // and/or RangeError if we do maybeReadMore wrong. 
+ class TestReader extends stream.Readable { constructor() { super({ highWaterMark: 0x10000 - }); + }) } _read(size) { - this.push(chunk); + this.push(chunk) } } -const src = new TestReader(); +const src = new TestReader() for (let i = 0; i < 10; i++) { - src.pipe(dest); - src.unpipe(dest); + src.pipe(dest) + src.unpipe(dest) } -assert.strictEqual(src.listeners('end').length, 0); -assert.strictEqual(src.listeners('readable').length, 0); - -assert.strictEqual(dest.listeners('unpipe').length, 0); -assert.strictEqual(dest.listeners('drain').length, 0); -assert.strictEqual(dest.listeners('error').length, 0); -assert.strictEqual(dest.listeners('close').length, 0); -assert.strictEqual(dest.listeners('finish').length, 0); +assert.strictEqual(src.listeners('end').length, 0) +assert.strictEqual(src.listeners('readable').length, 0) +assert.strictEqual(dest.listeners('unpipe').length, 0) +assert.strictEqual(dest.listeners('drain').length, 0) +assert.strictEqual(dest.listeners('error').length, 0) +assert.strictEqual(dest.listeners('close').length, 0) +assert.strictEqual(dest.listeners('finish').length, 0) +silentConsole.error(src._readableState) +process.on('exit', function () { + src.readableBuffer.length = 0 + silentConsole.error(src._readableState) + assert(src.readableLength >= src.readableHighWaterMark) + silentConsole.log('ok') +}) +/* replacement start */ -silentConsole.error(src._readableState); -process.on('exit', function() { - src.readableBuffer.length = 0; - silentConsole.error(src._readableState); - assert(src.readableLength >= src.readableHighWaterMark); - silentConsole.log('ok'); -}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream2-writable.js b/test/parallel/test-stream2-writable.js index 2977adeba6..26808e2773 100644 --- a/test/parallel/test-stream2-writable.js +++ b/test/parallel/test-stream2-writable.js @@ -18,457 +18,479 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
+'use strict' +const tap = require('tap') - 'use strict' +const silentConsole = { + log() {}, - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; + error() {} +} +const common = require('../common') + +const { Writable: W, Duplex: D } = require('../../lib/ours/index') -const common = require('../common'); -const { Writable: W, Duplex: D } = require('../../lib/ours/index'); -const assert = require('assert'); +const assert = require('assert') class TestWriter extends W { constructor(opts) { - super(opts); - this.buffer = []; - this.written = 0; + super(opts) + this.buffer = [] + this.written = 0 } _write(chunk, encoding, cb) { // Simulate a small unpredictable latency setTimeout(() => { - this.buffer.push(chunk.toString()); - this.written += chunk.length; - cb(); - }, Math.floor(Math.random() * 10)); + this.buffer.push(chunk.toString()) + this.written += chunk.length + cb() + }, Math.floor(Math.random() * 10)) } } -const chunks = new Array(50); +const chunks = new Array(50) + for (let i = 0; i < chunks.length; i++) { - chunks[i] = 'x'.repeat(i); + chunks[i] = 'x'.repeat(i) } { // Verify fast writing const tw = new TestWriter({ highWaterMark: 100 - }); - - tw.on('finish', common.mustCall(function() { - // Got chunks in the right order - assert.deepStrictEqual(tw.buffer, chunks); - })); - - chunks.forEach(function(chunk) { + }) + tw.on( + 'finish', + common.mustCall(function () { + // Got chunks in the right order + assert.deepStrictEqual(tw.buffer, chunks) + }) + ) + chunks.forEach(function (chunk) { // Ignore backpressure. Just buffer it all up. - tw.write(chunk); - }); - tw.end(); + tw.write(chunk) + }) + tw.end() } - { // Verify slow writing const tw = new TestWriter({ highWaterMark: 100 - }); - - tw.on('finish', common.mustCall(function() { - // Got chunks in the right order - assert.deepStrictEqual(tw.buffer, chunks); - })); - - let i = 0; - (function W() { - tw.write(chunks[i++]); - if (i < chunks.length) - setTimeout(W, 10); - else - tw.end(); - })(); + }) + tw.on( + 'finish', + common.mustCall(function () { + // Got chunks in the right order + assert.deepStrictEqual(tw.buffer, chunks) + }) + ) + let i = 0 + + ;(function W() { + tw.write(chunks[i++]) + if (i < chunks.length) setTimeout(W, 10) + else tw.end() + })() } - { // Verify write backpressure const tw = new TestWriter({ highWaterMark: 50 - }); - - let drains = 0; - - tw.on('finish', common.mustCall(function() { - // Got chunks in the right order - assert.deepStrictEqual(tw.buffer, chunks); - assert.strictEqual(drains, 17); - })); - - tw.on('drain', function() { - drains++; - }); + }) + let drains = 0 + tw.on( + 'finish', + common.mustCall(function () { + // Got chunks in the right order + assert.deepStrictEqual(tw.buffer, chunks) + assert.strictEqual(drains, 17) + }) + ) + tw.on('drain', function () { + drains++ + }) + let i = 0 + + ;(function W() { + let ret - let i = 0; - (function W() { - let ret; do { - ret = tw.write(chunks[i++]); - } while (ret !== false && i < chunks.length); + ret = tw.write(chunks[i++]) + } while (ret !== false && i < chunks.length) if (i < chunks.length) { - assert(tw.writableLength >= 50); - tw.once('drain', W); + assert(tw.writableLength >= 50) + tw.once('drain', W) } else { - tw.end(); + tw.end() } - })(); + })() } - { // Verify write buffersize const tw = new TestWriter({ highWaterMark: 100 - }); - - const encodings = - [ 'hex', - 'utf8', - 'utf-8', - 'ascii', - 'latin1', - 'binary', - 'base64', - 'ucs2', - 'ucs-2', - 'utf16le', - 'utf-16le', - undefined ]; - - 
tw.on('finish', function() { + }) + const encodings = [ + 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'latin1', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined + ] + tw.on('finish', function () { // Got the expected chunks - assert.deepStrictEqual(tw.buffer, chunks); - }); - - chunks.forEach(function(chunk, i) { - const enc = encodings[i % encodings.length]; - chunk = Buffer.from(chunk); - tw.write(chunk.toString(enc), enc); - }); + assert.deepStrictEqual(tw.buffer, chunks) + }) + chunks.forEach(function (chunk, i) { + const enc = encodings[i % encodings.length] + chunk = Buffer.from(chunk) + tw.write(chunk.toString(enc), enc) + }) } - { // Verify write with no buffersize const tw = new TestWriter({ highWaterMark: 100, decodeStrings: false - }); + }) - tw._write = function(chunk, encoding, cb) { - assert.strictEqual(typeof chunk, 'string'); - chunk = Buffer.from(chunk, encoding); - return TestWriter.prototype._write.call(this, chunk, encoding, cb); - }; - - const encodings = - [ 'hex', - 'utf8', - 'utf-8', - 'ascii', - 'latin1', - 'binary', - 'base64', - 'ucs2', - 'ucs-2', - 'utf16le', - 'utf-16le', - undefined ]; + tw._write = function (chunk, encoding, cb) { + assert.strictEqual(typeof chunk, 'string') + chunk = Buffer.from(chunk, encoding) + return TestWriter.prototype._write.call(this, chunk, encoding, cb) + } - tw.on('finish', function() { + const encodings = [ + 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'latin1', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined + ] + tw.on('finish', function () { // Got the expected chunks - assert.deepStrictEqual(tw.buffer, chunks); - }); - - chunks.forEach(function(chunk, i) { - const enc = encodings[i % encodings.length]; - chunk = Buffer.from(chunk); - tw.write(chunk.toString(enc), enc); - }); + assert.deepStrictEqual(tw.buffer, chunks) + }) + chunks.forEach(function (chunk, i) { + const enc = encodings[i % encodings.length] + chunk = Buffer.from(chunk) + tw.write(chunk.toString(enc), enc) + }) } - { // Verify write callbacks - const callbacks = chunks.map(function(chunk, i) { - return [i, function() { - callbacks._called[i] = chunk; - }]; - }).reduce(function(set, x) { - set[`callback-${x[0]}`] = x[1]; - return set; - }, {}); - callbacks._called = []; - + const callbacks = chunks + .map(function (chunk, i) { + return [ + i, + function () { + callbacks._called[i] = chunk + } + ] + }) + .reduce(function (set, x) { + set[`callback-${x[0]}`] = x[1] + return set + }, {}) + callbacks._called = [] const tw = new TestWriter({ highWaterMark: 100 - }); - - tw.on('finish', common.mustCall(function() { - process.nextTick(common.mustCall(function() { - // Got chunks in the right order - assert.deepStrictEqual(tw.buffer, chunks); - // Called all callbacks - assert.deepStrictEqual(callbacks._called, chunks); - })); - })); - - chunks.forEach(function(chunk, i) { - tw.write(chunk, callbacks[`callback-${i}`]); - }); - tw.end(); + }) + tw.on( + 'finish', + common.mustCall(function () { + process.nextTick( + common.mustCall(function () { + // Got chunks in the right order + assert.deepStrictEqual(tw.buffer, chunks) // Called all callbacks + + assert.deepStrictEqual(callbacks._called, chunks) + }) + ) + }) + ) + chunks.forEach(function (chunk, i) { + tw.write(chunk, callbacks[`callback-${i}`]) + }) + tw.end() } - { // Verify end() callback - const tw = new TestWriter(); - tw.end(common.mustCall()); + const tw = new TestWriter() + tw.end(common.mustCall()) } - -const helloWorldBuffer = 
Buffer.from('hello world'); - +const helloWorldBuffer = Buffer.from('hello world') { // Verify end() callback with chunk - const tw = new TestWriter(); - tw.end(helloWorldBuffer, common.mustCall()); + const tw = new TestWriter() + tw.end(helloWorldBuffer, common.mustCall()) } - { // Verify end() callback with chunk and encoding - const tw = new TestWriter(); - tw.end('hello world', 'ascii', common.mustCall()); + const tw = new TestWriter() + tw.end('hello world', 'ascii', common.mustCall()) } - { // Verify end() callback after write() call - const tw = new TestWriter(); - tw.write(helloWorldBuffer); - tw.end(common.mustCall()); + const tw = new TestWriter() + tw.write(helloWorldBuffer) + tw.end(common.mustCall()) } - { // Verify end() callback after write() callback - const tw = new TestWriter(); - let writeCalledback = false; - tw.write(helloWorldBuffer, function() { - writeCalledback = true; - }); - tw.end(common.mustCall(function() { - assert.strictEqual(writeCalledback, true); - })); + const tw = new TestWriter() + let writeCalledback = false + tw.write(helloWorldBuffer, function () { + writeCalledback = true + }) + tw.end( + common.mustCall(function () { + assert.strictEqual(writeCalledback, true) + }) + ) } - { // Verify encoding is ignored for buffers - const tw = new W(); - const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb'; - tw._write = common.mustCall(function(chunk) { - assert.strictEqual(chunk.toString('hex'), hex); - }); - const buf = Buffer.from(hex, 'hex'); - tw.write(buf, 'latin1'); + const tw = new W() + const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb' + tw._write = common.mustCall(function (chunk) { + assert.strictEqual(chunk.toString('hex'), hex) + }) + const buf = Buffer.from(hex, 'hex') + tw.write(buf, 'latin1') } - { // Verify writables cannot be piped - const w = new W({ autoDestroy: false }); - w._write = common.mustNotCall(); - let gotError = false; - w.on('error', function() { - gotError = true; - }); - w.pipe(process.stdout); - assert.strictEqual(gotError, true); + const w = new W({ + autoDestroy: false + }) + w._write = common.mustNotCall() + let gotError = false + w.on('error', function () { + gotError = true + }) + w.pipe(process.stdout) + assert.strictEqual(gotError, true) } - { // Verify that duplex streams cannot be piped - const d = new D(); - d._read = common.mustCall(); - d._write = common.mustNotCall(); - let gotError = false; - d.on('error', function() { - gotError = true; - }); - d.pipe(process.stdout); - assert.strictEqual(gotError, false); + const d = new D() + d._read = common.mustCall() + d._write = common.mustNotCall() + let gotError = false + d.on('error', function () { + gotError = true + }) + d.pipe(process.stdout) + assert.strictEqual(gotError, false) } - { // Verify that end(chunk) twice is an error - const w = new W(); + const w = new W() w._write = common.mustCall((msg) => { - assert.strictEqual(msg.toString(), 'this is the end'); - }); - let gotError = false; - w.on('error', function(er) { - gotError = true; - assert.strictEqual(er.message, 'write after end'); - }); - w.end('this is the end'); - w.end('and so is this'); - process.nextTick(common.mustCall(function() { - assert.strictEqual(gotError, true); - })); + assert.strictEqual(msg.toString(), 'this is the end') + }) + let gotError = false + w.on('error', function (er) { + gotError = true + assert.strictEqual(er.message, 'write after end') + }) + w.end('this is the end') + w.end('and so is this') + process.nextTick( + common.mustCall(function () { + assert.strictEqual(gotError, true) + 
}) + ) } - { // Verify stream doesn't end while writing - const w = new W(); - let wrote = false; - w._write = function(chunk, e, cb) { - assert.strictEqual(this.writing, undefined); - wrote = true; - this.writing = true; + const w = new W() + let wrote = false + + w._write = function (chunk, e, cb) { + assert.strictEqual(this.writing, undefined) + wrote = true + this.writing = true setTimeout(() => { - this.writing = false; - cb(); - }, 1); - }; - w.on('finish', common.mustCall(function() { - assert.strictEqual(wrote, true); - assert.strictEqual(this.writing, false); - })); - w.write(Buffer.alloc(0)); - w.end(); -} + this.writing = false + cb() + }, 1) + } + w.on( + 'finish', + common.mustCall(function () { + assert.strictEqual(wrote, true) + assert.strictEqual(this.writing, false) + }) + ) + w.write(Buffer.alloc(0)) + w.end() +} { // Verify finish does not come before write() callback - const w = new W(); - let writeCb = false; - w._write = function(chunk, e, cb) { - setTimeout(function() { - writeCb = true; - cb(); - }, 10); - }; - w.on('finish', common.mustCall(function() { - assert.strictEqual(writeCb, true); - })); - w.write(Buffer.alloc(0)); - w.end(); -} + const w = new W() + let writeCb = false + + w._write = function (chunk, e, cb) { + setTimeout(function () { + writeCb = true + cb() + }, 10) + } + w.on( + 'finish', + common.mustCall(function () { + assert.strictEqual(writeCb, true) + }) + ) + w.write(Buffer.alloc(0)) + w.end() +} { // Verify finish does not come before synchronous _write() callback - const w = new W(); - let writeCb = false; - w._write = function(chunk, e, cb) { - cb(); - }; - w.on('finish', common.mustCall(function() { - assert.strictEqual(writeCb, true); - })); - w.write(Buffer.alloc(0), function() { - writeCb = true; - }); - w.end(); -} + const w = new W() + let writeCb = false + + w._write = function (chunk, e, cb) { + cb() + } + w.on( + 'finish', + common.mustCall(function () { + assert.strictEqual(writeCb, true) + }) + ) + w.write(Buffer.alloc(0), function () { + writeCb = true + }) + w.end() +} { // Verify finish is emitted if the last chunk is empty - const w = new W(); - w._write = function(chunk, e, cb) { - process.nextTick(cb); - }; - w.on('finish', common.mustCall()); - w.write(Buffer.allocUnsafe(1)); - w.end(Buffer.alloc(0)); -} + const w = new W() + + w._write = function (chunk, e, cb) { + process.nextTick(cb) + } + w.on('finish', common.mustCall()) + w.write(Buffer.allocUnsafe(1)) + w.end(Buffer.alloc(0)) +} { // Verify that finish is emitted after shutdown - const w = new W(); - let shutdown = false; + const w = new W() + let shutdown = false + w._final = common.mustCall(function (cb) { + assert.strictEqual(this, w) + setTimeout(function () { + shutdown = true + cb() + }, 100) + }) + + w._write = function (chunk, e, cb) { + process.nextTick(cb) + } - w._final = common.mustCall(function(cb) { - assert.strictEqual(this, w); - setTimeout(function() { - shutdown = true; - cb(); - }, 100); - }); - w._write = function(chunk, e, cb) { - process.nextTick(cb); - }; - w.on('finish', common.mustCall(function() { - assert.strictEqual(shutdown, true); - })); - w.write(Buffer.allocUnsafe(1)); - w.end(Buffer.allocUnsafe(0)); + w.on( + 'finish', + common.mustCall(function () { + assert.strictEqual(shutdown, true) + }) + ) + w.write(Buffer.allocUnsafe(1)) + w.end(Buffer.allocUnsafe(0)) } - { // Verify that error is only emitted once when failing in _finish. 
- const w = new W(); - - w._final = common.mustCall(function(cb) { - cb(new Error('test')); - }); - w.on('error', common.mustCall((err) => { - assert.strictEqual(w._writableState.errorEmitted, true); - assert.strictEqual(err.message, 'test'); - w.on('error', common.mustNotCall()); - w.destroy(new Error()); - })); - w.end(); + const w = new W() + w._final = common.mustCall(function (cb) { + cb(new Error('test')) + }) + w.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(w._writableState.errorEmitted, true) + assert.strictEqual(err.message, 'test') + w.on('error', common.mustNotCall()) + w.destroy(new Error()) + }) + ) + w.end() } - { // Verify that error is only emitted once when failing in write. - const w = new W(); - w.on('error', common.mustNotCall()); - assert.throws(() => { - w.write(null); - }, { - code: 'ERR_STREAM_NULL_VALUES' - }); + const w = new W() + w.on('error', common.mustNotCall()) + assert.throws( + () => { + w.write(null) + }, + { + code: 'ERR_STREAM_NULL_VALUES' + } + ) } - { // Verify that error is only emitted once when failing in write after end. - const w = new W(); - w.on('error', common.mustCall((err) => { - assert.strictEqual(w._writableState.errorEmitted, true); - assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END'); - })); - w.end(); - w.write('hello'); - w.destroy(new Error()); + const w = new W() + w.on( + 'error', + common.mustCall((err) => { + assert.strictEqual(w._writableState.errorEmitted, true) + assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END') + }) + ) + w.end() + w.write('hello') + w.destroy(new Error()) } - { // Verify that finish is not emitted after error - const w = new W(); + const w = new W() + w._final = common.mustCall(function (cb) { + cb(new Error()) + }) + + w._write = function (chunk, e, cb) { + process.nextTick(cb) + } - w._final = common.mustCall(function(cb) { - cb(new Error()); - }); - w._write = function(chunk, e, cb) { - process.nextTick(cb); - }; - w.on('error', common.mustCall()); - w.on('prefinish', common.mustNotCall()); - w.on('finish', common.mustNotCall()); - w.write(Buffer.allocUnsafe(1)); - w.end(Buffer.allocUnsafe(0)); + w.on('error', common.mustCall()) + w.on('prefinish', common.mustNotCall()) + w.on('finish', common.mustNotCall()) + w.write(Buffer.allocUnsafe(1)) + w.end(Buffer.allocUnsafe(0)) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream3-cork-end.js b/test/parallel/test-stream3-cork-end.js index 665391f69a..861cd5a40f 100644 --- a/test/parallel/test-stream3-cork-end.js +++ b/test/parallel/test-stream3-cork-end.js @@ -1,106 +1,99 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); -const Writable = stream.Writable; +const silentConsole = { + log() {}, -// Test the buffering behavior of Writable streams. 
+ error() {} +} +require('../common') + +const assert = require('assert') + +const stream = require('../../lib/ours/index') + +const Writable = stream.Writable // Test the buffering behavior of Writable streams. // // The call to cork() triggers storing chunks which are flushed // on calling end() and the stream subsequently ended. // // node version target: 0.12 -const expectedChunks = ['please', 'buffer', 'me', 'kindly']; -const inputChunks = expectedChunks.slice(0); -let seenChunks = []; -let seenEnd = false; +const expectedChunks = ['please', 'buffer', 'me', 'kindly'] +const inputChunks = expectedChunks.slice(0) +let seenChunks = [] +let seenEnd = false +const w = new Writable() // Let's arrange to store the chunks. -const w = new Writable(); -// Let's arrange to store the chunks. -w._write = function(chunk, encoding, cb) { +w._write = function (chunk, encoding, cb) { // Stream end event is not seen before the last write. - assert.ok(!seenEnd); - // Default encoding given none was specified. - assert.strictEqual(encoding, 'buffer'); - - seenChunks.push(chunk); - cb(); -}; -// Let's record the stream end event. + assert.ok(!seenEnd) // Default encoding given none was specified. + + assert.strictEqual(encoding, 'buffer') + seenChunks.push(chunk) + cb() +} // Let's record the stream end event. + w.on('finish', () => { - seenEnd = true; -}); + seenEnd = true +}) function writeChunks(remainingChunks, callback) { - const writeChunk = remainingChunks.shift(); - let writeState; + const writeChunk = remainingChunks.shift() + let writeState if (writeChunk) { setImmediate(() => { - writeState = w.write(writeChunk); - // We were not told to stop writing. - assert.ok(writeState); + writeState = w.write(writeChunk) // We were not told to stop writing. - writeChunks(remainingChunks, callback); - }); + assert.ok(writeState) + writeChunks(remainingChunks, callback) + }) } else { - callback(); + callback() } -} +} // Do an initial write. + +w.write('stuff') // The write was immediate. + +assert.strictEqual(seenChunks.length, 1) // Reset the seen chunks. -// Do an initial write. -w.write('stuff'); -// The write was immediate. -assert.strictEqual(seenChunks.length, 1); -// Reset the seen chunks. -seenChunks = []; +seenChunks = [] // Trigger stream buffering. -// Trigger stream buffering. -w.cork(); +w.cork() // Write the bufferedChunks. -// Write the bufferedChunks. writeChunks(inputChunks, () => { // Should not have seen anything yet. - assert.strictEqual(seenChunks.length, 0); + assert.strictEqual(seenChunks.length, 0) // Trigger flush and ending the stream. - // Trigger flush and ending the stream. - w.end(); + w.end() // Stream should not ended in current tick. - // Stream should not ended in current tick. - assert.ok(!seenEnd); + assert.ok(!seenEnd) // Buffered bytes should be seen in current tick. - // Buffered bytes should be seen in current tick. - assert.strictEqual(seenChunks.length, 4); + assert.strictEqual(seenChunks.length, 4) // Did the chunks match. - // Did the chunks match. for (let i = 0, l = expectedChunks.length; i < l; i++) { - const seen = seenChunks[i]; - // There was a chunk. - assert.ok(seen); + const seen = seenChunks[i] // There was a chunk. - const expected = Buffer.from(expectedChunks[i]); - // It was what we expected. - assert.ok(seen.equals(expected)); + assert.ok(seen) + const expected = Buffer.from(expectedChunks[i]) // It was what we expected. + + assert.ok(seen.equals(expected)) } setImmediate(() => { // Stream should have ended in next tick. 
- assert.ok(seenEnd); - }); -}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + assert.ok(seenEnd) + }) +}) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream3-cork-uncork.js b/test/parallel/test-stream3-cork-uncork.js index d146ed1431..a6940337cd 100644 --- a/test/parallel/test-stream3-cork-uncork.js +++ b/test/parallel/test-stream3-cork-uncork.js @@ -1,101 +1,95 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); -const stream = require('../../lib/ours/index'); -const Writable = stream.Writable; +const silentConsole = { + log() {}, -// Test the buffering behavior of Writable streams. + error() {} +} +require('../common') + +const assert = require('assert') + +const stream = require('../../lib/ours/index') + +const Writable = stream.Writable // Test the buffering behavior of Writable streams. // // The call to cork() triggers storing chunks which are flushed // on calling uncork() in the same tick. // // node version target: 0.12 -const expectedChunks = ['please', 'buffer', 'me', 'kindly']; -const inputChunks = expectedChunks.slice(0); -let seenChunks = []; -let seenEnd = false; +const expectedChunks = ['please', 'buffer', 'me', 'kindly'] +const inputChunks = expectedChunks.slice(0) +let seenChunks = [] +let seenEnd = false +const w = new Writable() // Let's arrange to store the chunks. -const w = new Writable(); -// Let's arrange to store the chunks. -w._write = function(chunk, encoding, cb) { +w._write = function (chunk, encoding, cb) { // Default encoding given none was specified. - assert.strictEqual(encoding, 'buffer'); + assert.strictEqual(encoding, 'buffer') + seenChunks.push(chunk) + cb() +} // Let's record the stream end event. - seenChunks.push(chunk); - cb(); -}; -// Let's record the stream end event. w.on('finish', () => { - seenEnd = true; -}); + seenEnd = true +}) function writeChunks(remainingChunks, callback) { - const writeChunk = remainingChunks.shift(); - let writeState; + const writeChunk = remainingChunks.shift() + let writeState if (writeChunk) { setImmediate(() => { - writeState = w.write(writeChunk); - // We were not told to stop writing. - assert.ok(writeState); + writeState = w.write(writeChunk) // We were not told to stop writing. - writeChunks(remainingChunks, callback); - }); + assert.ok(writeState) + writeChunks(remainingChunks, callback) + }) } else { - callback(); + callback() } -} +} // Do an initial write. -// Do an initial write. -w.write('stuff'); -// The write was immediate. -assert.strictEqual(seenChunks.length, 1); -// Reset the chunks seen so far. -seenChunks = []; +w.write('stuff') // The write was immediate. -// Trigger stream buffering. -w.cork(); +assert.strictEqual(seenChunks.length, 1) // Reset the chunks seen so far. + +seenChunks = [] // Trigger stream buffering. + +w.cork() // Write the bufferedChunks. -// Write the bufferedChunks. writeChunks(inputChunks, () => { // Should not have seen anything yet. - assert.strictEqual(seenChunks.length, 0); + assert.strictEqual(seenChunks.length, 0) // Trigger writing out the buffer. 
- // Trigger writing out the buffer. - w.uncork(); + w.uncork() // Buffered bytes should be seen in current tick. - // Buffered bytes should be seen in current tick. - assert.strictEqual(seenChunks.length, 4); + assert.strictEqual(seenChunks.length, 4) // Did the chunks match. - // Did the chunks match. for (let i = 0, l = expectedChunks.length; i < l; i++) { - const seen = seenChunks[i]; - // There was a chunk. - assert.ok(seen); + const seen = seenChunks[i] // There was a chunk. + + assert.ok(seen) + const expected = Buffer.from(expectedChunks[i]) // It was what we expected. - const expected = Buffer.from(expectedChunks[i]); - // It was what we expected. - assert.ok(seen.equals(expected)); + assert.ok(seen.equals(expected)) } setImmediate(() => { // The stream should not have been ended. - assert.ok(!seenEnd); - }); -}); - - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ + assert.ok(!seenEnd) + }) +}) +/* replacement start */ + +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-stream3-pause-then-read.js b/test/parallel/test-stream3-pause-then-read.js index 84a2173e87..11eacc4a09 100644 --- a/test/parallel/test-stream3-pause-then-read.js +++ b/test/parallel/test-stream3-pause-then-read.js @@ -18,168 +18,174 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +'use strict' +const tap = require('tap') + +const silentConsole = { + log() {}, + + error() {} +} +require('../common') + +const assert = require('assert') + +const stream = require('../../lib/ours/index') + +const Readable = stream.Readable +const Writable = stream.Writable +const totalChunks = 100 +const chunkSize = 99 +const expectTotalData = totalChunks * chunkSize +let expectEndingData = expectTotalData +const r = new Readable({ + highWaterMark: 1000 +}) +let chunks = totalChunks + +r._read = function (n) { + silentConsole.log('_read called', chunks) + if (!(chunks % 2)) setImmediate(push) + else if (!(chunks % 3)) process.nextTick(push) + else push() +} + +let totalPushed = 0 - 'use strict' - - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -require('../common'); -const assert = require('assert'); - -const stream = require('../../lib/ours/index'); -const Readable = stream.Readable; -const Writable = stream.Writable; - -const totalChunks = 100; -const chunkSize = 99; -const expectTotalData = totalChunks * chunkSize; -let expectEndingData = expectTotalData; - -const r = new Readable({ highWaterMark: 1000 }); -let chunks = totalChunks; -r._read = function(n) { - silentConsole.log('_read called', chunks); - if (!(chunks % 2)) - setImmediate(push); - else if (!(chunks % 3)) - process.nextTick(push); - else - push(); -}; - -let totalPushed = 0; function push() { - const chunk = chunks-- > 0 ? Buffer.alloc(chunkSize, 'x') : null; + const chunk = chunks-- > 0 ? Buffer.alloc(chunkSize, 'x') : null + if (chunk) { - totalPushed += chunk.length; + totalPushed += chunk.length } - silentConsole.log('chunks', chunks); - r.push(chunk); + + silentConsole.log('chunks', chunks) + r.push(chunk) } -read100(); +read100() // First we read 100 bytes. 
-// First we read 100 bytes. function read100() { - readn(100, onData); + readn(100, onData) } function readn(n, then) { - silentConsole.error(`read ${n}`); - expectEndingData -= n; - (function read() { - const c = r.read(n); - silentConsole.error('c', c); - if (!c) - r.once('readable', read); + silentConsole.error(`read ${n}`) + expectEndingData -= n + ;(function read() { + const c = r.read(n) + silentConsole.error('c', c) + if (!c) r.once('readable', read) else { - assert.strictEqual(c.length, n); - assert(!r.readableFlowing); - then(); + assert.strictEqual(c.length, n) + assert(!r.readableFlowing) + then() } - })(); -} + })() +} // Then we listen to some data events. -// Then we listen to some data events. function onData() { - expectEndingData -= 100; - silentConsole.error('onData'); - let seen = 0; + expectEndingData -= 100 + silentConsole.error('onData') + let seen = 0 r.on('data', function od(c) { - seen += c.length; + seen += c.length + if (seen >= 100) { // Seen enough - r.removeListener('data', od); - r.pause(); + r.removeListener('data', od) + r.pause() + if (seen > 100) { // Oh no, seen too much! // Put the extra back. - const diff = seen - 100; - r.unshift(c.slice(c.length - diff)); - silentConsole.error('seen too much', seen, diff); - } + const diff = seen - 100 + r.unshift(c.slice(c.length - diff)) + silentConsole.error('seen too much', seen, diff) + } // Nothing should be lost in-between. - // Nothing should be lost in-between. - setImmediate(pipeLittle); + setImmediate(pipeLittle) } - }); -} + }) +} // Just pipe 200 bytes, then unshift the extra and unpipe. -// Just pipe 200 bytes, then unshift the extra and unpipe. function pipeLittle() { - expectEndingData -= 200; - silentConsole.error('pipe a little'); - const w = new Writable(); - let written = 0; + expectEndingData -= 200 + silentConsole.error('pipe a little') + const w = new Writable() + let written = 0 w.on('finish', () => { - assert.strictEqual(written, 200); - setImmediate(read1234); - }); - w._write = function(chunk, encoding, cb) { - written += chunk.length; + assert.strictEqual(written, 200) + setImmediate(read1234) + }) + + w._write = function (chunk, encoding, cb) { + written += chunk.length + if (written >= 200) { - r.unpipe(w); - w.end(); - cb(); + r.unpipe(w) + w.end() + cb() + if (written > 200) { - const diff = written - 200; - written -= diff; - r.unshift(chunk.slice(chunk.length - diff)); + const diff = written - 200 + written -= diff + r.unshift(chunk.slice(chunk.length - diff)) } } else { - setImmediate(cb); + setImmediate(cb) } - }; - r.pipe(w); -} + } + + r.pipe(w) +} // Now read 1234 more bytes. -// Now read 1234 more bytes. function read1234() { - readn(1234, resumePause); + readn(1234, resumePause) } function resumePause() { - silentConsole.error('resumePause'); - // Don't read anything, just resume and re-pause a whole bunch. - r.resume(); - r.pause(); - r.resume(); - r.pause(); - r.resume(); - r.pause(); - r.resume(); - r.pause(); - r.resume(); - r.pause(); - setImmediate(pipe); + silentConsole.error('resumePause') // Don't read anything, just resume and re-pause a whole bunch. 
+ + r.resume() + r.pause() + r.resume() + r.pause() + r.resume() + r.pause() + r.resume() + r.pause() + r.resume() + r.pause() + setImmediate(pipe) } - function pipe() { - silentConsole.error('pipe the rest'); - const w = new Writable(); - let written = 0; - w._write = function(chunk, encoding, cb) { - written += chunk.length; - cb(); - }; + silentConsole.error('pipe the rest') + const w = new Writable() + let written = 0 + + w._write = function (chunk, encoding, cb) { + written += chunk.length + cb() + } + w.on('finish', () => { - silentConsole.error('written', written, totalPushed); - assert.strictEqual(written, expectEndingData); - assert.strictEqual(totalPushed, expectTotalData); - silentConsole.log('ok'); - }); - r.pipe(w); + silentConsole.error('written', written, totalPushed) + assert.strictEqual(written, expectEndingData) + assert.strictEqual(totalPushed, expectTotalData) + silentConsole.log('ok') + }) + r.pipe(w) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ diff --git a/test/parallel/test-streams-highwatermark.js b/test/parallel/test-streams-highwatermark.js index b5d04577bf..ab8b499998 100644 --- a/test/parallel/test-streams-highwatermark.js +++ b/test/parallel/test-streams-highwatermark.js @@ -1,102 +1,108 @@ +'use strict' - 'use strict' +const tap = require('tap') - const tap = require('tap'); - const silentConsole = { log() {}, error() {} }; - ; -const common = require('../common'); +const silentConsole = { + log() {}, -const assert = require('assert'); -const stream = require('../../lib/ours/index'); -const { inspect } = require('util'); + error() {} +} +const common = require('../common') + +const assert = require('assert') + +const stream = require('../../lib/ours/index') + +const { inspect } = require('util') { // This test ensures that the stream implementation correctly handles values // for highWaterMark which exceed the range of signed 32 bit integers and // rejects invalid values. - // This number exceeds the range of 32 bit integer arithmetic but should still // be handled correctly. - const ovfl = Number.MAX_SAFE_INTEGER; - - const readable = stream.Readable({ highWaterMark: ovfl }); - assert.strictEqual(readable._readableState.highWaterMark, ovfl); - - const writable = stream.Writable({ highWaterMark: ovfl }); - assert.strictEqual(writable._writableState.highWaterMark, ovfl); + const ovfl = Number.MAX_SAFE_INTEGER + const readable = stream.Readable({ + highWaterMark: ovfl + }) + assert.strictEqual(readable._readableState.highWaterMark, ovfl) + const writable = stream.Writable({ + highWaterMark: ovfl + }) + assert.strictEqual(writable._writableState.highWaterMark, ovfl) for (const invalidHwm of [true, false, '5', {}, -5, NaN]) { for (const type of [stream.Readable, stream.Writable]) { - assert.throws(() => { - type({ highWaterMark: invalidHwm }); - }, { - name: 'TypeError', - code: 'ERR_INVALID_ARG_VALUE', - message: "The property 'options.highWaterMark' is invalid. " + - `Received ${inspect(invalidHwm)}` - }); + assert.throws( + () => { + type({ + highWaterMark: invalidHwm + }) + }, + { + name: 'TypeError', + code: 'ERR_INVALID_ARG_VALUE', + message: "The property 'options.highWaterMark' is invalid. 
" + `Received ${inspect(invalidHwm)}` + } + ) } } } - { // This test ensures that the push method's implementation // correctly handles the edge case where the highWaterMark and // the state.length are both zero - - const readable = stream.Readable({ highWaterMark: 0 }); + const readable = stream.Readable({ + highWaterMark: 0 + }) for (let i = 0; i < 3; i++) { - const needMoreData = readable.push(); - assert.strictEqual(needMoreData, true); + const needMoreData = readable.push() + assert.strictEqual(needMoreData, true) } } - { // This test ensures that the read(n) method's implementation // correctly handles the edge case where the highWaterMark, state.length // and n are all zero - - const readable = stream.Readable({ highWaterMark: 0 }); - - readable._read = common.mustCall(); - readable.read(0); + const readable = stream.Readable({ + highWaterMark: 0 + }) + readable._read = common.mustCall() + readable.read(0) } - { // Parse size as decimal integer - ['1', '1.0', 1].forEach((size) => { + ;['1', '1.0', 1].forEach((size) => { const readable = new stream.Readable({ read: common.mustCall(), - highWaterMark: 0, - }); - readable.read(size); - - assert.strictEqual(readable._readableState.highWaterMark, Number(size)); - }); + highWaterMark: 0 + }) + readable.read(size) + assert.strictEqual(readable._readableState.highWaterMark, Number(size)) + }) } - { // Test highwatermark limit - const hwm = 0x40000000 + 1; + const hwm = 0x40000000 + 1 const readable = stream.Readable({ - read() {}, - }); - - assert.throws(() => readable.read(hwm), common.expectsError({ - code: 'ERR_OUT_OF_RANGE', - message: 'The value of "size" is out of range.' + - ' It must be <= 1GiB. Received ' + - hwm, - })); + read() {} + }) + assert.throws( + () => readable.read(hwm), + common.expectsError({ + code: 'ERR_OUT_OF_RANGE', + message: 'The value of "size" is out of range.' + ' It must be <= 1GiB. Received ' + hwm + }) + ) } +/* replacement start */ - /* replacement start */ - process.on('beforeExit', (code) => { - if(code === 0) { - tap.pass('test succeeded'); - } else { - tap.fail(`test failed - exited code ${code}`); - } - }); - /* replacement end */ +process.on('beforeExit', (code) => { + if (code === 0) { + tap.pass('test succeeded') + } else { + tap.fail(`test failed - exited code ${code}`) + } +}) +/* replacement end */ From 24e9a9c6360f194fbc2ca2d9f514e84ba0daf3f7 Mon Sep 17 00:00:00 2001 From: Shogun Date: Thu, 21 Apr 2022 10:57:50 +0200 Subject: [PATCH 06/19] fix: Fixed dependencies. --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 02635d37cc..433adc4be6 100644 --- a/package.json +++ b/package.json @@ -59,7 +59,7 @@ "eslint-config-standard": "^16.0.3", "eslint-plugin-import": "^2.26.0", "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^6.0.0", + "eslint-plugin-promise": "^5.0.0", "prettier": "^2.6.2", "tap": "^16.0.1", "tape": "^5.5.2", @@ -67,6 +67,6 @@ "undici": "^5.0.0" }, "engines": { - "node": ">= 12.22.0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } } From 95f32882195ad56e38e97f08eb9be0230508564c Mon Sep 17 00:00:00 2001 From: Shogun Date: Fri, 13 May 2022 15:03:49 +0200 Subject: [PATCH 07/19] feat: Do not modify globalThis. 
--- .airtap.yml | 39 - .eslintrc.js | 14 +- .github/workflows/browsers.yml | 5 +- .gitignore | 1 + build/build.mjs | 25 +- build/footers.mjs | 35 - build/headers.mjs | 21 + build/replacements.mjs | 84 +- lib/internal/streams/buffer_list.js | 4 +- lib/internal/streams/duplexify.js | 23 +- lib/internal/streams/from.js | 2 - lib/internal/streams/lazy_transform.js | 2 +- lib/internal/streams/legacy.js | 2 +- lib/internal/streams/operators.js | 6 +- lib/internal/streams/pipeline.js | 7 +- lib/internal/streams/readable.js | 6 +- lib/internal/streams/writable.js | 4 +- lib/internal/validators.js | 2 +- lib/ours/errors.js | 20 +- lib/ours/util.js | 100 ++- lib/stream.js | 2 +- package.json | 39 +- src/errors.js | 17 +- src/test/browser/fixtures/index.html | 72 ++ .../browser/fixtures/rollup.browser.config.js | 27 + src/test/browser/runner.mjs | 109 +++ src/test/browser/symbols.js | 6 + src/test/browser/test-browser.js | 135 +++ src/test/browser/test-stream-big-packet.js | 8 +- src/test/browser/test-stream-big-push.js | 8 +- src/test/browser/test-stream-duplex.js | 8 +- src/test/browser/test-stream-end-paused.js | 8 +- src/test/browser/test-stream-finished.js | 93 +- src/test/browser/test-stream-ispaused.js | 8 +- .../browser/test-stream-pipe-after-end.js | 8 +- .../browser/test-stream-pipe-cleanup-pause.js | 8 +- src/test/browser/test-stream-pipe-cleanup.js | 8 +- .../test-stream-pipe-error-handling.js | 180 ++-- src/test/browser/test-stream-pipe-event.js | 8 +- .../test-stream-pipe-without-listenerCount.js | 8 +- src/test/browser/test-stream-pipeline.js | 157 ++-- src/test/browser/test-stream-push-order.js | 8 +- src/test/browser/test-stream-push-strings.js | 8 +- ...stream-readable-constructor-set-methods.js | 8 +- .../browser/test-stream-readable-event.js | 171 ++-- src/test/browser/test-stream-sync-write.js | 8 +- ...tream-transform-constructor-set-methods.js | 8 +- ...tream-transform-objectmode-falsey-value.js | 8 +- .../test-stream-transform-split-objectmode.js | 8 +- .../test-stream-unshift-empty-chunk.js | 8 +- .../browser/test-stream-unshift-read-race.js | 11 +- ...stream-writable-change-default-encoding.js | 93 +- ...stream-writable-constructor-set-methods.js | 8 +- .../test-stream-writable-decoded-encoding.js | 69 +- src/test/browser/test-stream-writev.js | 13 +- ...est-stream2-base64-single-char-read-end.js | 8 +- .../browser/test-stream2-compatibility.js | 8 +- .../browser/test-stream2-large-read-stall.js | 8 +- src/test/browser/test-stream2-objects.js | 419 ++++----- .../test-stream2-pipe-error-handling.js | 156 ++-- .../test-stream2-pipe-error-once-listener.js | 8 +- src/test/browser/test-stream2-push.js | 8 +- ...st-stream2-readable-empty-buffer-no-eof.js | 155 ++-- .../test-stream2-readable-from-list.js | 83 +- .../test-stream2-readable-legacy-drain.js | 8 +- .../test-stream2-readable-non-empty-end.js | 8 +- .../test-stream2-readable-wrap-empty.js | 8 +- .../browser/test-stream2-readable-wrap.js | 151 ++-- src/test/browser/test-stream2-set-encoding.js | 555 ++++++------ src/test/browser/test-stream2-transform.js | 765 ++++++++--------- src/test/browser/test-stream2-unpipe-drain.js | 8 +- src/test/browser/test-stream2-writable.js | 627 +++++++------- .../browser/test-stream3-pause-then-read.js | 8 +- src/util.js | 101 ++- test/browser/fixtures/index.html | 72 ++ .../browser/fixtures/rollup.browser.config.js | 26 + test/browser/runner.mjs | 109 +++ test/browser/symbols.js | 6 + test/browser/test-browser.js | 126 +++ test/browser/test-stream-big-packet.js | 10 +- 
test/browser/test-stream-big-push.js | 10 +- test/browser/test-stream-duplex.js | 10 +- test/browser/test-stream-end-paused.js | 10 +- test/browser/test-stream-finished.js | 101 +-- test/browser/test-stream-ispaused.js | 10 +- test/browser/test-stream-pipe-after-end.js | 10 +- .../browser/test-stream-pipe-cleanup-pause.js | 10 +- test/browser/test-stream-pipe-cleanup.js | 10 +- .../test-stream-pipe-error-handling.js | 178 ++-- test/browser/test-stream-pipe-event.js | 10 +- .../test-stream-pipe-without-listenerCount.js | 10 +- test/browser/test-stream-pipeline.js | 165 ++-- test/browser/test-stream-push-order.js | 10 +- test/browser/test-stream-push-strings.js | 10 +- ...stream-readable-constructor-set-methods.js | 10 +- test/browser/test-stream-readable-event.js | 143 ++-- test/browser/test-stream-sync-write.js | 10 +- ...tream-transform-constructor-set-methods.js | 10 +- ...tream-transform-objectmode-falsey-value.js | 10 +- .../test-stream-transform-split-objectmode.js | 10 +- .../test-stream-unshift-empty-chunk.js | 10 +- test/browser/test-stream-unshift-read-race.js | 13 +- ...stream-writable-change-default-encoding.js | 109 +-- ...stream-writable-constructor-set-methods.js | 10 +- .../test-stream-writable-decoded-encoding.js | 69 +- test/browser/test-stream-writev.js | 15 +- ...est-stream2-base64-single-char-read-end.js | 10 +- test/browser/test-stream2-compatibility.js | 10 +- test/browser/test-stream2-large-read-stall.js | 13 +- test/browser/test-stream2-objects.js | 533 ++++++------ .../test-stream2-pipe-error-handling.js | 162 ++-- .../test-stream2-pipe-error-once-listener.js | 10 +- test/browser/test-stream2-push.js | 10 +- ...st-stream2-readable-empty-buffer-no-eof.js | 179 ++-- .../test-stream2-readable-from-list.js | 135 +-- .../test-stream2-readable-legacy-drain.js | 10 +- .../test-stream2-readable-non-empty-end.js | 10 +- .../test-stream2-readable-wrap-empty.js | 10 +- test/browser/test-stream2-readable-wrap.js | 177 ++-- test/browser/test-stream2-set-encoding.js | 525 ++++++------ test/browser/test-stream2-transform.js | 803 +++++++++--------- test/browser/test-stream2-unpipe-drain.js | 10 +- test/browser/test-stream2-writable.js | 623 +++++++------- test/browser/test-stream3-pause-then-read.js | 11 +- test/common/fixtures.mjs | 18 +- test/common/index.js | 33 +- test/common/index.mjs | 10 +- test/parallel/test-stream-add-abort-signal.js | 18 +- test/parallel/test-stream-asIndexedPairs.mjs | 100 +-- test/parallel/test-stream-drop-take.js | 20 +- test/parallel/test-stream-duplex-destroy.js | 18 +- test/parallel/test-stream-duplex-from.js | 8 +- test/parallel/test-stream-filter.js | 18 +- test/parallel/test-stream-finished.js | 18 +- test/parallel/test-stream-flatMap.js | 19 +- test/parallel/test-stream-forEach.js | 18 +- ...-stream-iterator-helpers-test262-tests.mjs | 159 ++-- test/parallel/test-stream-readable-destroy.js | 18 +- test/parallel/test-stream-reduce.js | 18 +- test/parallel/test-stream-some-find-every.mjs | 258 +++--- test/parallel/test-stream-toArray.js | 18 +- ...st-stream-transform-split-highwatermark.js | 1 + ...stream-writable-change-default-encoding.js | 2 + test/parallel/test-stream-writable-destroy.js | 18 +- test/parallel/test-stream3-pause-then-read.js | 1 + 145 files changed, 5706 insertions(+), 4488 deletions(-) delete mode 100644 .airtap.yml create mode 100644 build/headers.mjs create mode 100644 src/test/browser/fixtures/index.html create mode 100644 src/test/browser/fixtures/rollup.browser.config.js create mode 100644 src/test/browser/runner.mjs 
create mode 100644 src/test/browser/symbols.js create mode 100644 src/test/browser/test-browser.js create mode 100644 test/browser/fixtures/index.html create mode 100644 test/browser/fixtures/rollup.browser.config.js create mode 100644 test/browser/runner.mjs create mode 100644 test/browser/symbols.js create mode 100644 test/browser/test-browser.js diff --git a/.airtap.yml b/.airtap.yml deleted file mode 100644 index 86bbb2e747..0000000000 --- a/.airtap.yml +++ /dev/null @@ -1,39 +0,0 @@ -presets: - all: - providers: - - airtap-playwright - browsers: - - name: chromium - - name: firefox - - name: webkit - - name: chromium - options: - launch: - channel: msedge - - chrome: - providers: - - airtap-playwright - browsers: - - name: chromium - - firefox: - providers: - - airtap-playwright - browsers: - - name: firefox - - safari: - providers: - - airtap-playwright - browsers: - - name: webkit - - edge: - providers: - - airtap-playwright - browsers: - - name: chromium - options: - launch: - channel: msedge diff --git a/.eslintrc.js b/.eslintrc.js index 7b93eee375..76499b9ad6 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -1,4 +1,7 @@ module.exports = { + parserOptions: { + ecmaVersion: 'latest' + }, extends: ['standard'], rules: { /* @@ -7,5 +10,14 @@ module.exports = { */ 'space-before-function-paren': 0, curly: [2, 'all'] - } + }, + overrides: [ + { + files: ['**/*.mjs'], + parserOptions: { + ecmaVersion: 'latest', + sourceType: 'module' + } + } + ] } diff --git a/.github/workflows/browsers.yml b/.github/workflows/browsers.yml index 4b7146a51e..98603d92da 100644 --- a/.github/workflows/browsers.yml +++ b/.github/workflows/browsers.yml @@ -11,6 +11,7 @@ jobs: matrix: os: ['ubuntu-latest', 'windows-latest', 'macos-latest'] browser: ['chrome', 'firefox', 'safari', 'edge'] + bundler: ['rollup'] exclude: - os: ubuntu-latest browser: safari @@ -32,5 +33,7 @@ jobs: run: npm install - name: Install Browser run: ./node_modules/.bin/playwright install ${{ fromJSON('{"chrome":"chromium","edge":"msedge","firefox":"firefox","safari":"webkit"}')[matrix.browser] }} + - name: Bundle code + run: npm run test:browsers:prepare:${{ matrix.bundler }} - name: Run Tests on Browsers - run: ./node_modules/.bin/airtap -p ${{ matrix.browser }} test/browser/test-*.js + run: npm run test:browsers ${{ matrix.browser }} ${{ matrix.bundler }} diff --git a/.gitignore b/.gitignore index 0b6469a5c9..dd386b93a3 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ coverage/ node_modules/ node-*.tar.gz package-lock.json +tmp/ \ No newline at end of file diff --git a/build/build.mjs b/build/build.mjs index 4123afda5b..c94a886754 100644 --- a/build/build.mjs +++ b/build/build.mjs @@ -10,6 +10,7 @@ import { request } from 'undici' import prettierConfig from '../prettier.config.cjs' import { aliases, skippedSources, sources } from './files.mjs' import { footers } from './footers.mjs' +import { headers } from './headers.mjs' import { replacements } from './replacements.mjs' const baseMatcher = /^(?:lib|test)/ @@ -67,6 +68,7 @@ async function extract(nodeVersion, tarFile) { async function processFiles(contents) { const replacementsKeys = Object.keys(replacements) + const headersKeys = Object.keys(headers) const footersKeys = Object.keys(footers) prettierConfig.parser = 'babel' @@ -74,6 +76,7 @@ async function processFiles(contents) { for (let [path, content] of contents) { const modifications = [] const matchingReplacements = replacementsKeys.filter((k) => new RegExp(k).test(path)) + const matchingHeaders = headersKeys.filter((k) 
=> new RegExp(k).test(path)) const matchingFooters = footersKeys.filter((k) => new RegExp(k).test(path)) // Perform replacements @@ -87,7 +90,18 @@ async function processFiles(contents) { } } - // Append trailers + // Prepend headers + if (matchingHeaders.length) { + modifications.push(highlightFile('headers', 33)) + + for (const footerKey of matchingHeaders) { + for (const header of headers[footerKey]) { + content = header + content + } + } + } + + // Append footers if (matchingFooters.length) { modifications.push(highlightFile('footers', 33)) @@ -101,7 +115,6 @@ async function processFiles(contents) { // Process the file through babel and prettier if (path.endsWith('.js')) { modifications.push(highlightFile('babel', 33), highlightFile('prettier', 33)) - console.log(prettierConfig) content = prettier.format(await transform(content).code.replaceAll('void 0', 'undefined'), prettierConfig) } @@ -176,9 +189,17 @@ async function main() { } for (const file of await readdir('src/test/browser')) { + if (file.endsWith('fixtures')) { + continue + } + contents.push([`test/browser/${file}`, await readFile(`src/test/browser/${file}`, 'utf-8')]) } + for (const file of await readdir('src/test/browser/fixtures')) { + contents.push([`test/browser/fixtures/${file}`, await readFile(`src/test/browser/fixtures/${file}`, 'utf-8')]) + } + contents.push(['README.md', await readFile('./README.md', 'utf-8')]) // Create paths diff --git a/build/footers.mjs b/build/footers.mjs index 1980fbb7ed..d9316811e6 100644 --- a/build/footers.mjs +++ b/build/footers.mjs @@ -1,37 +1,3 @@ -const testPolyfills = ` - /* replacement start */ - if (typeof Blob === 'undefined') { - let { Blob } = require('buffer') - - if (typeof Blob === 'undefined') { - Blob = require('blob-polyfill').Blob - } - - globalThis.Blob = Blob - allowGlobals(Blob) - } - - if (typeof EventTarget === 'undefined') { - globalThis.EventTarget = require('event-target-shim').EventTarget; - } - - if (typeof AbortController === 'undefined') { - globalThis.AbortController = require('abort-controller').AbortController; - } - - if (typeof AbortSignal === 'undefined') { - globalThis.AbortSignal = require('abort-controller').AbortSignal; - - globalThis.AbortSignal.abort = function() { - const controller = new AbortController(); - controller.abort(); - - return controller.signal; - } - } - /* replacement end */ -` - const testTicksDisableHook = ` /* replacement start */ process.on('beforeExit', (code) => { @@ -53,7 +19,6 @@ const testParallel = ` ` export const footers = { - 'test/common/index.js': testPolyfills, 'test/parallel/test-stream-writable-samecb-singletick.js': testTicksDisableHook, 'test/parallel/.+': testParallel } diff --git a/build/headers.mjs b/build/headers.mjs new file mode 100644 index 0000000000..12ef62db47 --- /dev/null +++ b/build/headers.mjs @@ -0,0 +1,21 @@ +const testPolyfills = ` + /* replacement start */ + const AbortController = globalThis.AbortController || require('abort-controller').AbortController; + const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal; + const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget; + + if(typeof AbortSignal.abort !== 'function') { + AbortSignal.abort = function() { + const controller = new AbortController(); + controller.abort(); + + return controller.signal; + } + } + /* replacement end */ +` + +export const headers = { + 
'test/parallel/test-stream-(add-abort-signal|drop-take|duplex-destroy|flatMap|forEach|filter|finished|readable-destroy|reduce|toArray|writable-destroy).js': + [testPolyfills] +} diff --git a/build/replacements.mjs b/build/replacements.mjs index aaaaab365c..0933cd1318 100644 --- a/build/replacements.mjs +++ b/build/replacements.mjs @@ -1,13 +1,28 @@ const legacyStreamsRequireStream = ["require\\('stream'\\)", "require('./stream')"] -const internalStreamsBlob = [ - "require\\('../blob'\\);", +const internalStreamsBufferPolyfill = [ + "'use strict'", ` - { - isBlob(b) { - return b instanceof Blob - } - } + 'use strict' + + const bufferModule = require('buffer'); + ` +] + +const internalStreamsAbortControllerPolyfill = [ + "'use strict'", + ` + 'use strict' + + const abortControllerModule = require('abort-controller'); + ` +] + +const internalStreamsNoRequireBlob = [ + "const \\{\\n isBlob,\\n\\} = require\\('internal/blob'\\);", + ` + const Blob = globalThis.Blob || bufferModule.Blob; + const isBlob = typeof Blob !== 'undefined' ? function isBlob (b) { return b instanceof Blob } : function isBlob(b) { return false; } ` ] @@ -15,17 +30,17 @@ const internalStreamsInspectCustom = ['inspect.custom', "Symbol.for('nodejs.util const internalStreamsNoRequireAbortController = [ 'const \\{ AbortController \\} = .+', - ` - if (typeof AbortController === 'undefined') { - globalThis.AbortController = require('abort-controller').AbortController; - } - ` + 'const AbortController = globalThis.AbortController || abortControllerModule.AbortController;' ] const internalStreamsRequireInternal = ["require\\('internal/([^']+)'\\)", "require('../$1')"] +const internalStreamsNoRequireBuffer = ["const \\{ Buffer \\} = require\\('buffer'\\);", ''] + const internalStreamsRequireErrors = ["require\\('internal/errors'\\)", "require('../../ours/errors')"] +const internalStreamsRequireEventEmitter = ['const EE =', 'const { EventEmitter: EE } ='] + const internalStreamsRequirePrimordials = ['= primordials', "= require('../../ours/primordials')"] const internalStreamsRequireRelativeUtil = [ @@ -35,11 +50,16 @@ const internalStreamsRequireRelativeUtil = [ const internalStreamsRequireRelativeDuplex = ['instanceof Stream.Duplex', "instanceof require('./duplex')"] +const internalStreamsRequireStream = ["require\\('stream'\\)", "require('../../stream')"] + const internalStreamsRequireStreams = ["require\\('internal/streams/([^']+)'\\)", "require('./$1')"] -const internalStreamsRequireUtil = ["require\\('internal/util(?:/(?:debuglog|inspect))?'\\)", "require('util')"] +const internalStreamsRequireUtil = [ + "require\\('internal/util(?:/(?:debuglog|inspect))?'\\)", + "require('../../ours/util')" +] -const internalStreamsRequireUtilDebuglog = ["require\\('internal/util/debuglog'\\)", "require('util')"] +const internalStreamsRequireUtilDebuglog = ["require\\('internal/util/debuglog'\\)", "require('../../ours/util')"] const internalStreamsRequireWebStream = ["require\\('internal/webstreams/adapters'\\)", '{}'] @@ -72,7 +92,7 @@ const internalValidatorsRequirePrimordials = ['= primordials', "= require('../ou const internalValidatorsRequireRelativeUtil = ["require\\('internal/util'\\)", "require('../ours/util')"] -const internalValidatorsRequireUtilTypes = ["require\\('internal/util/types'\\)", "require('util').types"] +const internalValidatorsRequireUtilTypes = ["require\\('internal/util/types'\\)", "require('../ours/util').types"] const streamIndexIsUint8Array = [ "Stream._isUint8Array = 
require\\('internal/util/types'\\).isUint8Array;", @@ -93,7 +113,7 @@ const streamIndexRequirePrimordials = ['= primordials', "= require('./ours/primo const streamIndexRequirePromises = ["require\\('stream/promises'\\);", "require('./stream/promises');"] -const streamIndexRequireUtil = ["require\\('internal/util'\\)", "require('util')"] +const streamIndexRequireUtil = ["require\\('internal/util'\\)", "require('./ours/util')"] const streamIndexUint8ArrayToBuffer = ['new internalBuffer.FastBuffer', 'Buffer.from'] @@ -107,7 +127,7 @@ const testCommonKnownGlobals = [ 'let knownGlobals = \\[(\\n\\s+)', ` let knownGlobals = [\n - typeof AggregateError !== 'undefined' ? AggregateError : require('aggregate-error'), + typeof AggregateError !== 'undefined' ? AggregateError : require('../../lib/ours/util').AggregateError, typeof AbortController !== 'undefined' ? AbortController : require('abort-controller').AbortController, typeof AbortSignal !== 'undefined' ? AbortSignal : require('abort-controller').AbortSignal, typeof EventTarget !== 'undefined' ? EventTarget : require('event-target-shim').EventTarget, @@ -136,14 +156,13 @@ const testParallelImportStreamInMjs = [" from 'stream';", "from '../../lib/ours/ const testParallelImportTapInMjs = ["(from 'assert';)", "$1\nimport tap from 'tap';"] const testParallelDuplexFromBlob = [ - "const \\{ Blob \\} = require\\('buffer'\\)", - ` - let {Blob} = require('buffer'); + "const \\{ Blob \\} = require\\('buffer'\\);", + "const Blob = globalThis.Blob || require('buffer').Blob" +] - if (typeof Blob === 'undefined') { - Blob = require('blob-polyfill').Blob; - } - ` +const testParallelDuplexSkipWithoutBlob = [ + "(\\{\n const blob = new Blob\\(\\['blob'\\]\\))", + "if (typeof Blob !== 'undefined') $1" ] const testParallelFinishedEvent = ["res.on\\('close", "res.on('finish"] @@ -204,19 +223,30 @@ const readmeLink = ['(\\[Node.js website\\]\\(https://nodejs.org/dist/v)(\\d+.\\ export const replacements = { 'lib/_stream.+': [legacyStreamsRequireStream], + 'lib/internal/streams/duplexify.+': [ + internalStreamsBufferPolyfill, + internalStreamsAbortControllerPolyfill, + internalStreamsNoRequireBlob, + internalStreamsNoRequireAbortController + ], + 'lib/internal/streams/(operators|pipeline).+': [ + internalStreamsAbortControllerPolyfill, + internalStreamsNoRequireAbortController + ], 'lib/internal/streams/.+': [ - internalStreamsNoRequireAbortController, + internalStreamsNoRequireBuffer, internalStreamsRequireErrors, + internalStreamsRequireEventEmitter, internalStreamsRequirePrimordials, internalStreamsRequireRelativeDuplex, internalStreamsRequireRelativeUtil, + internalStreamsRequireStream, internalStreamsRequireStreams, internalStreamsRequireUtil, internalStreamsRequireUtilDebuglog, internalStreamsRequireWebStream, internalStreamsRequireInternal, internalStreamsWeakHandler, - internalStreamsBlob, internalStreamsInspectCustom ], 'lib/internal/validators.js': [ @@ -256,7 +286,7 @@ export const replacements = { testParallelSilentConsole, testParallelTimersPromises ], - 'test/parallel/test-stream-duplex-from.js': [testParallelDuplexFromBlob], + 'test/parallel/test-stream-duplex-from.js': [testParallelDuplexFromBlob, testParallelDuplexSkipWithoutBlob], 'test/parallel/test-stream-finished.js': [testParallelFinishedEvent], 'test/parallel/test-stream-flatMap.js': [testParallelFlatMapWinLineSeparator], 'test/parallel/test-stream-preprocess.js': [testParallelPreprocessWinLineSeparator], diff --git a/lib/internal/streams/buffer_list.js b/lib/internal/streams/buffer_list.js 
index 01fd48ecf3..6a37442bae 100644 --- a/lib/internal/streams/buffer_list.js +++ b/lib/internal/streams/buffer_list.js @@ -2,9 +2,7 @@ const { StringPrototypeSlice, SymbolIterator, TypedArrayPrototypeSet, Uint8Array } = require('../../ours/primordials') -const { Buffer } = require('buffer') - -const { inspect } = require('util') +const { inspect } = require('../../ours/util') module.exports = class BufferList { constructor() { diff --git a/lib/internal/streams/duplexify.js b/lib/internal/streams/duplexify.js index 085729cfde..5250ad0195 100644 --- a/lib/internal/streams/duplexify.js +++ b/lib/internal/streams/duplexify.js @@ -1,5 +1,9 @@ 'use strict' +const abortControllerModule = require('abort-controller') + +const bufferModule = require('buffer') + const { isReadable, isWritable, @@ -27,15 +31,16 @@ const { createDeferredPromise } = require('../../ours/util') const from = require('./from') -const { isBlob } = { - isBlob(b) { - return b instanceof Blob - } -} - -if (typeof AbortController === 'undefined') { - globalThis.AbortController = require('abort-controller').AbortController -} +const Blob = globalThis.Blob || bufferModule.Blob +const isBlob = + typeof Blob !== 'undefined' + ? function isBlob(b) { + return b instanceof Blob + } + : function isBlob(b) { + return false + } +const AbortController = globalThis.AbortController || abortControllerModule.AbortController const { FunctionPrototypeCall } = require('../../ours/primordials') // This is needed for pre node 17. diff --git a/lib/internal/streams/from.js b/lib/internal/streams/from.js index 93fa992888..6d40ac0aeb 100644 --- a/lib/internal/streams/from.js +++ b/lib/internal/streams/from.js @@ -2,8 +2,6 @@ const { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = require('../../ours/primordials') -const { Buffer } = require('buffer') - const { ERR_INVALID_ARG_TYPE, ERR_STREAM_NULL_VALUES } = require('../../ours/errors').codes function from(Readable, iterable, opts) { diff --git a/lib/internal/streams/lazy_transform.js b/lib/internal/streams/lazy_transform.js index 12bdff88b9..06073b0d13 100644 --- a/lib/internal/streams/lazy_transform.js +++ b/lib/internal/streams/lazy_transform.js @@ -5,7 +5,7 @@ const { ObjectDefineProperties, ObjectDefineProperty, ObjectSetPrototypeOf } = require('../../ours/primordials') -const stream = require('stream') +const stream = require('../../stream') const { getDefaultEncoding } = require('../crypto/util') diff --git a/lib/internal/streams/legacy.js b/lib/internal/streams/legacy.js index 3c95468704..09c3b72013 100644 --- a/lib/internal/streams/legacy.js +++ b/lib/internal/streams/legacy.js @@ -2,7 +2,7 @@ const { ArrayIsArray, ObjectSetPrototypeOf } = require('../../ours/primordials') -const EE = require('events') +const { EventEmitter: EE } = require('events') function Stream(opts) { EE.call(this, opts) diff --git a/lib/internal/streams/operators.js b/lib/internal/streams/operators.js index cf0f7b3b9a..fdea0ef709 100644 --- a/lib/internal/streams/operators.js +++ b/lib/internal/streams/operators.js @@ -1,8 +1,8 @@ 'use strict' -if (typeof AbortController === 'undefined') { - globalThis.AbortController = require('abort-controller').AbortController -} +const abortControllerModule = require('abort-controller') + +const AbortController = globalThis.AbortController || abortControllerModule.AbortController const { codes: { ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE }, diff --git a/lib/internal/streams/pipeline.js b/lib/internal/streams/pipeline.js index 3deb0f2d9f..4e6e82e1c6 
100644 --- a/lib/internal/streams/pipeline.js +++ b/lib/internal/streams/pipeline.js @@ -2,6 +2,8 @@ // permission from the author, Mathias Buus (@mafintosh). 'use strict' +const abortControllerModule = require('abort-controller') + const { ArrayIsArray, Promise, SymbolAsyncIterator } = require('../../ours/primordials') const eos = require('./end-of-stream') @@ -22,10 +24,7 @@ const { validateFunction, validateAbortSignal } = require('../validators') const { isIterable, isReadable, isReadableNodeStream, isNodeStream } = require('./utils') -if (typeof AbortController === 'undefined') { - globalThis.AbortController = require('abort-controller').AbortController -} - +const AbortController = globalThis.AbortController || abortControllerModule.AbortController let PassThrough let Readable diff --git a/lib/internal/streams/readable.js b/lib/internal/streams/readable.js index f8b899a623..9c263c8a6e 100644 --- a/lib/internal/streams/readable.js +++ b/lib/internal/streams/readable.js @@ -37,17 +37,15 @@ const { module.exports = Readable Readable.ReadableState = ReadableState -const EE = require('events') +const { EventEmitter: EE } = require('events') const { Stream, prependListener } = require('./legacy') -const { Buffer } = require('buffer') - const { addAbortSignal } = require('./add-abort-signal') const eos = require('./end-of-stream') -let debug = require('util').debuglog('stream', (fn) => { +let debug = require('../../ours/util').debuglog('stream', (fn) => { debug = fn }) diff --git a/lib/internal/streams/writable.js b/lib/internal/streams/writable.js index 6cb99f3a0a..ac97ec0d17 100644 --- a/lib/internal/streams/writable.js +++ b/lib/internal/streams/writable.js @@ -38,12 +38,10 @@ const { module.exports = Writable Writable.WritableState = WritableState -const EE = require('events') +const { EventEmitter: EE } = require('events') const Stream = require('./legacy').Stream -const { Buffer } = require('buffer') - const destroyImpl = require('./destroy') const { addAbortSignal } = require('./add-abort-signal') diff --git a/lib/internal/validators.js b/lib/internal/validators.js index c84abf40a7..11689d2ba4 100644 --- a/lib/internal/validators.js +++ b/lib/internal/validators.js @@ -22,7 +22,7 @@ const { const { normalizeEncoding } = require('../ours/util') -const { isAsyncFunction, isArrayBufferView } = require('util').types +const { isAsyncFunction, isArrayBufferView } = require('../ours/util').types const signals = {} diff --git a/lib/ours/errors.js b/lib/ours/errors.js index 355cfd019c..fab32d2549 100644 --- a/lib/ours/errors.js +++ b/lib/ours/errors.js @@ -1,4 +1,6 @@ 'use strict' + +const { format, inspect, AggregateError: CustomAggregateError } = require('./util') /* This file is a reduced and adapted version of the main lib/internal/errors.js file defined at @@ -8,14 +10,7 @@ with the upstream file. */ -if (typeof AggregateError === 'undefined') { - globalThis.AggregateError = require('aggregate-error') -} - -const assert = require('assert') - -const { inspect, format } = require('util') - +const AggregateError = globalThis.AggregateError || CustomAggregateError const kIsNodeError = Symbol('kIsNodeError') const kTypes = [ 'string', @@ -30,7 +25,13 @@ const kTypes = [ ] const classRegExp = /^([A-Z][a-z0-9]*)+$/ const nodeInternalPrefix = '__node_internal_' -const codes = {} // Only use this for integers! Decimal numbers do not work with this function. 
+const codes = {} + +function assert(value, message) { + if (!value) { + throw new codes.ERR_INTERNAL_ASSERTION(message) + } +} // Only use this for integers! Decimal numbers do not work with this function. function addNumericalSeparator(val) { let res = '' @@ -130,6 +131,7 @@ class AbortError extends Error { } } +E('ERR_ASSERTION', '%s', Error) E( 'ERR_INVALID_ARG_TYPE', (name, expected, actual) => { diff --git a/lib/ours/util.js b/lib/ours/util.js index 5d39c25869..275ac68ce1 100644 --- a/lib/ours/util.js +++ b/lib/ours/util.js @@ -1,18 +1,44 @@ 'use strict' +const bufferModule = require('buffer') + const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor +const Blob = globalThis.Blob || bufferModule.Blob +/* eslint-disable indent */ + +const isBlob = + typeof Blob !== 'undefined' + ? function isBlob(b) { + // eslint-disable-next-line indent + return b instanceof Blob + } + : function isBlob(b) { + return false + } +/* eslint-enable indent */ +// This is a simplified version of AggregateError -if (typeof Blob === 'undefined') { - let { Blob } = require('buffer') +class AggregateError extends Error { + constructor(errors) { + if (!Array.isArray(errors)) { + throw new TypeError(`Expected input to be an Array, got ${typeof errors}`) + } - if (typeof Blob === 'undefined') { - Blob = require('blob-polyfill').Blob - } + let message = '' - globalThis.Blob = Blob + for (let i = 0; i < errors.length; i++) { + message += ` ${errors[i].stack}\n` + } + + super(message) + this.name = 'AggregateError' + this.errors = errors + } } module.exports = { + AggregateError, + once(callback) { let called = false return function (...args) { @@ -40,7 +66,18 @@ module.exports = { } }, - // All following functions are just used in browser + promisify(fn) { + return new Promise((resolve, reject) => { + fn((err, ...args) => { + if (err) { + return reject(err) + } + + return resolve(...args) + }) + }) + }, + debuglog() { return function () {} }, @@ -54,25 +91,50 @@ module.exports = { return replacement.toFixed(6) } else if (type === 'j') { return JSON.stringify(replacement) + } else if (type === 's' && typeof replacement === 'object') { + const ctor = replacement.constructor !== Object ? 
replacement.constructor.name : '' + return `${ctor} {}`.trim() } else { return replacement.toString() } }) }, - promisify(fn) { - return new Promise((resolve, reject) => { - fn((err, ...args) => { - if (err) { - return reject(err) + inspect(value) { + // Vastly simplified version of https://nodejs.org/api/util.html#utilinspectobject-options + switch (typeof value) { + case 'string': + if (value.includes("'")) { + if (!value.includes('"')) { + return `"${value}"` + } else if (!value.includes('`') && !value.includes('${')) { + return `\`${value}\`` + } } - return resolve(...args) - }) - }) + return `'${value}'` + + case 'number': + if (isNaN(value)) { + return 'NaN' + } else if (Object.is(value, -0)) { + return String(value) + } + + return value + + case 'bigint': + return `${String(value)}n` + + case 'boolean': + case 'undefined': + return String(value) + + case 'object': + return '{}' + } }, - inspect: require('object-inspect'), types: { isAsyncFunction(fn) { return fn instanceof AsyncFunction @@ -82,10 +144,6 @@ module.exports = { return ArrayBuffer.isView(arr) } }, - - isBlob(blob) { - // eslint-disable-next-line no-undef - return blob instanceof Blob - } + isBlob } module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom') diff --git a/lib/stream.js b/lib/stream.js index 8f7ed9d70d..3112086306 100644 --- a/lib/stream.js +++ b/lib/stream.js @@ -24,7 +24,7 @@ const { ObjectDefineProperty, ObjectKeys, ReflectApply } = require('./ours/primo const { promisify: { custom: customPromisify } -} = require('util') +} = require('./ours/util') const { streamReturningOperators, promiseReturningOperators } = require('./internal/streams/operators') diff --git a/package.json b/package.json index 433adc4be6..ef8eeac8ae 100644 --- a/package.json +++ b/package.json @@ -36,35 +36,40 @@ "build": "node build/build.mjs", "postbuild": "prettier -w lib test", "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", - "test:browsers": "airtap -p all test/browser/test-*.js", + "test:browsers": "node test/browser/runner.mjs", + "test:browsers:prepare:rollup": "rm -rf tmp/rollup && rollup -c test/browser/fixtures/rollup.browser.config.js && cp test/browser/fixtures/index.html tmp/rollup", "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", "format": "prettier -w src lib test", "lint": "eslint src" }, "dependencies": { - "abort-controller": "^3.0.0", - "aggregate-error": "^3.1.0", - "blob-polyfill": "^7.0.20220408", - "object-inspect": "^1.12.0" + "abort-controller": "^3.0.0" }, "devDependencies": { - "@babel/core": "^7.17.9", + "@babel/core": "^7.17.10", "@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7", "@babel/plugin-proposal-optional-chaining": "^7.16.7", - "@sinonjs/fake-timers": "^9.1.1", - "airtap": "^4.0.4", - "airtap-playwright": "^1.0.1", - "c8": "^7.11.0", - "eslint": "^7.32.0", - "eslint-config-standard": "^16.0.3", + "@rollup/plugin-commonjs": "^22.0.0", + "@rollup/plugin-inject": "^4.0.4", + "@rollup/plugin-node-resolve": "^13.3.0", + "@sinonjs/fake-timers": "^9.1.2", + "buffer-es6": "^4.9.3", + "c8": "^7.11.2", + "eslint": "^8.15.0", + "eslint-config-standard": "^17.0.0", "eslint-plugin-import": "^2.26.0", - "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^5.0.0", + "eslint-plugin-n": "^15.2.0", + "eslint-plugin-promise": "^6.0.0", + "playwright": "^1.21.1", "prettier": "^2.6.2", - "tap": "^16.0.1", - "tape": "^5.5.2", + "process-es6": "^0.11.6", + "rollup": "^2.72.1", + 
"rollup-plugin-polyfill-node": "^0.9.0", + "tap": "^16.2.0", + "tap-mocha-reporter": "^5.0.3", + "tape": "^5.5.3", "tar": "^6.1.11", - "undici": "^5.0.0" + "undici": "^5.1.1" }, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" diff --git a/src/errors.js b/src/errors.js index 07948aa1d0..84dc5c2c9a 100644 --- a/src/errors.js +++ b/src/errors.js @@ -1,5 +1,7 @@ 'use strict' +const { format, inspect, AggregateError: CustomAggregateError } = require('./util') + /* This file is a reduced and adapted version of the main lib/internal/errors.js file defined at @@ -9,12 +11,7 @@ with the upstream file. */ -if (typeof AggregateError === 'undefined') { - globalThis.AggregateError = require('aggregate-error') -} - -const assert = require('assert') -const { inspect, format } = require('util') +const AggregateError = globalThis.AggregateError || CustomAggregateError const kIsNodeError = Symbol('kIsNodeError') const kTypes = [ @@ -33,6 +30,12 @@ const classRegExp = /^([A-Z][a-z0-9]*)+$/ const nodeInternalPrefix = '__node_internal_' const codes = {} +function assert(value, message) { + if (!value) { + throw new codes.ERR_INTERNAL_ASSERTION(message) + } +} + // Only use this for integers! Decimal numbers do not work with this function. function addNumericalSeparator(val) { let res = '' @@ -129,6 +132,8 @@ class AbortError extends Error { } } +E('ERR_ASSERTION', '%s', Error) + E( 'ERR_INVALID_ARG_TYPE', (name, expected, actual) => { diff --git a/src/test/browser/fixtures/index.html b/src/test/browser/fixtures/index.html new file mode 100644 index 0000000000..603eef3c50 --- /dev/null +++ b/src/test/browser/fixtures/index.html @@ -0,0 +1,72 @@ + + + + + + +
+ + + + + diff --git a/src/test/browser/fixtures/rollup.browser.config.js b/src/test/browser/fixtures/rollup.browser.config.js new file mode 100644 index 0000000000..4305c839eb --- /dev/null +++ b/src/test/browser/fixtures/rollup.browser.config.js @@ -0,0 +1,27 @@ +import commonjs from '@rollup/plugin-commonjs' +import inject from '@rollup/plugin-inject' +import nodeResolve from '@rollup/plugin-node-resolve' +import { resolve } from 'node:path' +import nodePolyfill from 'rollup-plugin-polyfill-node' + +export default { + input: ['test/browser/test-browser.js'], + output: { + intro: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }', + file: 'tmp/rollup/suite.js', + format: 'iife', + name: 'readableStreamTestSuite' + }, + plugins: [ + commonjs(), + nodePolyfill(), + inject({ + process: resolve('node_modules/process-es6/browser.js'), + Buffer: [resolve('node_modules/buffer-es6/index.js'), 'Buffer'] + }), + nodeResolve({ + browser: true, + preferBuiltins: false + }) + ] +} diff --git a/src/test/browser/runner.mjs b/src/test/browser/runner.mjs new file mode 100644 index 0000000000..db7f0ac31b --- /dev/null +++ b/src/test/browser/runner.mjs @@ -0,0 +1,109 @@ +import { dirname, resolve } from 'node:path' +import { Readable } from 'node:stream' +import { chromium, firefox, webkit } from 'playwright' +import reporter from 'tap-mocha-reporter' +import Parser from 'tap-parser' + +const validBrowsers = ['chrome', 'firefox', 'safari', 'edge'] +const validBundlers = ['browserify', 'webpack', 'rollup'] + +function parseEnviroment() { + const headless = process.env.HEADLESS !== 'false' + const reporter = process.env.SKIP_REPORTER !== 'true' + + let [browser, bundler] = process.argv.slice(2, 4) + + if (!browser) { + browser = process.env.BROWSER + } + + if (!bundler) { + bundler = process.env.BUNDLER + } + + if (!validBrowsers.includes(browser) || !validBundlers.includes(bundler)) { + console.error('Usage: node runner.mjs [chrome|firefox|safari|edge] [browserify|webpack|rollup]') + console.error('\nYou can also use the BROWSER and BUNDLER environment variables') + process.exit(1) + } + + return { browser, bundler, headless, reporter } +} + +function createBrowser({ browser: id, headless }) { + switch (id) { + case 'firefox': + return firefox.launch({ headless }) + case 'safari': + return webkit.launch({ headless }) + case 'edge': + return chromium.launch({ headless, channel: 'msedge' }) + default: + return chromium.launch({ headless }) + } +} + +function setupTape(page, configuration) { + const output = new Readable({ read() {} }) + const parser = new Parser({ strict: true }) + + output.pipe(parser) + + if (configuration.reporter) { + output.pipe(reporter('spec')) + } + + parser.on('line', (line) => { + if (line !== '# readable-stream-finished\n') { + if (line.startsWith('# not ok')) { + process.exitCode = 1 + } + + if (!configuration.reporter) { + console.log(line.replace(/\n$/, '')) + } + + return + } + + output.push(null) + + if (configuration.headless) { + browser.close() + } + }) + + page.on('console', (msg) => { + if (msg.type() === 'error') { + console.error(`\x1b[31m\x1b[1mconsole.error:\x1b[0m ${msg.text()}\n`) + return + } + + output.push(msg.text() + '\n') + }) + + // Firefox in headless mode is showing an error even if onerror caught it. 
Disable in that case + if (!configuration.headless || configuration.browser !== 'firefox') { + page.on('pageerror', (err) => { + console.log('\x1b[31m\x1b[1m--- The browser threw an uncaught error ---\x1b[0m') + console.log(err.stack) + + if (configuration.headless) { + console.log('\x1b[31m\x1b[1m--- Exiting with exit code 1 ---\x1b[0m') + process.exit(1) + } else { + process.exitCode = 1 + } + }) + } +} + +const configuration = parseEnviroment() +const browser = await createBrowser(configuration) +const page = await browser.newPage() +setupTape(page, configuration) + +// Execute the test suite +await page.goto( + `file://${resolve(dirname(new URL(import.meta.url).pathname), `../../tmp/${configuration.bundler}/index.html`)}` +) diff --git a/src/test/browser/symbols.js b/src/test/browser/symbols.js new file mode 100644 index 0000000000..8450b8f64c --- /dev/null +++ b/src/test/browser/symbols.js @@ -0,0 +1,6 @@ +'use strict' + +module.exports = { + kReadableStreamSuiteName: Symbol('readable-stream.suiteName'), + kReadableStreamSuiteHasMultipleTests: Symbol('readable-stream.suiteHasMultipleTests') +} diff --git a/src/test/browser/test-browser.js b/src/test/browser/test-browser.js new file mode 100644 index 0000000000..ade5ac9463 --- /dev/null +++ b/src/test/browser/test-browser.js @@ -0,0 +1,135 @@ +'use strict' + +const tape = require('tape') +const { createDeferredPromise } = require('../../lib/ours/util') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + +let totalTests = 0 +let completed = 0 +let failed = 0 + +async function test(rootName, fn) { + // Gather all tests in the file + const tests = {} + + function addTests(name, fn) { + tests[`${rootName} - ${name}`] = fn + } + + if (fn[kReadableStreamSuiteHasMultipleTests]) { + fn(addTests) + } else { + tests[rootName] = fn + } + + // Execute each test in a separate harness and then output overall results + for (const [name, subtest] of Object.entries(tests)) { + const currentIndex = ++totalTests + const harness = tape.createHarness() + const { promise, resolve } = createDeferredPromise() + + const messages = [`# Subtest: ${name}`] + + harness.createStream().on('data', function (row) { + if (row.startsWith('TAP version') || row.match(new RegExp(`^# (?:${name})`))) { + return + } + + messages.push(row.trim().replace(/^/gm, ' ')) + }) + + harness.onFinish(() => { + const success = harness._exitCode === 0 + + messages.push(`${success ? 'ok' : 'not ok'} ${currentIndex} - ${name}`) + console.log(messages.join('\n')) + completed++ + + if (!success) { + failed++ + } + + resolve() + }) + + harness(name, subtest) + + await promise + } +} + +async function runTests(suites) { + // Set up an interval + const interval = setInterval(() => { + if (completed < totalTests) { + return + } + + clearInterval(interval) + + console.log(`1..${totalTests}`) + console.log(`# tests ${totalTests}`) + console.log(`# pass ${completed - failed}`) + console.log(`# fail ${failed}`) + console.log(`# ${failed === 0 ?
'ok' : 'not ok'}`) + + // This line is used by the playwright script to detect we're done + console.log('# readable-stream-finished') + }, 100) + + // Execute each test serially, to avoid side-effects errors when dealing with global error handling + for (const suite of suites) { + await test(suite[kReadableStreamSuiteName], suite) + } +} + +// Important: Do not try to make the require dynamic because bundlers will not like it +runTests([ + require('./test-stream-big-packet'), + require('./test-stream-big-push'), + require('./test-stream-duplex'), + require('./test-stream-end-paused'), + require('./test-stream-finished'), + require('./test-stream-ispaused'), + require('./test-stream-pipe-after-end'), + require('./test-stream-pipe-cleanup-pause'), + require('./test-stream-pipe-cleanup'), + require('./test-stream-pipe-error-handling'), + require('./test-stream-pipe-event'), + require('./test-stream-pipe-without-listenerCount'), + require('./test-stream-pipeline'), + require('./test-stream-push-order'), + require('./test-stream-push-strings'), + require('./test-stream-readable-constructor-set-methods'), + require('./test-stream-readable-event'), + require('./test-stream-sync-write'), + require('./test-stream-transform-constructor-set-methods'), + require('./test-stream-transform-objectmode-falsey-value'), + require('./test-stream-transform-split-objectmode'), + require('./test-stream-unshift-empty-chunk'), + require('./test-stream-unshift-read-race'), + require('./test-stream-writable-change-default-encoding'), + require('./test-stream-writable-constructor-set-methods'), + require('./test-stream-writable-decoded-encoding'), + require('./test-stream-writev'), + require('./test-stream2-base64-single-char-read-end'), + require('./test-stream2-compatibility'), + require('./test-stream2-large-read-stall'), + require('./test-stream2-objects'), + require('./test-stream2-pipe-error-handling'), + require('./test-stream2-pipe-error-once-listener'), + require('./test-stream2-push'), + require('./test-stream2-readable-empty-buffer-no-eof'), + require('./test-stream2-readable-from-list'), + require('./test-stream2-readable-legacy-drain'), + require('./test-stream2-readable-non-empty-end'), + require('./test-stream2-readable-wrap-empty'), + require('./test-stream2-readable-wrap'), + require('./test-stream2-set-encoding'), + require('./test-stream2-transform'), + require('./test-stream2-unpipe-drain'), + require('./test-stream2-writable'), + require('./test-stream3-pause-then-read') +]).catch((e) => { + console.error(e) +}) diff --git a/src/test/browser/test-stream-big-packet.js b/src/test/browser/test-stream-big-packet.js index 5c096e5115..8859e4b441 100644 --- a/src/test/browser/test-stream-big-packet.js +++ b/src/test/browser/test-stream-big-packet.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') const inherits = require('inherits') const { Transform } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('big packet', function (t) { +module.exports = function (t) { t.plan(3) let passed = false @@ -65,4 +65,6 @@ test('big packet', function (t) { } return -1 } -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-big-packet' diff --git a/src/test/browser/test-stream-big-push.js b/src/test/browser/test-stream-big-push.js index 97a5cff3a6..08d8873f3a 100644 --- a/src/test/browser/test-stream-big-push.js +++ b/src/test/browser/test-stream-big-push.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { Readable } = 
require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('big push', function (t) { +module.exports = function (t) { t.plan(10) const str = 'asdfasdfasdfasdfasdf' @@ -67,4 +67,6 @@ test('big push', function (t) { t.ok(ended) t.equal(reads, 2) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-big-push' diff --git a/src/test/browser/test-stream-duplex.js b/src/test/browser/test-stream-duplex.js index d35e641c3d..1278591382 100644 --- a/src/test/browser/test-stream-duplex.js +++ b/src/test/browser/test-stream-duplex.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { Duplex } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('duplex', function (t) { +module.exports = function (t) { t.plan(4) const stream = new Duplex({ objectMode: true }) @@ -33,4 +33,6 @@ test('duplex', function (t) { stream.push({ val: 1 }) stream.end({ val: 2 }) stream.push(null) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-duplex' diff --git a/src/test/browser/test-stream-end-paused.js b/src/test/browser/test-stream-end-paused.js index 7a8909674f..76a98da510 100644 --- a/src/test/browser/test-stream-end-paused.js +++ b/src/test/browser/test-stream-end-paused.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { Readable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('end pause', function (t) { +module.exports = function (t) { t.plan(2) const stream = new Readable() @@ -27,4 +27,6 @@ test('end pause', function (t) { }) stream.resume() }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-end-paused' diff --git a/src/test/browser/test-stream-finished.js b/src/test/browser/test-stream-finished.js index 6c688c0a22..f9ddc907f9 100644 --- a/src/test/browser/test-stream-finished.js +++ b/src/test/browser/test-stream-finished.js @@ -1,65 +1,70 @@ 'use strict' -const test = require('tape') const { Writable, Readable, Transform, finished } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') -test('readable finished', function (t) { - t.plan(1) +module.exports = function (test) { + test('readable finished', function (t) { + t.plan(1) - const rs = new Readable({ - read: function read() {} - }) + const rs = new Readable({ + read: function read() {} + }) + + finished(rs, (err) => { + t.ifErr(err) + }) - finished(rs, (err) => { - t.ifErr(err) + rs.push(null) + rs.resume() }) - rs.push(null) - rs.resume() -}) + test('writable finished', function (t) { + t.plan(1) -test('writable finished', function (t) { - t.plan(1) + const ws = new Writable({ + write: function write(data, enc, cb) { + cb() + } + }) - const ws = new Writable({ - write: function write(data, enc, cb) { - cb() - } - }) + finished(ws, (err) => { + t.ifErr(err) + }) - finished(ws, (err) => { - t.ifErr(err) + ws.end() }) - ws.end() -}) + test('transform finished', function (t) { + t.plan(3) -test('transform finished', function (t) { - t.plan(3) + const tr = new Transform({ + transform: function transform(data, enc, cb) { + cb() + } + }) - const tr = new Transform({ - transform: function transform(data, enc, cb) { - cb() - } - }) + let finish = false + let ended = false - let finish = false - let ended = false + tr.on('end', function () { + ended = true + }) - tr.on('end', function () { - ended = true - }) + tr.on('finish', function () { + finish = true + }) - tr.on('finish', function () 
{ - finish = true - }) + finished(tr, (err) => { + t.ifErr(err) + t.ok(finish) + t.ok(ended) + }) - finished(tr, (err) => { - t.ifErr(err) - t.ok(finish) - t.ok(ended) + tr.end() + tr.resume() }) +} - tr.end() - tr.resume() -}) +module.exports[kReadableStreamSuiteName] = 'stream-finished' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream-ispaused.js b/src/test/browser/test-stream-ispaused.js index f9cf113f1c..36e55d7c9e 100644 --- a/src/test/browser/test-stream-ispaused.js +++ b/src/test/browser/test-stream-ispaused.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const stream = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('is paused', function (t) { +module.exports = function (t) { t.plan(4) const readable = new stream.Readable() @@ -24,4 +24,6 @@ test('is paused', function (t) { t.ok(readable.isPaused()) readable.resume() t.notOk(readable.isPaused()) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-ispaused' diff --git a/src/test/browser/test-stream-pipe-after-end.js b/src/test/browser/test-stream-pipe-after-end.js index dc33a4e645..13aac69313 100644 --- a/src/test/browser/test-stream-pipe-after-end.js +++ b/src/test/browser/test-stream-pipe-after-end.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') const inherits = require('inherits') const { Readable, Writable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('pipe after end', function (t) { +module.exports = function (t) { t.plan(4) function TestReadable(opt) { @@ -64,4 +64,6 @@ test('pipe after end', function (t) { piper.pipe(w) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-pipe-after-end' diff --git a/src/test/browser/test-stream-pipe-cleanup-pause.js b/src/test/browser/test-stream-pipe-cleanup-pause.js index 2ca267511b..53078d3b7b 100644 --- a/src/test/browser/test-stream-pipe-cleanup-pause.js +++ b/src/test/browser/test-stream-pipe-cleanup-pause.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const stream = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('pipe cleanup pause', function (t) { +module.exports = function (t) { t.plan(3) const reader = new stream.Readable() @@ -43,4 +43,6 @@ test('pipe cleanup pause', function (t) { reader.pipe(writer1) reader.push(buffer) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-pipe-cleanup-pause' diff --git a/src/test/browser/test-stream-pipe-cleanup.js b/src/test/browser/test-stream-pipe-cleanup.js index ee07304d6b..9dcf0dad90 100644 --- a/src/test/browser/test-stream-pipe-cleanup.js +++ b/src/test/browser/test-stream-pipe-cleanup.js @@ -2,11 +2,11 @@ // This test asserts that Stream.prototype.pipe does not leave listeners // hanging on the source or dest. 
-const test = require('tape') const inherits = require('inherits') const { Stream } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('pipe cleanup', function (t) { +module.exports = function (t) { t.plan(27) if (/^v0\.8\./.test(process.version)) { @@ -112,4 +112,6 @@ test('pipe cleanup', function (t) { t.equal(w.listeners('end').length, 0) t.equal(w.listeners('close').length, 0) d.end() -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-pipe-cleanup' diff --git a/src/test/browser/test-stream-pipe-error-handling.js b/src/test/browser/test-stream-pipe-error-handling.js index 55455805a9..7cbfbcabb4 100644 --- a/src/test/browser/test-stream-pipe-error-handling.js +++ b/src/test/browser/test-stream-pipe-error-handling.js @@ -1,105 +1,111 @@ 'use strict' -const test = require('tape') const { Readable, Writable, Stream } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') -test('Error Listener Catches', function (t) { - t.plan(1) +module.exports = function (test) { + test('Error Listener Catches', function (t) { + t.plan(1) - const source = new Stream() - const dest = new Stream() + const source = new Stream() + const dest = new Stream() - source._read = function () {} - source.pipe(dest) + source._read = function () {} + source.pipe(dest) - let gotErr = null - source.on('error', function (err) { - gotErr = err - }) - - const err = new Error('This stream turned into bacon.') - source.emit('error', err) - t.strictEqual(gotErr, err) -}) - -test('Error WithoutListener Throws', function (t) { - t.plan(1) - - const source = new Stream() - const dest = new Stream() - - source._read = function () {} - source.pipe(dest) - - const err = new Error('This stream turned into bacon.') + let gotErr = null + source.on('error', function (err) { + gotErr = err + }) - let gotErr = null - try { + const err = new Error('This stream turned into bacon.') source.emit('error', err) - } catch (e) { - gotErr = e - } + t.strictEqual(gotErr, err) + }) - t.strictEqual(gotErr, err) -}) + test('Error WithoutListener Throws', function (t) { + t.plan(1) -test('Error With Removed Listener Throws', function (t) { - t.plan(2) + const source = new Stream() + const dest = new Stream() - const onerror = global.onerror + source._read = function () {} + source.pipe(dest) - const r = new Readable() - const w = new Writable() - let removed = false - let caught = false + const err = new Error('This stream turned into bacon.') - global.onerror = () => { - t.notOk(caught) - global.onerror = onerror - } + let gotErr = null + try { + source.emit('error', err) + } catch (e) { + gotErr = e + } - r._read = function () { - setTimeout(function () { - t.ok(removed) - w.emit('error', new Error('fail')) - }) - } - - w.on('error', myOnError) - r.pipe(w) - w.removeListener('error', myOnError) - removed = true - - function myOnError(er) { - caught = true - } -}) - -test('Error Listener Catches When Wrong Listener Is Removed', function (t) { - t.plan(2) - - const r = new Readable() - const w = new Writable() - let removed = false - let caught = false - - r._read = function () { - setTimeout(function () { - t.ok(removed) - w.emit('error', new Error('fail')) - }) - } + t.strictEqual(gotErr, err) + }) - w.on('error', myOnError) - w._write = function () {} + test('Error With Removed Listener Throws', function (t) { + t.plan(2) + + const onerror = global.onerror + + const r = new Readable() + const w = new Writable() + let 
removed = false + let caught = false + + global.onerror = () => { + t.notOk(caught) + global.onerror = onerror + return true + } + + r._read = function () { + setTimeout(function () { + t.ok(removed) + w.emit('error', new Error('fail')) + }) + } + + w.on('error', myOnError) + r.pipe(w) + w.removeListener('error', myOnError) + removed = true + + function myOnError(er) { + caught = true + } + }) - r.pipe(w) - // Removing some OTHER random listener should not do anything - w.removeListener('error', function () {}) - removed = true + test('Error Listener Catches When Wrong Listener Is Removed', function (t) { + t.plan(2) + + const r = new Readable() + const w = new Writable() + let removed = false + let caught = false + + r._read = function () { + setTimeout(function () { + t.ok(removed) + w.emit('error', new Error('fail')) + }) + } + + w.on('error', myOnError) + w._write = function () {} + + r.pipe(w) + // Removing some OTHER random listener should not do anything + w.removeListener('error', function () {}) + removed = true + + function myOnError(er) { + t.notOk(caught) + caught = true + } + }) +} - function myOnError(er) { - t.notOk(caught) - caught = true - } -}) +module.exports[kReadableStreamSuiteName] = 'stream-pipe-error-handling' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream-pipe-event.js b/src/test/browser/test-stream-pipe-event.js index 272a23cbca..c03180c20d 100644 --- a/src/test/browser/test-stream-pipe-event.js +++ b/src/test/browser/test-stream-pipe-event.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') const inherits = require('inherits') const { Stream } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('pipe event', function (t) { +module.exports = function (t) { t.plan(1) function Writable() { @@ -31,4 +31,6 @@ test('pipe event', function (t) { r.pipe(w) t.ok(passed) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-pipe-event' diff --git a/src/test/browser/test-stream-pipe-without-listenerCount.js b/src/test/browser/test-stream-pipe-without-listenerCount.js index 36fa85e462..1e8238cd8b 100644 --- a/src/test/browser/test-stream-pipe-without-listenerCount.js +++ b/src/test/browser/test-stream-pipe-without-listenerCount.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { Stream } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('pipe without listenerCount on read', function (t) { +module.exports = function (t) { t.plan(1) const r = new Stream({ @@ -17,4 +17,6 @@ test('pipe without listenerCount on read', function (t) { }) t.throws(() => r.pipe(w), 'TypeError: this.listenerCount is not a function') -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-pipe-without-listenerCount' diff --git a/src/test/browser/test-stream-pipeline.js b/src/test/browser/test-stream-pipeline.js index 26bba1a1c4..232f336eb6 100644 --- a/src/test/browser/test-stream-pipeline.js +++ b/src/test/browser/test-stream-pipeline.js @@ -1,109 +1,114 @@ 'use strict' -const test = require('tape') const { Readable, Writable, pipeline } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') -test('pipeline', function (t) { - t.plan(3) +module.exports = function (test) { + test('pipeline', function (t) { + t.plan(3) - let finished = false + let finished = false - const processed = [] - const expected = [Buffer.from('a'), 
Buffer.from('b'), Buffer.from('c')] + const processed = [] + const expected = [Buffer.from('a'), Buffer.from('b'), Buffer.from('c')] - const read = new Readable({ - read: function read() {} - }) + const read = new Readable({ + read: function read() {} + }) - const write = new Writable({ - write: function write(data, enc, cb) { - processed.push(data) - cb() - } - }) + const write = new Writable({ + write: function write(data, enc, cb) { + processed.push(data) + cb() + } + }) - write.on('finish', function () { - finished = true - }) + write.on('finish', function () { + finished = true + }) - for (let i = 0; i < expected.length; i++) { - read.push(expected[i]) - } + for (let i = 0; i < expected.length; i++) { + read.push(expected[i]) + } - read.push(null) - pipeline(read, write, (err) => { - t.ifErr(err) - t.ok(finished) - t.deepEqual(processed, expected) + read.push(null) + pipeline(read, write, (err) => { + t.ifErr(err) + t.ok(finished) + t.deepEqual(processed, expected) + }) }) -}) -test('pipeline missing args', function (t) { - t.plan(3) + test('pipeline missing args', function (t) { + t.plan(3) - const _read = new Readable({ - read: function read() {} - }) + const _read = new Readable({ + read: function read() {} + }) - t.throws(function () { - pipeline(_read, function () {}) - }) + t.throws(function () { + pipeline(_read, function () {}) + }) - t.throws(function () { - pipeline(function () {}) - }) + t.throws(function () { + pipeline(function () {}) + }) - t.throws(function () { - pipeline() + t.throws(function () { + pipeline() + }) }) -}) -test('pipeline error', function (t) { - t.plan(1) + test('pipeline error', function (t) { + t.plan(1) - const _read2 = new Readable({ - read: function read() {} - }) + const _read2 = new Readable({ + read: function read() {} + }) - const _write = new Writable({ - write: function write(data, enc, cb) { - cb() - } - }) + const _write = new Writable({ + write: function write(data, enc, cb) { + cb() + } + }) - _read2.push('data') + _read2.push('data') - setImmediate(function () { - return _read2.destroy() - }) + setImmediate(function () { + return _read2.destroy() + }) - pipeline(_read2, _write, (err) => { - t.equal(err.message, 'Premature close') + pipeline(_read2, _write, (err) => { + t.equal(err.message, 'Premature close') + }) }) -}) -test('pipeline destroy', function (t) { - t.plan(2) + test('pipeline destroy', function (t) { + t.plan(2) - const _read3 = new Readable({ - read: function read() {} - }) + const _read3 = new Readable({ + read: function read() {} + }) - const _write2 = new Writable({ - write: function write(data, enc, cb) { - cb() - } - }) + const _write2 = new Writable({ + write: function write(data, enc, cb) { + cb() + } + }) - _read3.push('data') + _read3.push('data') - setImmediate(function () { - return _read3.destroy(new Error('kaboom')) - }) + setImmediate(function () { + return _read3.destroy(new Error('kaboom')) + }) + + const dst = pipeline(_read3, _write2, (err) => { + t.equal(err.message, 'kaboom') + }) - const dst = pipeline(_read3, _write2, (err) => { - t.equal(err.message, 'kaboom') + t.equal(dst, _write2) }) +} - t.equal(dst, _write2) -}) +module.exports[kReadableStreamSuiteName] = 'stream-pipeline' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream-push-order.js b/src/test/browser/test-stream-push-order.js index 6867c874cb..4afcf756bc 100644 --- a/src/test/browser/test-stream-push-order.js +++ b/src/test/browser/test-stream-push-order.js @@ -1,9 +1,9 @@ 'use strict' 
-const test = require('tape') const { Readable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('push order', function (t) { +module.exports = function (t) { t.plan(1) const s = new Readable({ @@ -29,4 +29,6 @@ test('push order', function (t) { setTimeout(function () { t.equals(s._readableState.buffer.join(','), '1,2,3,4,5,6') }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-push-order' diff --git a/src/test/browser/test-stream-push-strings.js b/src/test/browser/test-stream-push-strings.js index d2bad89d61..bb254c5b39 100644 --- a/src/test/browser/test-stream-push-strings.js +++ b/src/test/browser/test-stream-push-strings.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') const inherits = require('inherits') const { Readable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('push strings', function (t) { +module.exports = function (t) { t.plan(2) function MyStream(options) { @@ -52,4 +52,6 @@ test('push strings', function (t) { t.equal(ms._chunks, -1) t.deepEqual(results, expect) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-push-strings' diff --git a/src/test/browser/test-stream-readable-constructor-set-methods.js b/src/test/browser/test-stream-readable-constructor-set-methods.js index 895dfd1d1a..6d4ff89359 100644 --- a/src/test/browser/test-stream-readable-constructor-set-methods.js +++ b/src/test/browser/test-stream-readable-constructor-set-methods.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { Readable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('readable constructor set methods', function (t) { +module.exports = function (t) { t.plan(2) let _readCalled = false @@ -20,4 +20,6 @@ test('readable constructor set methods', function (t) { t.equal(r._read, _read) t.ok(_readCalled) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-readable-constructor-set-methods' diff --git a/src/test/browser/test-stream-readable-event.js b/src/test/browser/test-stream-readable-event.js index dc2a9ea65c..0c821409bf 100644 --- a/src/test/browser/test-stream-readable-event.js +++ b/src/test/browser/test-stream-readable-event.js @@ -1,105 +1,110 @@ 'use strict' -const test = require('tape') const { Readable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') -test('readable events - first', (t) => { - t.plan(3) +module.exports = function (test) { + test('readable events - first', (t) => { + t.plan(3) - // First test, not reading when the readable is added. - // make sure that on('readable', ...) triggers a readable event. - const r = new Readable({ - highWaterMark: 3 - }) + // First test, not reading when the readable is added. + // make sure that on('readable', ...) triggers a readable event. + const r = new Readable({ + highWaterMark: 3 + }) - let _readCalled = false - r._read = function (n) { - _readCalled = true - } - - // This triggers a 'readable' event, which is lost. 
- r.push(Buffer.from('blerg')) - - let caughtReadable = false - setTimeout(function () { - // we're testing what we think we are - t.notOk(r._readableState.reading) - r.on('readable', function () { - caughtReadable = true - setTimeout(function () { - // we're testing what we think we are - t.notOk(_readCalled) - - t.ok(caughtReadable) + let _readCalled = false + r._read = function (n) { + _readCalled = true + } + + // This triggers a 'readable' event, which is lost. + r.push(Buffer.from('blerg')) + + let caughtReadable = false + setTimeout(function () { + // we're testing what we think we are + t.notOk(r._readableState.reading) + r.on('readable', function () { + caughtReadable = true + setTimeout(function () { + // we're testing what we think we are + t.notOk(_readCalled) + + t.ok(caughtReadable) + }) }) }) }) -}) -test('readable events - second', (t) => { - t.plan(3) + test('readable events - second', (t) => { + t.plan(3) - // second test, make sure that readable is re-emitted if there's - // already a length, while it IS reading. + // second test, make sure that readable is re-emitted if there's + // already a length, while it IS reading. - const r = new Readable({ - highWaterMark: 3 - }) + const r = new Readable({ + highWaterMark: 3 + }) - let _readCalled = false - r._read = function (n) { - _readCalled = true - } - - // This triggers a 'readable' event, which is lost. - r.push(Buffer.from('bl')) - - let caughtReadable = false - setTimeout(function () { - // assert we're testing what we think we are - t.ok(r._readableState.reading) - r.on('readable', function () { - caughtReadable = true - setTimeout(function () { - // we're testing what we think we are - t.ok(_readCalled) - - t.ok(caughtReadable) + let _readCalled = false + r._read = function (n) { + _readCalled = true + } + + // This triggers a 'readable' event, which is lost. + r.push(Buffer.from('bl')) + + let caughtReadable = false + setTimeout(function () { + // assert we're testing what we think we are + t.ok(r._readableState.reading) + r.on('readable', function () { + caughtReadable = true + setTimeout(function () { + // we're testing what we think we are + t.ok(_readCalled) + + t.ok(caughtReadable) + }) }) }) }) -}) -test('readable events - third', (t) => { - t.plan(3) + test('readable events - third', (t) => { + t.plan(3) - // Third test, not reading when the stream has not passed - // the highWaterMark but *has* reached EOF. - const r = new Readable({ - highWaterMark: 30 - }) + // Third test, not reading when the stream has not passed + // the highWaterMark but *has* reached EOF. + const r = new Readable({ + highWaterMark: 30 + }) - let _readCalled = false - r._read = function (n) { - _readCalled = true - } - - // This triggers a 'readable' event, which is lost. - r.push(Buffer.from('blerg')) - r.push(null) - - let caughtReadable = false - setTimeout(function () { - // assert we're testing what we think we are - t.notOk(r._readableState.reading) - r.on('readable', function () { - caughtReadable = true - setTimeout(function () { - // we're testing what we think we are - t.notOk(_readCalled) - - t.ok(caughtReadable) + let _readCalled = false + r._read = function (n) { + _readCalled = true + } + + // This triggers a 'readable' event, which is lost. 
+ r.push(Buffer.from('blerg')) + r.push(null) + + let caughtReadable = false + setTimeout(function () { + // assert we're testing what we think we are + t.notOk(r._readableState.reading) + r.on('readable', function () { + caughtReadable = true + setTimeout(function () { + // we're testing what we think we are + t.notOk(_readCalled) + + t.ok(caughtReadable) + }) }) }) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-readable-event' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream-sync-write.js b/src/test/browser/test-stream-sync-write.js index 06fbebab84..dd3a1b2539 100644 --- a/src/test/browser/test-stream-sync-write.js +++ b/src/test/browser/test-stream-sync-write.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') const inherits = require('inherits') const { Writable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('should bea ble to write sync', function (t) { +module.exports = function (t) { t.plan(2) let internalCalls = 0 @@ -43,4 +43,6 @@ test('should bea ble to write sync', function (t) { t.equal(internalCalls, 2000) t.equal(externalCalls, 2000) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-sync-write' diff --git a/src/test/browser/test-stream-transform-constructor-set-methods.js b/src/test/browser/test-stream-transform-constructor-set-methods.js index 2ce4a0ea08..c64df97dcb 100644 --- a/src/test/browser/test-stream-transform-constructor-set-methods.js +++ b/src/test/browser/test-stream-transform-constructor-set-methods.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { Transform } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('transform constructor set methods', function (t) { +module.exports = function (t) { t.plan(4) let _transformCalled = false @@ -32,4 +32,6 @@ test('transform constructor set methods', function (t) { t.ok(_transformCalled) t.ok(_flushCalled) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-transform-constructor-set-methods' diff --git a/src/test/browser/test-stream-transform-objectmode-falsey-value.js b/src/test/browser/test-stream-transform-objectmode-falsey-value.js index bd2359bb51..69173cce31 100644 --- a/src/test/browser/test-stream-transform-objectmode-falsey-value.js +++ b/src/test/browser/test-stream-transform-objectmode-falsey-value.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { PassThrough } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('transform objectmode falsey value', function (t) { +module.exports = function (t) { t.plan(13) const src = new PassThrough({ objectMode: true }) @@ -32,4 +32,6 @@ test('transform objectmode falsey value', function (t) { src.write(i++) } }, 10) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-transform-objectmode-falsey-value' diff --git a/src/test/browser/test-stream-transform-split-objectmode.js b/src/test/browser/test-stream-transform-split-objectmode.js index 21515858ca..e50ac2c251 100644 --- a/src/test/browser/test-stream-transform-split-objectmode.js +++ b/src/test/browser/test-stream-transform-split-objectmode.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { Transform } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('transform split objectmode', function (t) { +module.exports = function (t) { t.plan(10) const parser = 
new Transform({ readableObjectMode: true }) @@ -54,4 +54,6 @@ test('transform split objectmode', function (t) { setImmediate(function () { serializer.end() }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-transform-split-objectmode' diff --git a/src/test/browser/test-stream-unshift-empty-chunk.js b/src/test/browser/test-stream-unshift-empty-chunk.js index 6580116231..2ebbd20930 100644 --- a/src/test/browser/test-stream-unshift-empty-chunk.js +++ b/src/test/browser/test-stream-unshift-empty-chunk.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { Readable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('unshift empty chunk', function (t) { +module.exports = function (t) { t.plan(1) const r = new Readable() @@ -59,4 +59,6 @@ test('unshift empty chunk', function (t) { r.on('end', function () { t.deepEqual(seen, expect) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-unshift-empty-chunk' diff --git a/src/test/browser/test-stream-unshift-read-race.js b/src/test/browser/test-stream-unshift-read-race.js index d7e17d143e..a600fe1cd4 100644 --- a/src/test/browser/test-stream-unshift-read-race.js +++ b/src/test/browser/test-stream-unshift-read-race.js @@ -7,10 +7,10 @@ // 3. push() after the EOF signaling null is an error. // 4. _read() is not called after pushing the EOF null chunk. -const test = require('tape') const stream = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('unshift read race', function (t) { +module.exports = function (t) { t.plan(139) const hwm = 10 @@ -58,9 +58,10 @@ test('unshift read race', function (t) { w.end() const onerror = global.onerror - global.onerror = (_u1, _u2, _u3, _u4, gotErr) => { + global.onerror = () => { t.ok(true) global.onerror = onerror + return true } r.push(Buffer.allocUnsafe(1)) @@ -118,4 +119,6 @@ test('unshift read race', function (t) { t.equal(written.length, 18) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-unshift-read-race' diff --git a/src/test/browser/test-stream-writable-change-default-encoding.js b/src/test/browser/test-stream-writable-change-default-encoding.js index ef8ec00184..3cfa208e41 100644 --- a/src/test/browser/test-stream-writable-change-default-encoding.js +++ b/src/test/browser/test-stream-writable-change-default-encoding.js @@ -1,8 +1,8 @@ 'use strict' -const test = require('tape') const inherits = require('inherits') const stream = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') inherits(MyWritable, stream.Writable) @@ -16,54 +16,59 @@ function MyWritable(fn, options) { this.fn = fn } -test('defaultCondingIsUtf8', (t) => { - t.plan(1) +module.exports = function (test) { + test('defaultCondingIsUtf8', (t) => { + t.plan(1) - const m = new MyWritable( - function (isBuffer, type, enc) { - t.equal(enc, 'utf8') - }, - { decodeStrings: false } - ) - m.write('foo') - m.end() -}) + const m = new MyWritable( + function (isBuffer, type, enc) { + t.equal(enc, 'utf8') + }, + { decodeStrings: false } + ) + m.write('foo') + m.end() + }) + + test('changeDefaultEncodingToAscii', (t) => { + t.plan(1) -test('changeDefaultEncodingToAscii', (t) => { - t.plan(1) + const m = new MyWritable( + function (isBuffer, type, enc) { + t.equal(enc, 'ascii') + }, + { decodeStrings: false } + ) + m.setDefaultEncoding('ascii') + m.write('bar') + m.end() + }) - const m = new MyWritable( - function (isBuffer, type, enc) 
{ - t.equal(enc, 'ascii') - }, - { decodeStrings: false } - ) - m.setDefaultEncoding('ascii') - m.write('bar') - m.end() -}) + test('changeDefaultEncodingToInvalidValue', (t) => { + t.plan(1) -test('changeDefaultEncodingToInvalidValue', (t) => { - t.plan(1) + t.throws(function () { + const m = new MyWritable(function (isBuffer, type, enc) {}, { decodeStrings: false }) + m.setDefaultEncoding({}) + m.write('bar') + m.end() + }, TypeError) + }) - t.throws(function () { - const m = new MyWritable(function (isBuffer, type, enc) {}, { decodeStrings: false }) - m.setDefaultEncoding({}) + test('checkVairableCaseEncoding', (t) => { + t.plan(1) + + const m = new MyWritable( + function (isBuffer, type, enc) { + t.equal(enc, 'ascii') + }, + { decodeStrings: false } + ) + m.setDefaultEncoding('AsCii') m.write('bar') m.end() - }, TypeError) -}) - -test('checkVairableCaseEncoding', (t) => { - t.plan(1) + }) +} - const m = new MyWritable( - function (isBuffer, type, enc) { - t.equal(enc, 'ascii') - }, - { decodeStrings: false } - ) - m.setDefaultEncoding('AsCii') - m.write('bar') - m.end() -}) +module.exports[kReadableStreamSuiteName] = 'stream-writable-change-default-encoding' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream-writable-constructor-set-methods.js b/src/test/browser/test-stream-writable-constructor-set-methods.js index 7eb357e6e6..43d935b815 100644 --- a/src/test/browser/test-stream-writable-constructor-set-methods.js +++ b/src/test/browser/test-stream-writable-constructor-set-methods.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { Writable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('writable constructor set methods', function (t) { +module.exports = function (t) { t.plan(5) let _writeCalled = false @@ -35,4 +35,6 @@ test('writable constructor set methods', function (t) { t.equal(dLength, 2) t.ok(_writevCalled) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-writable-constructor-set-methods' diff --git a/src/test/browser/test-stream-writable-decoded-encoding.js b/src/test/browser/test-stream-writable-decoded-encoding.js index 608e7a89fc..628349c9e0 100644 --- a/src/test/browser/test-stream-writable-decoded-encoding.js +++ b/src/test/browser/test-stream-writable-decoded-encoding.js @@ -1,8 +1,8 @@ 'use strict' -const test = require('tape') const inherits = require('inherits') const stream = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') function MyWritable(fn, options) { stream.Writable.call(this, options) @@ -16,34 +16,39 @@ MyWritable.prototype._write = function (chunk, encoding, callback) { callback() } -test('decodeStringsTrue', (t) => { - t.plan(3) - - const m = new MyWritable( - function (isBuffer, type, enc) { - t.ok(isBuffer) - t.equal(type, 'object') - t.equal(enc, 'buffer') - // console.log('ok - decoded string is decoded'); - }, - { decodeStrings: true } - ) - m.write('some-text', 'utf8') - m.end() -}) - -test('decodeStringsFalse', (t) => { - t.plan(3) - - const m = new MyWritable( - function (isBuffer, type, enc) { - t.notOk(isBuffer) - t.equal(type, 'string') - t.equal(enc, 'utf8') - // console.log('ok - un-decoded string is not decoded'); - }, - { decodeStrings: false } - ) - m.write('some-text', 'utf8') - m.end() -}) +module.exports = function (test) { + test('decodeStringsTrue', (t) => { + t.plan(3) + + const m = new MyWritable( + function 
(isBuffer, type, enc) { + t.ok(isBuffer) + t.equal(type, 'object') + t.equal(enc, 'buffer') + // console.log('ok - decoded string is decoded'); + }, + { decodeStrings: true } + ) + m.write('some-text', 'utf8') + m.end() + }) + + test('decodeStringsFalse', (t) => { + t.plan(3) + + const m = new MyWritable( + function (isBuffer, type, enc) { + t.notOk(isBuffer) + t.equal(type, 'string') + t.equal(enc, 'utf8') + // console.log('ok - un-decoded string is not decoded'); + }, + { decodeStrings: false } + ) + m.write('some-text', 'utf8') + m.end() + }) +} + +module.exports[kReadableStreamSuiteName] = 'stream-writable-decoded-encoding' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream-writev.js b/src/test/browser/test-stream-writev.js index 66022703a2..e072bc2388 100644 --- a/src/test/browser/test-stream-writev.js +++ b/src/test/browser/test-stream-writev.js @@ -1,7 +1,7 @@ 'use strict' -const test = require('tape') const stream = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') const queue = [] for (let decode = 0; decode < 2; decode++) { @@ -94,8 +94,13 @@ function runTest(decode, uncork, multi) { } } -for (let i = 0; i < queue.length; i++) { - const tr = queue[i] +module.exports = function (test) { + for (let i = 0; i < queue.length; i++) { + const tr = queue[i] - test('round ' + i, runTest(tr[0], tr[1], tr[2])) + test('round ' + i, runTest(tr[0], tr[1], tr[2])) + } } + +module.exports[kReadableStreamSuiteName] = 'stream-writev' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream2-base64-single-char-read-end.js b/src/test/browser/test-stream2-base64-single-char-read-end.js index 6e09201f67..5b7c131d52 100644 --- a/src/test/browser/test-stream2-base64-single-char-read-end.js +++ b/src/test/browser/test-stream2-base64-single-char-read-end.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { Readable, Writable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('base64 single char read end', function (t) { +module.exports = function (t) { t.plan(1) const src = new Readable({ encoding: 'base64' }) @@ -36,4 +36,6 @@ test('base64 single char read end', function (t) { const timeout = setTimeout(function () { t.fail('timed out waiting for _write') }, 100) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-base64-single-char-read-end' diff --git a/src/test/browser/test-stream2-compatibility.js b/src/test/browser/test-stream2-compatibility.js index bed96d8903..6709029562 100644 --- a/src/test/browser/test-stream2-compatibility.js +++ b/src/test/browser/test-stream2-compatibility.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') const inherits = require('inherits') const { Readable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('compatibility', function (t) { +module.exports = function (t) { t.plan(1) let ondataCalled = 0 @@ -31,4 +31,6 @@ test('compatibility', function (t) { }) new TestReader().read() -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-compatibility' diff --git a/src/test/browser/test-stream2-large-read-stall.js b/src/test/browser/test-stream2-large-read-stall.js index cee0f9ca06..17bb7fb2b9 100644 --- a/src/test/browser/test-stream2-large-read-stall.js +++ b/src/test/browser/test-stream2-large-read-stall.js @@ -1,9 +1,9 @@ 'use strict' -const test = 
require('tape') const { Readable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('large object read stall', function (t) { +module.exports = function (t) { t.plan(1) // If everything aligns so that you do a read(n) of exactly the @@ -58,4 +58,6 @@ test('large object read stall', function (t) { // start the flow r.read(0) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-large-read-stall' diff --git a/src/test/browser/test-stream2-objects.js b/src/test/browser/test-stream2-objects.js index 986d9d57dc..c939b07fe6 100644 --- a/src/test/browser/test-stream2-objects.js +++ b/src/test/browser/test-stream2-objects.js @@ -1,7 +1,13 @@ 'use strict' -const test = require('tape') const { Readable, Writable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + +function forEach(xs, f) { + for (let i = 0, l = xs.length; i < l; i++) { + f(xs[i], i) + } +} function toArray(callback) { const stream = new Writable({ objectMode: true }) @@ -30,275 +36,274 @@ function fromArray(list) { function noop() {} -test('can read objects from stream', function (t) { - t.plan(3) - - const r = fromArray([{ one: '1' }, { two: '2' }]) - - const v1 = r.read() - const v2 = r.read() - const v3 = r.read() - - t.deepEqual(v1, { one: '1' }) - t.deepEqual(v2, { two: '2' }) - t.deepEqual(v3, null) -}) +module.exports = function (test) { + test('can read objects from stream', function (t) { + t.plan(3) -test('can pipe objects into stream', function (t) { - t.plan(1) + const r = fromArray([{ one: '1' }, { two: '2' }]) - const r = fromArray([{ one: '1' }, { two: '2' }]) + const v1 = r.read() + const v2 = r.read() + const v3 = r.read() - r.pipe( - toArray(function (list) { - t.deepEqual(list, [{ one: '1' }, { two: '2' }]) - }) - ) -}) + t.deepEqual(v1, { one: '1' }) + t.deepEqual(v2, { two: '2' }) + t.deepEqual(v3, null) + }) -test('read(n) is ignored', function (t) { - t.plan(1) + test('can pipe objects into stream', function (t) { + t.plan(1) - const r = fromArray([{ one: '1' }, { two: '2' }]) + const r = fromArray([{ one: '1' }, { two: '2' }]) - const value = r.read(2) + r.pipe( + toArray(function (list) { + t.deepEqual(list, [{ one: '1' }, { two: '2' }]) + }) + ) + }) - t.deepEqual(value, { one: '1' }) -}) + test('read(n) is ignored', function (t) { + t.plan(1) -test('can read objects from _read (sync)', function (t) { - t.plan(1) + const r = fromArray([{ one: '1' }, { two: '2' }]) - const r = new Readable({ objectMode: true }) - const list = [{ one: '1' }, { two: '2' }] - r._read = function (n) { - const item = list.shift() - r.push(item || null) - } + const value = r.read(2) - r.pipe( - toArray(function (list) { - t.deepEqual(list, [{ one: '1' }, { two: '2' }]) - }) - ) -}) + t.deepEqual(value, { one: '1' }) + }) -test('can read objects from _read (async)', function (t) { - t.plan(1) + test('can read objects from _read (sync)', function (t) { + t.plan(1) - const r = new Readable({ objectMode: true }) - const list = [{ one: '1' }, { two: '2' }] - r._read = function (n) { - const item = list.shift() - process.nextTick(function () { + const r = new Readable({ objectMode: true }) + const list = [{ one: '1' }, { two: '2' }] + r._read = function (n) { + const item = list.shift() r.push(item || null) - }) - } - - r.pipe( - toArray(function (list) { - t.deepEqual(list, [{ one: '1' }, { two: '2' }]) - }) - ) -}) + } -test('can read strings as objects', function (t) { - t.plan(1) - - const r = 
new Readable({ - objectMode: true + r.pipe( + toArray(function (list) { + t.deepEqual(list, [{ one: '1' }, { two: '2' }]) + }) + ) }) - r._read = noop - const list = ['one', 'two', 'three'] - forEach(list, function (str) { - r.push(str) + + test('can read objects from _read (async)', function (t) { + t.plan(1) + + const r = new Readable({ objectMode: true }) + const list = [{ one: '1' }, { two: '2' }] + r._read = function (n) { + const item = list.shift() + process.nextTick(function () { + r.push(item || null) + }) + } + + r.pipe( + toArray(function (list) { + t.deepEqual(list, [{ one: '1' }, { two: '2' }]) + }) + ) }) - r.push(null) - r.pipe( - toArray(function (array) { - t.deepEqual(array, list) - }) - ) -}) + test('can read strings as objects', function (t) { + t.plan(1) -test('read(0) for object streams', function (t) { - t.plan(1) + const r = new Readable({ + objectMode: true + }) + r._read = noop + const list = ['one', 'two', 'three'] + forEach(list, function (str) { + r.push(str) + }) + r.push(null) - const r = new Readable({ - objectMode: true + r.pipe( + toArray(function (array) { + t.deepEqual(array, list) + }) + ) }) - r._read = noop - - r.push('foobar') - r.push(null) - r.read(0) + test('read(0) for object streams', function (t) { + t.plan(1) - r.pipe( - toArray(function (array) { - t.deepEqual(array, ['foobar']) + const r = new Readable({ + objectMode: true }) - ) -}) + r._read = noop -test('falsey values', function (t) { - t.plan(1) + r.push('foobar') + r.push(null) - const r = new Readable({ - objectMode: true + r.read(0) + + r.pipe( + toArray(function (array) { + t.deepEqual(array, ['foobar']) + }) + ) }) - r._read = noop - r.push(false) - r.push(0) - r.push('') - r.push(null) + test('falsey values', function (t) { + t.plan(1) - r.pipe( - toArray(function (array) { - t.deepEqual(array, [false, 0, '']) + const r = new Readable({ + objectMode: true }) - ) -}) - -test('high watermark _read', function (t) { - t.plan(5) - - const r = new Readable({ - highWaterMark: 6, - objectMode: true + r._read = noop + + r.push(false) + r.push(0) + r.push('') + r.push(null) + + r.pipe( + toArray(function (array) { + t.deepEqual(array, [false, 0, '']) + }) + ) }) - let calls = 0 - const list = ['1', '2', '3', '4', '5', '6', '7', '8'] - r._read = function (n) { - calls++ - } + test('high watermark _read', function (t) { + t.plan(5) - forEach(list, function (c) { - r.push(c) - }) + const r = new Readable({ + highWaterMark: 6, + objectMode: true + }) + let calls = 0 + const list = ['1', '2', '3', '4', '5', '6', '7', '8'] - const v = r.read() + r._read = function (n) { + calls++ + } - t.equal(calls, 0) - t.equal(v, '1') + forEach(list, function (c) { + r.push(c) + }) - const v2 = r.read() - t.equal(v2, '2') + const v = r.read() - const v3 = r.read() - t.equal(v3, '3') + t.equal(calls, 0) + t.equal(v, '1') - t.equal(calls, 1) -}) + const v2 = r.read() + t.equal(v2, '2') -test('high watermark push', function (t) { - t.plan(6) + const v3 = r.read() + t.equal(v3, '3') - const r = new Readable({ - highWaterMark: 6, - objectMode: true + t.equal(calls, 1) }) - r._read = function (n) {} - for (let i = 0; i < 6; i++) { - const bool = r.push(i) - t.equal(bool, i !== 5) - } -}) -test('can write objects to stream', function (t) { - t.plan(1) + test('high watermark push', function (t) { + t.plan(6) - const w = new Writable({ objectMode: true }) + const r = new Readable({ + highWaterMark: 6, + objectMode: true + }) + r._read = function (n) {} + for (let i = 0; i < 6; i++) { + const bool = r.push(i) + 
t.equal(bool, i !== 5) + } + }) - w._write = function (chunk, encoding, cb) { - t.deepEqual(chunk, { foo: 'bar' }) - cb() - } + test('can write objects to stream', function (t) { + t.plan(1) - w.on('finish', function () {}) + const w = new Writable({ objectMode: true }) - w.write({ foo: 'bar' }) - w.end() -}) + w._write = function (chunk, encoding, cb) { + t.deepEqual(chunk, { foo: 'bar' }) + cb() + } -test('can write multiple objects to stream', function (t) { - t.plan(1) + w.on('finish', function () {}) - const w = new Writable({ objectMode: true }) - const list = [] + w.write({ foo: 'bar' }) + w.end() + }) - w._write = function (chunk, encoding, cb) { - list.push(chunk) - cb() - } + test('can write multiple objects to stream', function (t) { + t.plan(1) - w.on('finish', function () { - t.deepEqual(list, [0, 1, 2, 3, 4]) - }) + const w = new Writable({ objectMode: true }) + const list = [] - w.write(0) - w.write(1) - w.write(2) - w.write(3) - w.write(4) - w.end() -}) + w._write = function (chunk, encoding, cb) { + list.push(chunk) + cb() + } -test('can write strings as objects', function (t) { - t.plan(1) + w.on('finish', function () { + t.deepEqual(list, [0, 1, 2, 3, 4]) + }) - const w = new Writable({ - objectMode: true + w.write(0) + w.write(1) + w.write(2) + w.write(3) + w.write(4) + w.end() }) - const list = [] - w._write = function (chunk, encoding, cb) { - list.push(chunk) - process.nextTick(cb) - } + test('can write strings as objects', function (t) { + t.plan(1) - w.on('finish', function () { - t.deepEqual(list, ['0', '1', '2', '3', '4']) - }) + const w = new Writable({ + objectMode: true + }) + const list = [] - w.write('0') - w.write('1') - w.write('2') - w.write('3') - w.write('4') - w.end() -}) + w._write = function (chunk, encoding, cb) { + list.push(chunk) + process.nextTick(cb) + } -test('buffers finish until cb is called', function (t) { - t.plan(2) + w.on('finish', function () { + t.deepEqual(list, ['0', '1', '2', '3', '4']) + }) - const w = new Writable({ - objectMode: true + w.write('0') + w.write('1') + w.write('2') + w.write('3') + w.write('4') + w.end() }) - let called = false - w._write = function (chunk, encoding, cb) { - t.equal(chunk, 'foo') + test('buffers finish until cb is called', function (t) { + t.plan(2) - process.nextTick(function () { - called = true - cb() + const w = new Writable({ + objectMode: true }) - } + let called = false - w.on('finish', function () { - t.equal(called, true) - }) + w._write = function (chunk, encoding, cb) { + t.equal(chunk, 'foo') - w.write('foo') - w.end() -}) + process.nextTick(function () { + called = true + cb() + }) + } -function forEach(xs, f) { - for (let i = 0, l = xs.length; i < l; i++) { - f(xs[i], i) - } + w.on('finish', function () { + t.equal(called, true) + }) + + w.write('foo') + w.end() + }) } + +module.exports[kReadableStreamSuiteName] = 'stream2-objects' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream2-pipe-error-handling.js b/src/test/browser/test-stream2-pipe-error-handling.js index d198505229..e830b9b415 100644 --- a/src/test/browser/test-stream2-pipe-error-handling.js +++ b/src/test/browser/test-stream2-pipe-error-handling.js @@ -1,89 +1,95 @@ 'use strict' -const test = require('tape') const stream = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + +module.exports = function (test) { + test('Error Listener Catches', function (t) { + t.plan(3) + + let count = 1000 + + 
const source = new stream.Readable() + source._read = function (n) { + n = Math.min(count, n) + count -= n + source.push(Buffer.alloc(n)) + } + + let unpipedDest + source.unpipe = function (dest) { + unpipedDest = dest + stream.Readable.prototype.unpipe.call(this, dest) + } + + const dest = new stream.Writable() + dest._write = function (chunk, encoding, cb) { + cb() + } + + source.pipe(dest) + + let gotErr = null + dest.on('error', function (err) { + gotErr = err + }) + + let unpipedSource + dest.on('unpipe', function (src) { + unpipedSource = src + }) + + const err = new Error('This stream turned into bacon.') + dest.emit('error', err) + t.strictEqual(gotErr, err) + t.strictEqual(unpipedSource, source) + t.strictEqual(unpipedDest, dest) + }) -test('Error Listener Catches', function (t) { - t.plan(3) - - let count = 1000 + test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) { + t.plan(3) - const source = new stream.Readable() - source._read = function (n) { - n = Math.min(count, n) - count -= n - source.push(Buffer.alloc(n)) - } + let count = 1000 - let unpipedDest - source.unpipe = function (dest) { - unpipedDest = dest - stream.Readable.prototype.unpipe.call(this, dest) - } + const source = new stream.Readable() + source._read = function (n) { + n = Math.min(count, n) + count -= n + source.push(Buffer.alloc(n)) + } - const dest = new stream.Writable() - dest._write = function (chunk, encoding, cb) { - cb() - } + let unpipedDest + source.unpipe = function (dest) { + unpipedDest = dest + stream.Readable.prototype.unpipe.call(this, dest) + } - source.pipe(dest) + const dest = new stream.Writable() + dest._write = function (chunk, encoding, cb) { + cb() + } - let gotErr = null - dest.on('error', function (err) { - gotErr = err - }) + source.pipe(dest) - let unpipedSource - dest.on('unpipe', function (src) { - unpipedSource = src - }) + let unpipedSource + dest.on('unpipe', function (src) { + unpipedSource = src + }) - const err = new Error('This stream turned into bacon.') - dest.emit('error', err) - t.strictEqual(gotErr, err) - t.strictEqual(unpipedSource, source) - t.strictEqual(unpipedDest, dest) -}) - -test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) { - t.plan(3) - - let count = 1000 - - const source = new stream.Readable() - source._read = function (n) { - n = Math.min(count, n) - count -= n - source.push(Buffer.alloc(n)) - } - - let unpipedDest - source.unpipe = function (dest) { - unpipedDest = dest - stream.Readable.prototype.unpipe.call(this, dest) - } - - const dest = new stream.Writable() - dest._write = function (chunk, encoding, cb) { - cb() - } - - source.pipe(dest) - - let unpipedSource - dest.on('unpipe', function (src) { - unpipedSource = src - }) + const err = new Error('This stream turned into bacon.') + const onerror = global.onerror - const err = new Error('This stream turned into bacon.') - const onerror = global.onerror + dest.emit('error', err) - dest.emit('error', err) + global.onerror = () => { + t.ok(true) + t.strictEqual(unpipedSource, source) + t.strictEqual(unpipedDest, dest) + global.onerror = onerror + return true + } + }) +} - global.onerror = (_u1, _u2, _u3, _u4, gotErr) => { - t.strictEqual(gotErr, err) - t.strictEqual(unpipedSource, source) - t.strictEqual(unpipedDest, dest) - global.onerror = onerror - } -}) +module.exports[kReadableStreamSuiteName] = 'stream2-pipe-error-handling' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git 
a/src/test/browser/test-stream2-pipe-error-once-listener.js b/src/test/browser/test-stream2-pipe-error-once-listener.js index 071295d03a..230da9ad42 100644 --- a/src/test/browser/test-stream2-pipe-error-once-listener.js +++ b/src/test/browser/test-stream2-pipe-error-once-listener.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') const inherits = require('inherits') const stream = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('pipe error once listener', function (t) { +module.exports = function (t) { t.plan(1) const Read = function () { @@ -36,4 +36,6 @@ test('pipe error once listener', function (t) { }) read.pipe(write) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-pipe-error-once-listener' diff --git a/src/test/browser/test-stream2-push.js b/src/test/browser/test-stream2-push.js index f826852c46..ce2916aaa1 100644 --- a/src/test/browser/test-stream2-push.js +++ b/src/test/browser/test-stream2-push.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') const { EventEmitter: EE } = require('events') const { Readable, Writable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('push', function (t) { +module.exports = function (t) { t.plan(33) const stream = new Readable({ @@ -114,4 +114,6 @@ test('push', function (t) { t.ok(ended) }) } -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-push' diff --git a/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js b/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js index 794f43d8d5..35e27a2f2f 100644 --- a/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js +++ b/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js @@ -1,93 +1,98 @@ 'use strict' -const test = require('tape') const { Readable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') -test('readable empty buffer no eof 1', function (t) { - t.plan(1) +module.exports = function (test) { + test('readable empty buffer no eof 1', function (t) { + t.plan(1) - const r = new Readable() + const r = new Readable() - // should not end when we get a Buffer(0) or '' as the _read result - // that just means that there is *temporarily* no data, but to go - // ahead and try again later. - // - // note that this is very unusual. it only works for crypto streams - // because the other side of the stream will call read(0) to cycle - // data through openssl. that's why we set the timeouts to call - // r.read(0) again later, otherwise there is no more work being done - // and the process just exits. + // should not end when we get a Buffer(0) or '' as the _read result + // that just means that there is *temporarily* no data, but to go + // ahead and try again later. + // + // note that this is very unusual. it only works for crypto streams + // because the other side of the stream will call read(0) to cycle + // data through openssl. that's why we set the timeouts to call + // r.read(0) again later, otherwise there is no more work being done + // and the process just exits. - const buf = Buffer.alloc(5) - buf.fill('x') - let reads = 5 - r._read = function (n) { - switch (reads--) { - case 0: - return r.push(null) // EOF - case 1: - return r.push(buf) - case 2: - setTimeout(r.read.bind(r, 0), 50) - return r.push(Buffer.alloc(0)) // Not-EOF! 
- case 3: - setTimeout(r.read.bind(r, 0), 50) - return process.nextTick(function () { - return r.push(Buffer.alloc(0)) - }) - case 4: - setTimeout(r.read.bind(r, 0), 50) - return setTimeout(function () { - return r.push(Buffer.alloc(0)) - }) - case 5: - return setTimeout(function () { + const buf = Buffer.alloc(5) + buf.fill('x') + let reads = 5 + r._read = function (n) { + switch (reads--) { + case 0: + return r.push(null) // EOF + case 1: return r.push(buf) - }) - default: - throw new Error('unreachable') + case 2: + setTimeout(r.read.bind(r, 0), 50) + return r.push(Buffer.alloc(0)) // Not-EOF! + case 3: + setTimeout(r.read.bind(r, 0), 50) + return process.nextTick(function () { + return r.push(Buffer.alloc(0)) + }) + case 4: + setTimeout(r.read.bind(r, 0), 50) + return setTimeout(function () { + return r.push(Buffer.alloc(0)) + }) + case 5: + return setTimeout(function () { + return r.push(buf) + }) + default: + throw new Error('unreachable') + } } - } - const results = [] - function flow() { - let chunk - while ((chunk = r.read()) !== null) { - results.push(chunk + '') + const results = [] + function flow() { + let chunk + while ((chunk = r.read()) !== null) { + results.push(chunk + '') + } } - } - r.on('readable', flow) - r.on('end', function () { - results.push('EOF') - t.deepEqual(results, ['xxxxx', 'xxxxx', 'EOF']) + r.on('readable', flow) + r.on('end', function () { + results.push('EOF') + t.deepEqual(results, ['xxxxx', 'xxxxx', 'EOF']) + }) + flow() }) - flow() -}) -test('readable empty buffer no eof 2', function (t) { - t.plan(1) + test('readable empty buffer no eof 2', function (t) { + t.plan(1) - const r = new Readable({ encoding: 'base64' }) - let reads = 5 - r._read = function (n) { - if (!reads--) { - return r.push(null) // EOF - } else { - return r.push(Buffer.from('x')) + const r = new Readable({ encoding: 'base64' }) + let reads = 5 + r._read = function (n) { + if (!reads--) { + return r.push(null) // EOF + } else { + return r.push(Buffer.from('x')) + } } - } - const results = [] - function flow() { - let chunk - while ((chunk = r.read()) !== null) { - results.push(chunk + '') + const results = [] + function flow() { + let chunk + while ((chunk = r.read()) !== null) { + results.push(chunk + '') + } } - } - r.on('readable', flow) - r.on('end', function () { - results.push('EOF') - t.deepEqual(results, ['eHh4', 'eHg=', 'EOF']) + r.on('readable', flow) + r.on('end', function () { + results.push('EOF') + t.deepEqual(results, ['eHh4', 'eHg=', 'EOF']) + }) + flow() }) - flow() -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-readable-empty-buffer-no-eof' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream2-readable-from-list.js b/src/test/browser/test-stream2-readable-from-list.js index 2bc0809c07..f71984e9c4 100644 --- a/src/test/browser/test-stream2-readable-from-list.js +++ b/src/test/browser/test-stream2-readable-from-list.js @@ -1,8 +1,8 @@ 'use strict' -const test = require('tape') const { _fromList: fromList } = require('../../lib/_stream_readable') const BufferList = require('../../lib/internal/streams/buffer_list') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') function bufferListFromArray(arr) { const bl = new BufferList() @@ -12,54 +12,59 @@ function bufferListFromArray(arr) { return bl } -test('buffers', function (t) { - t.plan(5) +module.exports = function (test) { + test('buffers', function (t) { + t.plan(5) - let list = [Buffer.from('foog'), 
Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')] - list = bufferListFromArray(list) + let list = [Buffer.from('foog'), Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')] + list = bufferListFromArray(list) - // read more than the first element. - let ret = fromList(6, { buffer: list, length: 16 }) - t.equal(ret.toString(), 'foogba') + // read more than the first element. + let ret = fromList(6, { buffer: list, length: 16 }) + t.equal(ret.toString(), 'foogba') - // read exactly the first element. - ret = fromList(2, { buffer: list, length: 10 }) - t.equal(ret.toString(), 'rk') + // read exactly the first element. + ret = fromList(2, { buffer: list, length: 10 }) + t.equal(ret.toString(), 'rk') - // read less than the first element. - ret = fromList(2, { buffer: list, length: 8 }) - t.equal(ret.toString(), 'ba') + // read less than the first element. + ret = fromList(2, { buffer: list, length: 8 }) + t.equal(ret.toString(), 'ba') - // read more than we have. - ret = fromList(100, { buffer: list, length: 6 }) - t.equal(ret.toString(), 'zykuel') + // read more than we have. + ret = fromList(100, { buffer: list, length: 6 }) + t.equal(ret.toString(), 'zykuel') - // all consumed. - t.same(list, new BufferList()) -}) + // all consumed. + t.same(list, new BufferList()) + }) -test('strings', function (t) { - t.plan(5) + test('strings', function (t) { + t.plan(5) - let list = ['foog', 'bark', 'bazy', 'kuel'] - list = bufferListFromArray(list) + let list = ['foog', 'bark', 'bazy', 'kuel'] + list = bufferListFromArray(list) - // read more than the first element. - let ret = fromList(6, { buffer: list, length: 16, decoder: true }) - t.equal(ret, 'foogba') + // read more than the first element. + let ret = fromList(6, { buffer: list, length: 16, decoder: true }) + t.equal(ret, 'foogba') - // read exactly the first element. - ret = fromList(2, { buffer: list, length: 10, decoder: true }) - t.equal(ret, 'rk') + // read exactly the first element. + ret = fromList(2, { buffer: list, length: 10, decoder: true }) + t.equal(ret, 'rk') - // read less than the first element. - ret = fromList(2, { buffer: list, length: 8, decoder: true }) - t.equal(ret, 'ba') + // read less than the first element. + ret = fromList(2, { buffer: list, length: 8, decoder: true }) + t.equal(ret, 'ba') - // read more than we have. - ret = fromList(100, { buffer: list, length: 6, decoder: true }) - t.equal(ret, 'zykuel') + // read more than we have. + ret = fromList(100, { buffer: list, length: 6, decoder: true }) + t.equal(ret, 'zykuel') - // all consumed. - t.same(list, new BufferList()) -}) + // all consumed. 
+ t.same(list, new BufferList()) + }) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-readable-from-list' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream2-readable-legacy-drain.js b/src/test/browser/test-stream2-readable-legacy-drain.js index ab72df33f7..8cd09c2fc6 100644 --- a/src/test/browser/test-stream2-readable-legacy-drain.js +++ b/src/test/browser/test-stream2-readable-legacy-drain.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { Stream, Readable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('readable legacy drain', function (t) { +module.exports = function (t) { t.plan(3) const r = new Readable() @@ -42,4 +42,6 @@ test('readable legacy drain', function (t) { } r.pipe(w) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-readable-legacy-drain' diff --git a/src/test/browser/test-stream2-readable-non-empty-end.js b/src/test/browser/test-stream2-readable-non-empty-end.js index dd7bbbc6e3..f9e2983142 100644 --- a/src/test/browser/test-stream2-readable-non-empty-end.js +++ b/src/test/browser/test-stream2-readable-non-empty-end.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { Readable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('non empty end', function (t) { +module.exports = function (t) { t.plan(4) let len = 0 @@ -55,4 +55,6 @@ test('non empty end', function (t) { r = test.read() t.equal(r, null) } -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-readable-non-empty-end' diff --git a/src/test/browser/test-stream2-readable-wrap-empty.js b/src/test/browser/test-stream2-readable-wrap-empty.js index 2f86c95ce0..7779ac91af 100644 --- a/src/test/browser/test-stream2-readable-wrap-empty.js +++ b/src/test/browser/test-stream2-readable-wrap-empty.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') const { EventEmitter: EE } = require('events') const Readable = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('wrap empty', function (t) { +module.exports = function (t) { t.plan(1) const oldStream = new EE() @@ -20,4 +20,6 @@ test('wrap empty', function (t) { }) oldStream.emit('end') -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-readable-wrap-empty' diff --git a/src/test/browser/test-stream2-readable-wrap.js b/src/test/browser/test-stream2-readable-wrap.js index e3edc32253..0dff5fb8f3 100644 --- a/src/test/browser/test-stream2-readable-wrap.js +++ b/src/test/browser/test-stream2-readable-wrap.js @@ -1,94 +1,99 @@ 'use strict' -const test = require('tape') const { EventEmitter: EE } = require('events') const { Readable, Writable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') let run = 0 -function runTest(highWaterMark, objectMode, produce) { - test('run #' + ++run, (t) => { - t.plan(4) +module.exports = function (test) { + function runTest(highWaterMark, objectMode, produce) { + test('run #' + ++run, (t) => { + t.plan(4) - const old = new EE() - const r = new Readable({ highWaterMark: highWaterMark, objectMode: objectMode }) - t.equal(r, r.wrap(old)) + const old = new EE() + const r = new Readable({ highWaterMark, objectMode }) + t.equal(r, r.wrap(old)) - let ended = false - r.on('end', function () { - ended = true - }) + let ended = false + r.on('end', function () { + ended = true + }) - old.pause = 
function () { - // console.error('old.pause()'); - old.emit('pause') - flowing = false - } + old.pause = function () { + // console.error('old.pause()'); + old.emit('pause') + flowing = false + } - old.resume = function () { - // console.error('old.resume()'); - old.emit('resume') - flow() - } - - let flowing - let chunks = 10 - let oldEnded = false - const expected = [] - function flow() { - flowing = true - // eslint-disable-next-line no-unmodified-loop-condition - while (flowing && chunks-- > 0) { - const item = produce() - expected.push(item) - // console.log('old.emit', chunks, flowing); - old.emit('data', item) - // console.log('after emit', chunks, flowing); + old.resume = function () { + // console.error('old.resume()'); + old.emit('resume') + flow() } - if (chunks <= 0) { - oldEnded = true - // console.log('old end', chunks, flowing); - old.emit('end') + + let flowing + let chunks = 10 + let oldEnded = false + const expected = [] + function flow() { + flowing = true + // eslint-disable-next-line no-unmodified-loop-condition + while (flowing && chunks-- > 0) { + const item = produce() + expected.push(item) + // console.log('old.emit', chunks, flowing); + old.emit('data', item) + // console.log('after emit', chunks, flowing); + } + if (chunks <= 0) { + oldEnded = true + // console.log('old end', chunks, flowing); + old.emit('end') + } } - } - - const w = new Writable({ highWaterMark: highWaterMark * 2, objectMode: objectMode }) - const written = [] - w._write = function (chunk, encoding, cb) { - // console.log('_write', chunk); - written.push(chunk) - setTimeout(cb) - } - - w.on('finish', function () { - performAsserts() - }) - r.pipe(w) + const w = new Writable({ highWaterMark: highWaterMark * 2, objectMode }) + const written = [] + w._write = function (chunk, encoding, cb) { + // console.log('_write', chunk); + written.push(chunk) + setTimeout(cb) + } + + w.on('finish', function () { + performAsserts() + }) - flow() + r.pipe(w) + + flow() - function performAsserts() { - t.ok(ended) - t.ok(oldEnded) - t.deepEqual(written, expected) - } + function performAsserts() { + t.ok(ended) + t.ok(oldEnded) + t.deepEqual(written, expected) + } + }) + } + + runTest(100, false, function () { + return Buffer.alloc(100) }) -} -runTest(100, false, function () { - return Buffer.alloc(100) -}) + runTest(10, false, function () { + return Buffer.from('xxxxxxxxxx') + }) -runTest(10, false, function () { - return Buffer.from('xxxxxxxxxx') -}) + runTest(1, true, function () { + return { foo: 'bar' } + }) -runTest(1, true, function () { - return { foo: 'bar' } -}) + const objectChunks = [5, 'a', false, 0, '', 'xyz', { x: 4 }, 7, [], 555] + runTest(1, true, function () { + return objectChunks.shift() + }) +} -const objectChunks = [5, 'a', false, 0, '', 'xyz', { x: 4 }, 7, [], 555] -runTest(1, true, function () { - return objectChunks.shift() -}) +module.exports[kReadableStreamSuiteName] = 'stream2-readable-wrap' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream2-set-encoding.js b/src/test/browser/test-stream2-set-encoding.js index 783a424c88..3e092e2408 100644 --- a/src/test/browser/test-stream2-set-encoding.js +++ b/src/test/browser/test-stream2-set-encoding.js @@ -1,8 +1,8 @@ 'use strict' -const test = require('tape') const inherits = require('inherits') const { Readable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') inherits(TestReader, Readable) @@ -41,295 +41,300 @@ 
TestReader.prototype._read = function (n) { ) } -test('setEncoding utf8', function (t) { - t.plan(1) - - const tr = new TestReader(100) - tr.setEncoding('utf8') - const out = [] - const expect = [ - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa' - ] - - tr.on('readable', function flow() { - let chunk - while ((chunk = tr.read(10)) !== null) { - out.push(chunk) - } - }) +module.exports = function (test) { + test('setEncoding utf8', function (t) { + t.plan(1) + + const tr = new TestReader(100) + tr.setEncoding('utf8') + const out = [] + const expect = [ + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) - tr.on('end', function () { - t.same(out, expect) - }) -}) - -test('setEncoding hex', function (t) { - t.plan(1) - - const tr = new TestReader(100) - tr.setEncoding('hex') - const out = [] - const expect = [ - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161' - ] - - tr.on('readable', function flow() { - let chunk - while ((chunk = tr.read(10)) !== null) { - out.push(chunk) - } + tr.on('end', function () { + t.same(out, expect) + }) }) - tr.on('end', function () { - t.same(out, expect) - }) -}) - -test('setEncoding hex with read(13)', function (t) { - t.plan(1) - - const tr = new TestReader(100) - tr.setEncoding('hex') - const out = [] - const expect = [ - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '16161' - ] - - tr.on('readable', function flow() { - // console.log('readable once'); - let chunk - while ((chunk = tr.read(13)) !== null) { - out.push(chunk) - } - }) + test('setEncoding hex', function (t) { + t.plan(1) + + const tr = new TestReader(100) + tr.setEncoding('hex') + const out = [] + const expect = [ + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) - tr.on('end', function () { - // console.log('END'); - t.same(out, expect) - }) -}) - -test('setEncoding base64', function (t) { - t.plan(1) - - const tr = new TestReader(100) - tr.setEncoding('base64') - const out = [] - const expect = [ - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYQ==' - ] - - tr.on('readable', function flow() { - let chunk - while ((chunk = tr.read(10)) !== null) { - out.push(chunk) - } + tr.on('end', function () { + 
t.same(out, expect) + }) }) - tr.on('end', function () { - t.same(out, expect) - }) -}) - -test('encoding: utf8', function (t) { - t.plan(1) - - const tr = new TestReader(100, { encoding: 'utf8' }) - const out = [] - const expect = [ - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa' - ] - - tr.on('readable', function flow() { - let chunk - while ((chunk = tr.read(10)) !== null) { - out.push(chunk) - } - }) + test('setEncoding hex with read(13)', function (t) { + t.plan(1) + + const tr = new TestReader(100) + tr.setEncoding('hex') + const out = [] + const expect = [ + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '16161' + ] + + tr.on('readable', function flow() { + // console.log('readable once'); + let chunk + while ((chunk = tr.read(13)) !== null) { + out.push(chunk) + } + }) - tr.on('end', function () { - t.same(out, expect) - }) -}) - -test('encoding: hex', function (t) { - t.plan(1) - - const tr = new TestReader(100, { encoding: 'hex' }) - const out = [] - const expect = [ - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161' - ] - - tr.on('readable', function flow() { - let chunk - while ((chunk = tr.read(10)) !== null) { - out.push(chunk) - } + tr.on('end', function () { + // console.log('END'); + t.same(out, expect) + }) }) - tr.on('end', function () { - t.same(out, expect) + test('setEncoding base64', function (t) { + t.plan(1) + + const tr = new TestReader(100) + tr.setEncoding('base64') + const out = [] + const expect = [ + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYQ==' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) }) -}) - -test('encoding: hex with read(13)', function (t) { - t.plan(1) - - const tr = new TestReader(100, { encoding: 'hex' }) - const out = [] - const expect = [ - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '16161' - ] - - tr.on('readable', function flow() { - let chunk - while ((chunk = tr.read(13)) !== null) { - out.push(chunk) - } + + test('encoding: utf8', function (t) { + t.plan(1) + + const tr = new TestReader(100, { encoding: 'utf8' }) + const out = [] + const expect = [ + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) }) - 
tr.on('end', function () { - t.same(out, expect) + test('encoding: hex', function (t) { + t.plan(1) + + const tr = new TestReader(100, { encoding: 'hex' }) + const out = [] + const expect = [ + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) }) -}) - -test('encoding: base64', function (t) { - t.plan(1) - - const tr = new TestReader(100, { encoding: 'base64' }) - const out = [] - const expect = [ - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYQ==' - ] - - tr.on('readable', function flow() { - let chunk - while ((chunk = tr.read(10)) !== null) { - out.push(chunk) - } + + test('encoding: hex with read(13)', function (t) { + t.plan(1) + + const tr = new TestReader(100, { encoding: 'hex' }) + const out = [] + const expect = [ + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '16161' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(13)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) }) - tr.on('end', function () { - t.same(out, expect) + test('encoding: base64', function (t) { + t.plan(1) + + const tr = new TestReader(100, { encoding: 'base64' }) + const out = [] + const expect = [ + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYQ==' + ] + + tr.on('readable', function flow() { + let chunk + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + + tr.on('end', function () { + t.same(out, expect) + }) }) -}) -test('chainable', function (t) { - t.plan(1) + test('chainable', function (t) { + t.plan(1) + + const tr = new TestReader(100) + t.equal(tr.setEncoding('utf8'), tr) + }) +} - const tr = new TestReader(100) - t.equal(tr.setEncoding('utf8'), tr) -}) +module.exports[kReadableStreamSuiteName] = 'stream2-set-encoding' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream2-transform.js b/src/test/browser/test-stream2-transform.js index 43a1fc5afc..e0168307bc 100644 --- a/src/test/browser/test-stream2-transform.js +++ b/src/test/browser/test-stream2-transform.js @@ -1,484 +1,489 @@ 'use strict' -const test = require('tape') const { PassThrough, Transform } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') -test('writable side consumption', function (t) { - t.plan(3) +module.exports = function (test) { + test('writable side consumption', function (t) { + t.plan(3) - const tx = new Transform({ - highWaterMark: 10 - }) + const tx = new Transform({ + highWaterMark: 10 + }) - 
let transformed = 0 - tx._transform = function (chunk, encoding, cb) { - transformed += chunk.length - tx.push(chunk) - cb() - } + let transformed = 0 + tx._transform = function (chunk, encoding, cb) { + transformed += chunk.length + tx.push(chunk) + cb() + } - for (let i = 1; i <= 10; i++) { - tx.write(Buffer.alloc(i)) - } - tx.end() - - t.equal(tx._readableState.length, 10) - t.equal(transformed, 10) - t.same( - tx._writableState.getBuffer().map(function (c) { - return c.chunk.length - }), - [5, 6, 7, 8, 9, 10] - ) -}) - -test('passthrough', function (t) { - t.plan(4) - - const pt = new PassThrough() - - pt.write(Buffer.from('foog')) - pt.write(Buffer.from('bark')) - pt.write(Buffer.from('bazy')) - pt.write(Buffer.from('kuel')) - pt.end() - - t.equal(pt.read(5).toString(), 'foogb') - t.equal(pt.read(5).toString(), 'arkba') - t.equal(pt.read(5).toString(), 'zykue') - t.equal(pt.read(5).toString(), 'l') -}) - -test('object passthrough', function (t) { - t.plan(7) - - const pt = new PassThrough({ objectMode: true }) - - pt.write(1) - pt.write(true) - pt.write(false) - pt.write(0) - pt.write('foo') - pt.write('') - pt.write({ a: 'b' }) - pt.end() - - t.equal(pt.read(), 1) - t.equal(pt.read(), true) - t.equal(pt.read(), false) - t.equal(pt.read(), 0) - t.equal(pt.read(), 'foo') - t.equal(pt.read(), '') - t.same(pt.read(), { a: 'b' }) -}) - -test('simple transform', function (t) { - t.plan(4) - - const pt = new Transform() - pt._transform = function (c, e, cb) { - const ret = Buffer.alloc(c.length) - ret.fill('x') - pt.push(ret) - cb() - } + for (let i = 1; i <= 10; i++) { + tx.write(Buffer.alloc(i)) + } + tx.end() + + t.equal(tx._readableState.length, 10) + t.equal(transformed, 10) + t.same( + tx._writableState.getBuffer().map(function (c) { + return c.chunk.length + }), + [5, 6, 7, 8, 9, 10] + ) + }) - pt.write(Buffer.from('foog')) - pt.write(Buffer.from('bark')) - pt.write(Buffer.from('bazy')) - pt.write(Buffer.from('kuel')) - pt.end() - - t.equal(pt.read(5).toString(), 'xxxxx') - t.equal(pt.read(5).toString(), 'xxxxx') - t.equal(pt.read(5).toString(), 'xxxxx') - t.equal(pt.read(5).toString(), 'x') -}) - -test('simple object transform', function (t) { - t.plan(7) - - const pt = new Transform({ objectMode: true }) - pt._transform = function (c, e, cb) { - pt.push(JSON.stringify(c)) - cb() - } + test('passthrough', function (t) { + t.plan(4) - pt.write(1) - pt.write(true) - pt.write(false) - pt.write(0) - pt.write('foo') - pt.write('') - pt.write({ a: 'b' }) - pt.end() - - t.equal(pt.read(), '1') - t.equal(pt.read(), 'true') - t.equal(pt.read(), 'false') - t.equal(pt.read(), '0') - t.equal(pt.read(), '"foo"') - t.equal(pt.read(), '""') - t.equal(pt.read(), '{"a":"b"}') -}) - -test('async passthrough', function (t) { - t.plan(4) - - const pt = new Transform() - pt._transform = function (chunk, encoding, cb) { - setTimeout(function () { - pt.push(chunk) - cb() - }, 10) - } + const pt = new PassThrough() - pt.write(Buffer.from('foog')) - pt.write(Buffer.from('bark')) - pt.write(Buffer.from('bazy')) - pt.write(Buffer.from('kuel')) - pt.end() + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() - pt.on('finish', function () { t.equal(pt.read(5).toString(), 'foogb') t.equal(pt.read(5).toString(), 'arkba') t.equal(pt.read(5).toString(), 'zykue') t.equal(pt.read(5).toString(), 'l') }) -}) -test('assymetric transform (expand)', function (t) { - t.plan(7) + test('object passthrough', function (t) { + t.plan(7) + + 
const pt = new PassThrough({ objectMode: true }) + + pt.write(1) + pt.write(true) + pt.write(false) + pt.write(0) + pt.write('foo') + pt.write('') + pt.write({ a: 'b' }) + pt.end() + + t.equal(pt.read(), 1) + t.equal(pt.read(), true) + t.equal(pt.read(), false) + t.equal(pt.read(), 0) + t.equal(pt.read(), 'foo') + t.equal(pt.read(), '') + t.same(pt.read(), { a: 'b' }) + }) - const pt = new Transform() + test('simple transform', function (t) { + t.plan(4) - // emit each chunk 2 times. - pt._transform = function (chunk, encoding, cb) { - setTimeout(function () { - pt.push(chunk) + const pt = new Transform() + pt._transform = function (c, e, cb) { + const ret = Buffer.alloc(c.length) + ret.fill('x') + pt.push(ret) + cb() + } + + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + + t.equal(pt.read(5).toString(), 'xxxxx') + t.equal(pt.read(5).toString(), 'xxxxx') + t.equal(pt.read(5).toString(), 'xxxxx') + t.equal(pt.read(5).toString(), 'x') + }) + + test('simple object transform', function (t) { + t.plan(7) + + const pt = new Transform({ objectMode: true }) + pt._transform = function (c, e, cb) { + pt.push(JSON.stringify(c)) + cb() + } + + pt.write(1) + pt.write(true) + pt.write(false) + pt.write(0) + pt.write('foo') + pt.write('') + pt.write({ a: 'b' }) + pt.end() + + t.equal(pt.read(), '1') + t.equal(pt.read(), 'true') + t.equal(pt.read(), 'false') + t.equal(pt.read(), '0') + t.equal(pt.read(), '"foo"') + t.equal(pt.read(), '""') + t.equal(pt.read(), '{"a":"b"}') + }) + + test('async passthrough', function (t) { + t.plan(4) + + const pt = new Transform() + pt._transform = function (chunk, encoding, cb) { setTimeout(function () { pt.push(chunk) cb() }, 10) - }, 10) - } + } - pt.write(Buffer.from('foog')) - pt.write(Buffer.from('bark')) - pt.write(Buffer.from('bazy')) - pt.write(Buffer.from('kuel')) - pt.end() - - pt.on('finish', function () { - t.equal(pt.read(5).toString(), 'foogf') - t.equal(pt.read(5).toString(), 'oogba') - t.equal(pt.read(5).toString(), 'rkbar') - t.equal(pt.read(5).toString(), 'kbazy') - t.equal(pt.read(5).toString(), 'bazyk') - t.equal(pt.read(5).toString(), 'uelku') - t.equal(pt.read(5).toString(), 'el') - }) -}) + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() -test('assymetric transform (compress)', function (t) { - t.plan(3) + pt.on('finish', function () { + t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5).toString(), 'zykue') + t.equal(pt.read(5).toString(), 'l') + }) + }) - const pt = new Transform() + test('assymetric transform (expand)', function (t) { + t.plan(7) - // each output is the first char of 3 consecutive chunks, - // or whatever's left. - pt.state = '' + const pt = new Transform() - pt._transform = function (chunk, encoding, cb) { - if (!chunk) { - chunk = '' + // emit each chunk 2 times. + pt._transform = function (chunk, encoding, cb) { + setTimeout(function () { + pt.push(chunk) + setTimeout(function () { + pt.push(chunk) + cb() + }, 10) + }, 10) } - const s = chunk.toString() - setTimeout( - function () { - this.state += s.charAt(0) - if (this.state.length === 3) { - pt.push(Buffer.from(this.state)) - this.state = '' - } - cb() - }.bind(this), - 10 - ) - } - pt._flush = function (cb) { - // just output whatever we have. 
- pt.push(Buffer.from(this.state)) - this.state = '' - cb() - } - - pt.write(Buffer.from('aaaa')) - pt.write(Buffer.from('bbbb')) - pt.write(Buffer.from('cccc')) - pt.write(Buffer.from('dddd')) - pt.write(Buffer.from('eeee')) - pt.write(Buffer.from('aaaa')) - pt.write(Buffer.from('bbbb')) - pt.write(Buffer.from('cccc')) - pt.write(Buffer.from('dddd')) - pt.write(Buffer.from('eeee')) - pt.write(Buffer.from('aaaa')) - pt.write(Buffer.from('bbbb')) - pt.write(Buffer.from('cccc')) - pt.write(Buffer.from('dddd')) - pt.end() - - // 'abcdeabcdeabcd' - pt.on('finish', function () { - t.equal(pt.read(5).toString(), 'abcde') - t.equal(pt.read(5).toString(), 'abcde') - t.equal(pt.read(5).toString(), 'abcd') + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + + pt.on('finish', function () { + t.equal(pt.read(5).toString(), 'foogf') + t.equal(pt.read(5).toString(), 'oogba') + t.equal(pt.read(5).toString(), 'rkbar') + t.equal(pt.read(5).toString(), 'kbazy') + t.equal(pt.read(5).toString(), 'bazyk') + t.equal(pt.read(5).toString(), 'uelku') + t.equal(pt.read(5).toString(), 'el') + }) }) -}) - -// this tests for a stall when data is written to a full stream -// that has empty transforms. -test('complex transform', function (t) { - t.plan(2) - - let count = 0 - let saved = null - const pt = new Transform({ highWaterMark: 3 }) - pt._transform = function (c, e, cb) { - if (count++ === 1) { - saved = c - } else { - if (saved) { - pt.push(saved) - saved = null + + test('assymetric transform (compress)', function (t) { + t.plan(3) + + const pt = new Transform() + + // each output is the first char of 3 consecutive chunks, + // or whatever's left. + pt.state = '' + + pt._transform = function (chunk, encoding, cb) { + if (!chunk) { + chunk = '' } - pt.push(c) + const s = chunk.toString() + setTimeout( + function () { + this.state += s.charAt(0) + if (this.state.length === 3) { + pt.push(Buffer.from(this.state)) + this.state = '' + } + cb() + }.bind(this), + 10 + ) } - cb() - } + pt._flush = function (cb) { + // just output whatever we have. + pt.push(Buffer.from(this.state)) + this.state = '' + cb() + } - pt.once('readable', function () { - process.nextTick(function () { - pt.write(Buffer.from('d')) - pt.write(Buffer.from('ef'), function () { - pt.end() - }) - t.equal(pt.read().toString(), 'abcdef') - t.equal(pt.read(), null) + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.write(Buffer.from('eeee')) + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.write(Buffer.from('eeee')) + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.end() + + // 'abcdeabcdeabcd' + pt.on('finish', function () { + t.equal(pt.read(5).toString(), 'abcde') + t.equal(pt.read(5).toString(), 'abcde') + t.equal(pt.read(5).toString(), 'abcd') }) }) - pt.write(Buffer.from('abc')) -}) - -test('passthrough event emission', function (t) { - t.plan(11) + // this tests for a stall when data is written to a full stream + // that has empty transforms. 
+ test('complex transform', function (t) { + t.plan(2) + + let count = 0 + let saved = null + const pt = new Transform({ highWaterMark: 3 }) + pt._transform = function (c, e, cb) { + if (count++ === 1) { + saved = c + } else { + if (saved) { + pt.push(saved) + saved = null + } + pt.push(c) + } - const pt = new PassThrough() - let emits = 0 - pt.on('readable', function () { - // console.error('>>> emit readable %d', emits); - emits++ - }) + cb() + } - pt.write(Buffer.from('foog')) + pt.once('readable', function () { + process.nextTick(function () { + pt.write(Buffer.from('d')) + pt.write(Buffer.from('ef'), function () { + pt.end() + }) + t.equal(pt.read().toString(), 'abcdef') + t.equal(pt.read(), null) + }) + }) - // console.error('need emit 0'); - pt.write(Buffer.from('bark')) + pt.write(Buffer.from('abc')) + }) - setTimeout(() => { - // console.error('should have emitted readable now 1 === %d', emits) - t.equal(emits, 1) + test('passthrough event emission', function (t) { + t.plan(11) - t.equal(pt.read(5).toString(), 'foogb') - t.equal(pt.read(5) + '', 'null') + const pt = new PassThrough() + let emits = 0 + pt.on('readable', function () { + // console.error('>>> emit readable %d', emits); + emits++ + }) - // console.error('need emit 1'); + pt.write(Buffer.from('foog')) - pt.write(Buffer.from('bazy')) - // console.error('should have emitted, but not again'); - pt.write(Buffer.from('kuel')) + // console.error('need emit 0'); + pt.write(Buffer.from('bark')) - // console.error('should have emitted readable now 2 === %d', emits); setTimeout(() => { - t.equal(emits, 2) + // console.error('should have emitted readable now 1 === %d', emits) + t.equal(emits, 1) - t.equal(pt.read(5).toString(), 'arkba') - t.equal(pt.read(5).toString(), 'zykue') - t.equal(pt.read(5), null) + t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5) + '', 'null') - // console.error('need emit 2'); + // console.error('need emit 1'); - pt.end() + pt.write(Buffer.from('bazy')) + // console.error('should have emitted, but not again'); + pt.write(Buffer.from('kuel')) + // console.error('should have emitted readable now 2 === %d', emits); setTimeout(() => { - t.equal(emits, 3) + t.equal(emits, 2) - t.equal(pt.read(5).toString(), 'l') + t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5).toString(), 'zykue') t.equal(pt.read(5), null) - // console.error('should not have emitted again'); - t.equal(emits, 3) - }) - }) - }) -}) + // console.error('need emit 2'); -test('passthrough event emission reordered', function (t) { - t.plan(10) + pt.end() + + setTimeout(() => { + t.equal(emits, 3) + + t.equal(pt.read(5).toString(), 'l') + t.equal(pt.read(5), null) - const pt = new PassThrough() - let emits = 0 - pt.on('readable', function () { - // console.error('emit readable', emits); - emits++ + // console.error('should not have emitted again'); + t.equal(emits, 3) + }) + }) + }) }) - pt.write(Buffer.from('foog')) - // console.error('need emit 0'); - pt.write(Buffer.from('bark')) + test('passthrough event emission reordered', function (t) { + t.plan(10) - setTimeout(() => { - // console.error('should have emitted readable now 1 === %d', emits); - t.equal(emits, 1) + const pt = new PassThrough() + let emits = 0 + pt.on('readable', function () { + // console.error('emit readable', emits); + emits++ + }) - t.equal(pt.read(5).toString(), 'foogb') - t.equal(pt.read(5), null) + pt.write(Buffer.from('foog')) + // console.error('need emit 0'); + pt.write(Buffer.from('bark')) - // console.error('need emit 1'); - 
pt.once('readable', function () { - t.equal(pt.read(5).toString(), 'arkba') + setTimeout(() => { + // console.error('should have emitted readable now 1 === %d', emits); + t.equal(emits, 1) + t.equal(pt.read(5).toString(), 'foogb') t.equal(pt.read(5), null) - // console.error('need emit 2'); + // console.error('need emit 1'); pt.once('readable', function () { - t.equal(pt.read(5).toString(), 'zykue') + t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5), null) + + // console.error('need emit 2'); pt.once('readable', function () { - t.equal(pt.read(5).toString(), 'l') + t.equal(pt.read(5).toString(), 'zykue') t.equal(pt.read(5), null) - t.equal(emits, 4) + pt.once('readable', function () { + t.equal(pt.read(5).toString(), 'l') + t.equal(pt.read(5), null) + t.equal(emits, 4) + }) + pt.end() }) - pt.end() + pt.write(Buffer.from('kuel')) }) - pt.write(Buffer.from('kuel')) - }) - pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('bazy')) + }) }) -}) -test('passthrough facaded', function (t) { - t.plan(1) + test('passthrough facaded', function (t) { + t.plan(1) - // console.error('passthrough facaded'); - const pt = new PassThrough() - const datas = [] - pt.on('data', function (chunk) { - datas.push(chunk.toString()) - }) + // console.error('passthrough facaded'); + const pt = new PassThrough() + const datas = [] + pt.on('data', function (chunk) { + datas.push(chunk.toString()) + }) - pt.on('end', function () { - t.same(datas, ['foog', 'bark', 'bazy', 'kuel']) - }) + pt.on('end', function () { + t.same(datas, ['foog', 'bark', 'bazy', 'kuel']) + }) - pt.write(Buffer.from('foog')) - setTimeout(function () { - pt.write(Buffer.from('bark')) + pt.write(Buffer.from('foog')) setTimeout(function () { - pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('bark')) setTimeout(function () { - pt.write(Buffer.from('kuel')) + pt.write(Buffer.from('bazy')) setTimeout(function () { - pt.end() + pt.write(Buffer.from('kuel')) + setTimeout(function () { + pt.end() + }, 10) }, 10) }, 10) }, 10) - }, 10) -}) + }) -test('object transform (json parse)', function (t) { - t.plan(5) + test('object transform (json parse)', function (t) { + t.plan(5) - // console.error('json parse stream'); - const jp = new Transform({ objectMode: true }) - jp._transform = function (data, encoding, cb) { - try { - jp.push(JSON.parse(data)) - cb() - } catch (er) { - cb(er) + // console.error('json parse stream'); + const jp = new Transform({ objectMode: true }) + jp._transform = function (data, encoding, cb) { + try { + jp.push(JSON.parse(data)) + cb() + } catch (er) { + cb(er) + } } - } - // anything except null/undefined is fine. - // those are "magic" in the stream API, because they signal EOF. - const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }] + // anything except null/undefined is fine. + // those are "magic" in the stream API, because they signal EOF. 
+ const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }] - let ended = false - jp.on('end', function () { - ended = true - }) + let ended = false + jp.on('end', function () { + ended = true + }) - forEach(objects, function (obj) { - jp.write(JSON.stringify(obj)) - const res = jp.read() - t.same(res, obj) - }) + forEach(objects, function (obj) { + jp.write(JSON.stringify(obj)) + const res = jp.read() + t.same(res, obj) + }) - jp.end() - // read one more time to get the 'end' event - jp.read() + jp.end() + // read one more time to get the 'end' event + jp.read() - process.nextTick(function () { - t.ok(ended) + process.nextTick(function () { + t.ok(ended) + }) }) -}) -test('object transform (json stringify)', function (t) { - t.plan(5) + test('object transform (json stringify)', function (t) { + t.plan(5) - // console.error('json parse stream'); - const js = new Transform({ objectMode: true }) - js._transform = function (data, encoding, cb) { - try { - js.push(JSON.stringify(data)) - cb() - } catch (er) { - cb(er) + // console.error('json parse stream'); + const js = new Transform({ objectMode: true }) + js._transform = function (data, encoding, cb) { + try { + js.push(JSON.stringify(data)) + cb() + } catch (er) { + cb(er) + } } - } - // anything except null/undefined is fine. - // those are "magic" in the stream API, because they signal EOF. - const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }] + // anything except null/undefined is fine. + // those are "magic" in the stream API, because they signal EOF. + const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }] - let ended = false - js.on('end', function () { - ended = true - }) + let ended = false + js.on('end', function () { + ended = true + }) - forEach(objects, function (obj) { - js.write(obj) - const res = js.read() - t.equal(res, JSON.stringify(obj)) - }) + forEach(objects, function (obj) { + js.write(obj) + const res = js.read() + t.equal(res, JSON.stringify(obj)) + }) - js.end() - // read one more time to get the 'end' event - js.read() + js.end() + // read one more time to get the 'end' event + js.read() - process.nextTick(function () { - t.ok(ended) + process.nextTick(function () { + t.ok(ended) + }) }) -}) -function forEach(xs, f) { - for (let i = 0, l = xs.length; i < l; i++) { - f(xs[i], i) + function forEach(xs, f) { + for (let i = 0, l = xs.length; i < l; i++) { + f(xs[i], i) + } } } + +module.exports[kReadableStreamSuiteName] = 'stream2-transform' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream2-unpipe-drain.js b/src/test/browser/test-stream2-unpipe-drain.js index 72b137a0c1..7d3192eec1 100644 --- a/src/test/browser/test-stream2-unpipe-drain.js +++ b/src/test/browser/test-stream2-unpipe-drain.js @@ -1,11 +1,11 @@ 'use strict' -const test = require('tape') const crypto = require('crypto') const inherits = require('inherits') const stream = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('unpipe drain', function (t) { +module.exports = function (t) { try { crypto.randomBytes(9) } catch (_) { @@ -60,4 +60,6 @@ test('unpipe drain', function (t) { t.equal(src1.reads, 2) t.equal(src2.reads, 1) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-unpipe-drain' diff --git a/src/test/browser/test-stream2-writable.js b/src/test/browser/test-stream2-writable.js index 
834ab94523..29de2de572 100644 --- a/src/test/browser/test-stream2-writable.js +++ b/src/test/browser/test-stream2-writable.js @@ -1,8 +1,8 @@ 'use strict' -const test = require('tape') const inherits = require('inherits') const { Duplex, Writable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') inherits(TestWriter, Writable) @@ -44,387 +44,392 @@ if (!process.stdout) { process.stdout = new Processstdout() } -test('write fast', function (t) { - t.plan(1) +module.exports = function (test) { + test('write fast', function (t) { + t.plan(1) - const tw = new TestWriter({ - highWaterMark: 100 - }) + const tw = new TestWriter({ + highWaterMark: 100 + }) - tw.on('finish', function () { - t.same(tw.buffer, chunks, 'got chunks in the right order') - }) + tw.on('finish', function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + }) - forEach(chunks, function (chunk) { - // screw backpressure. Just buffer it all up. - tw.write(chunk) + forEach(chunks, function (chunk) { + // screw backpressure. Just buffer it all up. + tw.write(chunk) + }) + tw.end() }) - tw.end() -}) -test('write slow', function (t) { - t.plan(1) + test('write slow', function (t) { + t.plan(1) - const tw = new TestWriter({ - highWaterMark: 100 - }) + const tw = new TestWriter({ + highWaterMark: 100 + }) + + tw.on('finish', function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + }) - tw.on('finish', function () { - t.same(tw.buffer, chunks, 'got chunks in the right order') + let i = 0 + ;(function W() { + tw.write(chunks[i++]) + if (i < chunks.length) { + setTimeout(W, 10) + } else { + tw.end() + } + })() }) - let i = 0 - ;(function W() { - tw.write(chunks[i++]) - if (i < chunks.length) { - setTimeout(W, 10) - } else { - tw.end() - } - })() -}) + test('write backpressure', function (t) { + t.plan(19) -test('write backpressure', function (t) { - t.plan(19) + const tw = new TestWriter({ + highWaterMark: 50 + }) - const tw = new TestWriter({ - highWaterMark: 50 - }) + let drains = 0 - let drains = 0 + tw.on('finish', function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + t.equal(drains, 17) + }) - tw.on('finish', function () { - t.same(tw.buffer, chunks, 'got chunks in the right order') - t.equal(drains, 17) - }) + tw.on('drain', function () { + drains++ + }) - tw.on('drain', function () { - drains++ + let i = 0 + ;(function W() { + let ret + do { + ret = tw.write(chunks[i++]) + } while (ret !== false && i < chunks.length) + + if (i < chunks.length) { + t.ok(tw._writableState.length >= 50) + tw.once('drain', W) + } else { + tw.end() + } + })() }) - let i = 0 - ;(function W() { - let ret - do { - ret = tw.write(chunks[i++]) - } while (ret !== false && i < chunks.length) - - if (i < chunks.length) { - t.ok(tw._writableState.length >= 50) - tw.once('drain', W) - } else { - tw.end() - } - })() -}) + test('write bufferize', function (t) { + t.plan(50) -test('write bufferize', function (t) { - t.plan(50) + const tw = new TestWriter({ + highWaterMark: 100 + }) - const tw = new TestWriter({ - highWaterMark: 100 - }) + const encodings = [ + 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined + ] + + tw.on('finish', function () { + forEach(chunks, function (chunk, i) { + const actual = Buffer.from(tw.buffer[i]) + chunk = Buffer.from(chunk) + + // Some combination of encoding and length result in the last byte replaced by two extra null 
bytes + if (actual[actual.length - 1] === 0) { + chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) + } + + t.same(actual, chunk, 'got the expected chunks ' + i) + }) + }) - const encodings = [ - 'hex', - 'utf8', - 'utf-8', - 'ascii', - 'binary', - 'base64', - 'ucs2', - 'ucs-2', - 'utf16le', - 'utf-16le', - undefined - ] - - tw.on('finish', function () { forEach(chunks, function (chunk, i) { - const actual = Buffer.from(tw.buffer[i]) + const enc = encodings[i % encodings.length] chunk = Buffer.from(chunk) - - // Some combination of encoding and length result in the last byte replaced by two extra null bytes - if (actual[actual.length - 1] === 0) { - chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) - } - - t.same(actual, chunk, 'got the expected chunks ' + i) + tw.write(chunk.toString(enc), enc) }) + tw.end() }) - forEach(chunks, function (chunk, i) { - const enc = encodings[i % encodings.length] - chunk = Buffer.from(chunk) - tw.write(chunk.toString(enc), enc) - }) - tw.end() -}) + test('write no bufferize', function (t) { + t.plan(100) -test('write no bufferize', function (t) { - t.plan(100) + const tw = new TestWriter({ + highWaterMark: 100, + decodeStrings: false + }) - const tw = new TestWriter({ - highWaterMark: 100, - decodeStrings: false - }) + tw._write = function (chunk, encoding, cb) { + t.equals(typeof chunk, 'string') + chunk = Buffer.from(chunk, encoding) + return TestWriter.prototype._write.call(this, chunk, encoding, cb) + } - tw._write = function (chunk, encoding, cb) { - t.equals(typeof chunk, 'string') - chunk = Buffer.from(chunk, encoding) - return TestWriter.prototype._write.call(this, chunk, encoding, cb) - } + const encodings = [ + 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined + ] + + tw.on('finish', function () { + forEach(chunks, function (chunk, i) { + const actual = Buffer.from(tw.buffer[i]) + chunk = Buffer.from(chunk) + + // Some combination of encoding and length result in the last byte replaced by two extra null bytes + if (actual[actual.length - 1] === 0) { + chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) + } + + t.same(actual, chunk, 'got the expected chunks ' + i) + }) + }) - const encodings = [ - 'hex', - 'utf8', - 'utf-8', - 'ascii', - 'binary', - 'base64', - 'ucs2', - 'ucs-2', - 'utf16le', - 'utf-16le', - undefined - ] - - tw.on('finish', function () { forEach(chunks, function (chunk, i) { - const actual = Buffer.from(tw.buffer[i]) + const enc = encodings[i % encodings.length] chunk = Buffer.from(chunk) + tw.write(chunk.toString(enc), enc) + }) + tw.end() + }) + + test('write callbacks', function (t) { + t.plan(2) + + const callbacks = chunks + .map(function (chunk, i) { + return [ + i, + function (er) { + callbacks._called[i] = chunk + } + ] + }) + .reduce(function (set, x) { + set['callback-' + x[0]] = x[1] + return set + }, {}) + callbacks._called = [] + + const tw = new TestWriter({ + highWaterMark: 100 + }) - // Some combination of encoding and length result in the last byte replaced by two extra null bytes - if (actual[actual.length - 1] === 0) { - chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) - } + tw.on('finish', function () { + process.nextTick(function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + t.same(callbacks._called, chunks, 'called all callbacks') + }) + }) - t.same(actual, chunk, 'got the expected chunks ' + i) + 
forEach(chunks, function (chunk, i) { + tw.write(chunk, callbacks['callback-' + i]) }) + tw.end() }) - forEach(chunks, function (chunk, i) { - const enc = encodings[i % encodings.length] - chunk = Buffer.from(chunk) - tw.write(chunk.toString(enc), enc) - }) - tw.end() -}) - -test('write callbacks', function (t) { - t.plan(2) - - const callbacks = chunks - .map(function (chunk, i) { - return [ - i, - function (er) { - callbacks._called[i] = chunk - } - ] - }) - .reduce(function (set, x) { - set['callback-' + x[0]] = x[1] - return set - }, {}) - callbacks._called = [] + test('end callback', function (t) { + t.plan(1) - const tw = new TestWriter({ - highWaterMark: 100 + const tw = new TestWriter() + tw.end(() => { + t.ok(true) + }) }) - tw.on('finish', function () { - process.nextTick(function () { - t.same(tw.buffer, chunks, 'got chunks in the right order') - t.same(callbacks._called, chunks, 'called all callbacks') + test('end callback with chunk', function (t) { + t.plan(1) + + const tw = new TestWriter() + tw.end(Buffer.from('hello world'), () => { + t.ok(true) }) }) - forEach(chunks, function (chunk, i) { - tw.write(chunk, callbacks['callback-' + i]) + test('end callback with chunk and encoding', function (t) { + t.plan(1) + + const tw = new TestWriter() + tw.end('hello world', 'ascii', () => { + t.ok(true) + }) }) - tw.end() -}) -test('end callback', function (t) { - t.plan(1) + test('end callback after .write() call', function (t) { + t.plan(1) - const tw = new TestWriter() - tw.end(() => { - t.ok(true) + const tw = new TestWriter() + tw.write(Buffer.from('hello world')) + tw.end(() => { + t.ok(true) + }) }) -}) -test('end callback with chunk', function (t) { - t.plan(1) + test('end callback called after write callback', function (t) { + t.plan(1) - const tw = new TestWriter() - tw.end(Buffer.from('hello world'), () => { - t.ok(true) + const tw = new TestWriter() + let writeCalledback = false + tw.write(Buffer.from('hello world'), function () { + writeCalledback = true + }) + tw.end(function () { + t.equal(writeCalledback, true) + }) }) -}) -test('end callback with chunk and encoding', function (t) { - t.plan(1) + test('encoding should be ignored for buffers', function (t) { + t.plan(1) - const tw = new TestWriter() - tw.end('hello world', 'ascii', () => { - t.ok(true) + const tw = new Writable() + const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb' + tw._write = function (chunk, encoding, cb) { + t.equal(chunk.toString('hex'), hex) + } + const buf = Buffer.from(hex, 'hex') + tw.write(buf, 'binary') }) -}) -test('end callback after .write() call', function (t) { - t.plan(1) + test('writables are not pipable', function (t) { + t.plan(1) - const tw = new TestWriter() - tw.write(Buffer.from('hello world')) - tw.end(() => { - t.ok(true) + const w = new Writable({ autoDestroy: false }) + w._write = function () {} + let gotError = false + w.on('error', function (er) { + gotError = true + }) + w.pipe(process.stdout) + t.ok(gotError) }) -}) -test('end callback called after write callback', function (t) { - t.plan(1) + test('duplexes are pipable', function (t) { + t.plan(1) - const tw = new TestWriter() - let writeCalledback = false - tw.write(Buffer.from('hello world'), function () { - writeCalledback = true - }) - tw.end(function () { - t.equal(writeCalledback, true) + const d = new Duplex() + d._read = function () {} + d._write = function () {} + let gotError = false + d.on('error', function (er) { + gotError = true + }) + d.pipe(process.stdout) + t.notOk(gotError) }) -}) -test('encoding should 
be ignored for buffers', function (t) { - t.plan(1) + test('end(chunk) two times is an error', function (t) { + t.plan(2) - const tw = new Writable() - const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb' - tw._write = function (chunk, encoding, cb) { - t.equal(chunk.toString('hex'), hex) - } - const buf = Buffer.from(hex, 'hex') - tw.write(buf, 'binary') -}) - -test('writables are not pipable', function (t) { - t.plan(1) - - const w = new Writable({ autoDestroy: false }) - w._write = function () {} - let gotError = false - w.on('error', function (er) { - gotError = true - }) - w.pipe(process.stdout) - t.ok(gotError) -}) - -test('duplexes are pipable', function (t) { - t.plan(1) - - const d = new Duplex() - d._read = function () {} - d._write = function () {} - let gotError = false - d.on('error', function (er) { - gotError = true - }) - d.pipe(process.stdout) - t.notOk(gotError) -}) - -test('end(chunk) two times is an error', function (t) { - t.plan(2) - - const w = new Writable() - w._write = function () {} - let gotError = false - w.on('error', function (er) { - gotError = true - t.equal(er.message, 'write after end') - }) - w.end('this is the end') - w.end('and so is this') - process.nextTick(function () { - t.ok(gotError) - }) -}) - -test('dont end while writing', function (t) { - t.plan(2) - - const w = new Writable() - let wrote = false - w._write = function (chunk, e, cb) { - t.notOk(this.writing) - wrote = true - this.writing = true - setTimeout(function () { - this.writing = false - cb() + const w = new Writable() + w._write = function () {} + let gotError = false + w.on('error', function (er) { + gotError = true + t.equal(er.message, 'write after end') + }) + w.end('this is the end') + w.end('and so is this') + process.nextTick(function () { + t.ok(gotError) }) - } - w.on('finish', function () { - t.ok(wrote) }) - w.write(Buffer.alloc(0)) - w.end() -}) -test('finish does not come before write cb', function (t) { - t.plan(1) + test('dont end while writing', function (t) { + t.plan(2) - const w = new Writable() - let writeCb = false - w._write = function (chunk, e, cb) { - setTimeout(function () { - writeCb = true - cb() - }, 10) - } - w.on('finish', function () { - t.ok(writeCb) + const w = new Writable() + let wrote = false + w._write = function (chunk, e, cb) { + t.notOk(this.writing) + wrote = true + this.writing = true + setTimeout(function () { + this.writing = false + cb() + }) + } + w.on('finish', function () { + t.ok(wrote) + }) + w.write(Buffer.alloc(0)) + w.end() }) - w.write(Buffer.alloc(0)) - w.end() -}) -test('finish does not come before sync _write cb', function (t) { - t.plan(1) + test('finish does not come before write cb', function (t) { + t.plan(1) - const w = new Writable() - let writeCb = false - w._write = function (chunk, e, cb) { - cb() - } - w.on('finish', function () { - t.ok(writeCb) - }) - w.write(Buffer.alloc(0), function (er) { - writeCb = true + const w = new Writable() + let writeCb = false + w._write = function (chunk, e, cb) { + setTimeout(function () { + writeCb = true + cb() + }, 10) + } + w.on('finish', function () { + t.ok(writeCb) + }) + w.write(Buffer.alloc(0)) + w.end() }) - w.end() -}) -test('finish is emitted if last chunk is empty', function (t) { - t.plan(1) + test('finish does not come before sync _write cb', function (t) { + t.plan(1) - const w = new Writable() - w._write = function (chunk, e, cb) { - process.nextTick(cb) - } - w.on('finish', () => { - t.ok(true) + const w = new Writable() + let writeCb = false + w._write = function 
(chunk, e, cb) { + cb() + } + w.on('finish', function () { + t.ok(writeCb) + }) + w.write(Buffer.alloc(0), function (er) { + writeCb = true + }) + w.end() }) - w.write(Buffer.alloc(1)) - w.end(Buffer.alloc(0)) -}) + test('finish is emitted if last chunk is empty', function (t) { + t.plan(1) -function forEach(xs, f) { - for (let i = 0, l = xs.length; i < l; i++) { - f(xs[i], i) + const w = new Writable() + w._write = function (chunk, e, cb) { + process.nextTick(cb) + } + w.on('finish', () => { + t.ok(true) + }) + + w.write(Buffer.alloc(1)) + w.end(Buffer.alloc(0)) + }) + + function forEach(xs, f) { + for (let i = 0, l = xs.length; i < l; i++) { + f(xs[i], i) + } } } + +module.exports[kReadableStreamSuiteName] = 'stream2-writable' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/src/test/browser/test-stream3-pause-then-read.js b/src/test/browser/test-stream3-pause-then-read.js index f7b22f7b46..6b4399f656 100644 --- a/src/test/browser/test-stream3-pause-then-read.js +++ b/src/test/browser/test-stream3-pause-then-read.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') const { Readable, Writable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName } = require('./symbols') -test('pause then read', function (t) { +module.exports = function (t) { t.plan(7) const totalChunks = 100 @@ -144,4 +144,6 @@ test('pause then read', function (t) { }) r.pipe(w) } -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream3-pause-then-read' diff --git a/src/util.js b/src/util.js index 745a7acf9e..5f788409a1 100644 --- a/src/util.js +++ b/src/util.js @@ -1,24 +1,48 @@ 'use strict' +const bufferModule = require('buffer') + const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor +const Blob = globalThis.Blob || bufferModule.Blob +/* eslint-disable indent */ +const isBlob = + typeof Blob !== 'undefined' + ? 
function isBlob(b) { + // eslint-disable-next-line indent + return b instanceof Blob + } + : function isBlob(b) { + return false + } +/* eslint-enable indent */ -if (typeof Blob === 'undefined') { - let { Blob } = require('buffer') +// This is a simplified version of AggregateError +class AggregateError extends Error { + constructor(errors) { + if (!Array.isArray(errors)) { + throw new TypeError(`Expected input to be an Array, got ${typeof errors}`) + } - if (typeof Blob === 'undefined') { - Blob = require('blob-polyfill').Blob - } + let message = '' + for (let i = 0; i < errors.length; i++) { + message += ` ${errors[i].stack}\n` + } - globalThis.Blob = Blob + super(message) + this.name = 'AggregateError' + this.errors = errors + } } module.exports = { + AggregateError, once(callback) { let called = false return function (...args) { if (called) { return } + called = true callback.apply(this, args) } @@ -26,14 +50,29 @@ module.exports = { createDeferredPromise: function () { let resolve let reject + // eslint-disable-next-line promise/param-names const promise = new Promise((res, rej) => { resolve = res reject = rej }) - return { promise, resolve, reject } + return { + promise, + resolve, + reject + } + }, + promisify(fn) { + return new Promise((resolve, reject) => { + fn((err, ...args) => { + if (err) { + return reject(err) + } + + return resolve(...args) + }) + }) }, - // All following functions are just used in browser debuglog() { return function () {} }, @@ -46,34 +85,54 @@ module.exports = { return replacement.toFixed(6) } else if (type === 'j') { return JSON.stringify(replacement) + } else if (type === 's' && typeof replacement === 'object') { + const ctor = replacement.constructor !== Object ? replacement.constructor.name : '' + return `${ctor} {}`.trim() } else { return replacement.toString() } }) }, - promisify(fn) { - return new Promise((resolve, reject) => { - fn((err, ...args) => { - if (err) { - return reject(err) + inspect(value) { + // Vastly simplified version of https://nodejs.org/api/util.html#utilinspectobject-options + switch (typeof value) { + case 'string': + if (value.includes("'")) { + if (!value.includes('"')) { + return `"${value}"` + } else if (!value.includes('`') && !value.includes('${')) { + return `\`${value}\`` + } } - return resolve(...args) - }) - }) + + return `'${value}'` + case 'number': + if (isNaN(value)) { + return 'NaN' + } else if (Object.is(value, -0)) { + return String(value) + } + + return value + case 'bigint': + return `${String(value)}n` + case 'boolean': + case 'undefined': + return String(value) + case 'object': + return '{}' + } }, - inspect: require('object-inspect'), types: { isAsyncFunction(fn) { return fn instanceof AsyncFunction }, + isArrayBufferView(arr) { return ArrayBuffer.isView(arr) } }, - isBlob(blob) { - // eslint-disable-next-line no-undef - return blob instanceof Blob - } + isBlob } module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom') diff --git a/test/browser/fixtures/index.html b/test/browser/fixtures/index.html new file mode 100644 index 0000000000..603eef3c50 --- /dev/null +++ b/test/browser/fixtures/index.html @@ -0,0 +1,72 @@ + + + + + + +
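The helpers reworked in src/util.js above are consumed elsewhere in this series (for example, the browser harness awaits a deferred promise per sub-test). A rough usage sketch, assuming the built lib/ours/util path that the browser tests require; note that this promisify wraps a single callback invocation and returns a promise directly, unlike Node's util.promisify, which returns a promisified function:

const { once, createDeferredPromise, promisify } = require('../../lib/ours/util')

// `once` guards a callback so it can only fire a single time.
const finish = once((err) => console.log('finished', err ? err.message : 'ok'))
finish()
finish(new Error('ignored')) // no-op: already called

// `createDeferredPromise` exposes resolve/reject outside the executor.
const { promise, resolve } = createDeferredPromise()
setTimeout(() => resolve('done'), 10)
promise.then(console.log)

// Immediately invokes the given function with a node-style callback and resolves with its result.
promisify((cb) => setTimeout(() => cb(null, 42), 10)).then(console.log)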
+ + + + + diff --git a/test/browser/fixtures/rollup.browser.config.js b/test/browser/fixtures/rollup.browser.config.js new file mode 100644 index 0000000000..973797de77 --- /dev/null +++ b/test/browser/fixtures/rollup.browser.config.js @@ -0,0 +1,26 @@ +import commonjs from '@rollup/plugin-commonjs' +import inject from '@rollup/plugin-inject' +import nodeResolve from '@rollup/plugin-node-resolve' +import { resolve } from 'node:path' +import nodePolyfill from 'rollup-plugin-polyfill-node' +export default { + input: ['test/browser/test-browser.js'], + output: { + intro: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }', + file: 'tmp/rollup/suite.js', + format: 'iife', + name: 'readableStreamTestSuite' + }, + plugins: [ + commonjs(), + nodePolyfill(), + inject({ + process: resolve('node_modules/process-es6/browser.js'), + Buffer: [resolve('node_modules/buffer-es6/index.js'), 'Buffer'] + }), + nodeResolve({ + browser: true, + preferBuiltins: false + }) + ] +} diff --git a/test/browser/runner.mjs b/test/browser/runner.mjs new file mode 100644 index 0000000000..db7f0ac31b --- /dev/null +++ b/test/browser/runner.mjs @@ -0,0 +1,109 @@ +import { dirname, resolve } from 'node:path' +import { Readable } from 'node:stream' +import { chromium, firefox, webkit } from 'playwright' +import reporter from 'tap-mocha-reporter' +import Parser from 'tap-parser' + +const validBrowsers = ['chrome', 'firefox', 'safari', 'edge'] +const validBundlers = ['browserify', 'webpack', 'rollup'] + +function parseEnviroment() { + const headless = process.env.HEADLESS !== 'false' + const reporter = process.env.SKIP_REPORTER !== 'true' + + let [browser, bundler] = process.argv.slice(2, 4) + + if (!browser) { + browser = process.env.BROWSER + } + + if (!bundler) { + bundler = process.env.BUNDLER + } + + if (!validBrowsers.includes(browser) || !validBundlers.includes(bundler)) { + console.error('Usage: node runner.mjs [chrome|firefox|safari|edge] [browserify|webpack|rollup]') + console.error('\nYou can also use the BROWSER and BUNDLER environment variables') + process.exit(1) + } + + return { browser, bundler, headless, reporter } +} + +function createBrowser({ browser: id, headless }) { + switch (id) { + case 'firefox': + return firefox.launch({ headless }) + case 'safari': + return webkit.launch({ headless }) + case 'edge': + return chromium.launch({ headless, channel: 'msedge' }) + default: + return chromium.launch({ headless }) + } +} + +function setupTape(page, configuration) { + const output = new Readable({ read() {} }) + const parser = new Parser({ strict: true }) + + output.pipe(parser) + + if (configuration.reporter) { + output.pipe(reporter('spec')) + } + + parser.on('line', (line) => { + if (line !== '# readable-stream-finished\n') { + if (line.startsWith('# not ok')) { + process.exitCode = 1 + } + + if (!configuration.reporter) { + console.log(line.replace(/\n$/, '')) + } + + return + } + + output.push(null) + + if (configuration.headless) { + browser.close() + } + }) + + page.on('console', (msg) => { + if (msg.type() === 'error') { + console.error(`\x1b[31m\x1b[1mconsole.error:\x1b[0m ${msg.text()}\n`) + return + } + + output.push(msg.text() + '\n') + }) + + // Firefox in headless mode is showing an error even if onerror caught it. 
Disable in that case + if (!configuration.headless || configuration.browser !== 'firefox') { + page.on('pageerror', (err) => { + console.log('\x1b[31m\x1b[1m--- The browser thrown an uncaught error ---\x1b[0m') + console.log(err.stack) + + if (configuration.headless) { + console.log('\x1b[31m\x1b[1m--- Exiting with exit code 1 ---\x1b[0m') + process.exit(1) + } else { + process.exitCode = 1 + } + }) + } +} + +const configuration = parseEnviroment() +const browser = await createBrowser(configuration) +const page = await browser.newPage() +setupTape(page, configuration) + +// Execute the test suite +await page.goto( + `file://${resolve(dirname(new URL(import.meta.url).pathname), `../../tmp/${configuration.bundler}/index.html`)}` +) diff --git a/test/browser/symbols.js b/test/browser/symbols.js new file mode 100644 index 0000000000..8450b8f64c --- /dev/null +++ b/test/browser/symbols.js @@ -0,0 +1,6 @@ +'use strict' + +module.exports = { + kReadableStreamSuiteName: Symbol('readable-stream.suiteName'), + kReadableStreamSuiteHasMultipleTests: Symbol('readable-stream.suiteHasMultipleTests') +} diff --git a/test/browser/test-browser.js b/test/browser/test-browser.js new file mode 100644 index 0000000000..dab1b16de9 --- /dev/null +++ b/test/browser/test-browser.js @@ -0,0 +1,126 @@ +'use strict' + +const tape = require('tape') + +const { createDeferredPromise } = require('../../lib/ours/util') + +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + +let totalTests = 0 +let completed = 0 +let failed = 0 + +async function test(rootName, fn) { + // Gather all tests in the file + const tests = {} + + function addTests(name, fn) { + tests[`${rootName} - ${name}`] = fn + } + + if (fn[kReadableStreamSuiteHasMultipleTests]) { + fn(addTests) + } else { + tests[rootName] = fn + } // Execute each test in a separate harness and then output overall results + + for (const [name, subtest] of Object.entries(tests)) { + const currentIndex = ++totalTests + const harness = tape.createHarness() + const { promise, resolve } = createDeferredPromise() + const messages = [`# Subtest: ${name}`] + harness.createStream().on('data', function (row) { + if (row.startsWith('TAP version') || row.match(new RegExp(`^# (?:${name})`))) { + return + } + + messages.push(row.trim().replace(/^/gm, ' ')) + }) + harness.onFinish(() => { + const success = harness._exitCode === 0 + messages.push(`${success ? 'ok' : 'not ok'} ${currentIndex} - ${name}`) + console.log(messages.join('\n')) + completed++ + + if (!success) { + failed++ + } + + resolve() + }) + harness(name, subtest) + await promise + } +} + +async function runTests(suites) { + // Setup an interval + const interval = setInterval(() => { + if (completed < totalTests) { + return + } + + clearInterval(interval) + console.log(`1..${totalTests}`) + console.log(`# tests ${totalTests}`) + console.log(`# pass ${completed - failed}`) + console.log(`# fail ${failed}`) + console.log(`# ${failed === 0 ? 
'ok' : 'not ok'}`) // This line is used by the playwright script to detect we're done + + console.log('# readable-stream-finished') + }, 100) // Execute each test serially, to avoid side-effects errors when dealing with global error handling + + for (const suite of suites) { + await test(suite[kReadableStreamSuiteName], suite) + } +} // Important: Do not try to make the require dynamic because bundlers will not like it + +runTests([ + require('./test-stream-big-packet'), + require('./test-stream-big-push'), + require('./test-stream-duplex'), + require('./test-stream-end-paused'), + require('./test-stream-finished'), + require('./test-stream-ispaused'), + require('./test-stream-pipe-after-end'), + require('./test-stream-pipe-cleanup-pause'), + require('./test-stream-pipe-cleanup'), + require('./test-stream-pipe-error-handling'), + require('./test-stream-pipe-event'), + require('./test-stream-pipe-without-listenerCount'), + require('./test-stream-pipeline'), + require('./test-stream-push-order'), + require('./test-stream-push-strings'), + require('./test-stream-readable-constructor-set-methods'), + require('./test-stream-readable-event'), + require('./test-stream-sync-write'), + require('./test-stream-transform-constructor-set-methods'), + require('./test-stream-transform-objectmode-falsey-value'), + require('./test-stream-transform-split-objectmode'), + require('./test-stream-unshift-empty-chunk'), + require('./test-stream-unshift-read-race'), + require('./test-stream-writable-change-default-encoding'), + require('./test-stream-writable-constructor-set-methods'), + require('./test-stream-writable-decoded-encoding'), + require('./test-stream-writev'), + require('./test-stream2-base64-single-char-read-end'), + require('./test-stream2-compatibility'), + require('./test-stream2-large-read-stall'), + require('./test-stream2-objects'), + require('./test-stream2-pipe-error-handling'), + require('./test-stream2-pipe-error-once-listener'), + require('./test-stream2-push'), + require('./test-stream2-readable-empty-buffer-no-eof'), + require('./test-stream2-readable-from-list'), + require('./test-stream2-readable-legacy-drain'), + require('./test-stream2-readable-non-empty-end'), + require('./test-stream2-readable-wrap-empty'), + require('./test-stream2-readable-wrap'), + require('./test-stream2-set-encoding'), + require('./test-stream2-transform'), + require('./test-stream2-unpipe-drain'), + require('./test-stream2-writable'), + require('./test-stream3-pause-then-read') +]).catch((e) => { + console.error(e) +}) diff --git a/test/browser/test-stream-big-packet.js b/test/browser/test-stream-big-packet.js index 8099aa3bfd..f01f59f689 100644 --- a/test/browser/test-stream-big-packet.js +++ b/test/browser/test-stream-big-packet.js @@ -1,12 +1,12 @@ 'use strict' -const test = require('tape') - const inherits = require('inherits') const { Transform } = require('../../lib/ours/index') -test('big packet', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(3) let passed = false @@ -68,4 +68,6 @@ test('big packet', function (t) { return -1 } -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-big-packet' diff --git a/test/browser/test-stream-big-push.js b/test/browser/test-stream-big-push.js index b0fba6e365..01398e63e9 100644 --- a/test/browser/test-stream-big-push.js +++ b/test/browser/test-stream-big-push.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Readable } = require('../../lib/ours/index') -test('big 
push', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(10) const str = 'asdfasdfasdfasdfasdf' const r = new Readable({ @@ -59,4 +59,6 @@ test('big push', function (t) { t.ok(ended) t.equal(reads, 2) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-big-push' diff --git a/test/browser/test-stream-duplex.js b/test/browser/test-stream-duplex.js index 1f56ee3f31..1d768bb393 100644 --- a/test/browser/test-stream-duplex.js +++ b/test/browser/test-stream-duplex.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Duplex } = require('../../lib/ours/index') -test('duplex', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(4) const stream = new Duplex({ objectMode: true @@ -35,4 +35,6 @@ test('duplex', function (t) { val: 2 }) stream.push(null) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-duplex' diff --git a/test/browser/test-stream-end-paused.js b/test/browser/test-stream-end-paused.js index 4130ceaf9d..722db644b2 100644 --- a/test/browser/test-stream-end-paused.js +++ b/test/browser/test-stream-end-paused.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Readable } = require('../../lib/ours/index') -test('end pause', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(2) const stream = new Readable() let calledRead = false @@ -25,4 +25,6 @@ test('end pause', function (t) { }) stream.resume() }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-end-paused' diff --git a/test/browser/test-stream-finished.js b/test/browser/test-stream-finished.js index 73bc4b63ba..cec12616e2 100644 --- a/test/browser/test-stream-finished.js +++ b/test/browser/test-stream-finished.js @@ -1,52 +1,57 @@ 'use strict' -const test = require('tape') - const { Writable, Readable, Transform, finished } = require('../../lib/ours/index') -test('readable finished', function (t) { - t.plan(1) - const rs = new Readable({ - read: function read() {} - }) - finished(rs, (err) => { - t.ifErr(err) - }) - rs.push(null) - rs.resume() -}) -test('writable finished', function (t) { - t.plan(1) - const ws = new Writable({ - write: function write(data, enc, cb) { - cb() - } - }) - finished(ws, (err) => { - t.ifErr(err) - }) - ws.end() -}) -test('transform finished', function (t) { - t.plan(3) - const tr = new Transform({ - transform: function transform(data, enc, cb) { - cb() - } - }) - let finish = false - let ended = false - tr.on('end', function () { - ended = true - }) - tr.on('finish', function () { - finish = true - }) - finished(tr, (err) => { - t.ifErr(err) - t.ok(finish) - t.ok(ended) - }) - tr.end() - tr.resume() -}) +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + +module.exports = function (test) { + test('readable finished', function (t) { + t.plan(1) + const rs = new Readable({ + read: function read() {} + }) + finished(rs, (err) => { + t.ifErr(err) + }) + rs.push(null) + rs.resume() + }) + test('writable finished', function (t) { + t.plan(1) + const ws = new Writable({ + write: function write(data, enc, cb) { + cb() + } + }) + finished(ws, (err) => { + t.ifErr(err) + }) + ws.end() + }) + test('transform finished', function (t) { + t.plan(3) + const tr = new Transform({ + transform: function transform(data, enc, cb) { + cb() + } + }) + let finish = false + let ended = false + tr.on('end', function () 
{ + ended = true + }) + tr.on('finish', function () { + finish = true + }) + finished(tr, (err) => { + t.ifErr(err) + t.ok(finish) + t.ok(ended) + }) + tr.end() + tr.resume() + }) +} + +module.exports[kReadableStreamSuiteName] = 'stream-finished' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream-ispaused.js b/test/browser/test-stream-ispaused.js index f30337cad2..3cc378e909 100644 --- a/test/browser/test-stream-ispaused.js +++ b/test/browser/test-stream-ispaused.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const stream = require('../../lib/ours/index') -test('is paused', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(4) const readable = new stream.Readable() // _read is a noop, here. @@ -19,4 +19,6 @@ test('is paused', function (t) { t.ok(readable.isPaused()) readable.resume() t.notOk(readable.isPaused()) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-ispaused' diff --git a/test/browser/test-stream-pipe-after-end.js b/test/browser/test-stream-pipe-after-end.js index 10270b44f3..ba65ee54ab 100644 --- a/test/browser/test-stream-pipe-after-end.js +++ b/test/browser/test-stream-pipe-after-end.js @@ -1,12 +1,12 @@ 'use strict' -const test = require('tape') - const inherits = require('inherits') const { Readable, Writable } = require('../../lib/ours/index') -test('pipe after end', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(4) function TestReadable(opt) { @@ -66,4 +66,6 @@ test('pipe after end', function (t) { }) piper.pipe(w) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-pipe-after-end' diff --git a/test/browser/test-stream-pipe-cleanup-pause.js b/test/browser/test-stream-pipe-cleanup-pause.js index 36ad573dfa..907551ed44 100644 --- a/test/browser/test-stream-pipe-cleanup-pause.js +++ b/test/browser/test-stream-pipe-cleanup-pause.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const stream = require('../../lib/ours/index') -test('pipe cleanup pause', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(3) const reader = new stream.Readable() const writer1 = new stream.Writable() @@ -40,4 +40,6 @@ test('pipe cleanup pause', function (t) { reader.pipe(writer1) reader.push(buffer) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-pipe-cleanup-pause' diff --git a/test/browser/test-stream-pipe-cleanup.js b/test/browser/test-stream-pipe-cleanup.js index 34c2cd3496..ab8d981f6a 100644 --- a/test/browser/test-stream-pipe-cleanup.js +++ b/test/browser/test-stream-pipe-cleanup.js @@ -1,13 +1,13 @@ 'use strict' // This test asserts that Stream.prototype.pipe does not leave listeners // hanging on the source or dest. 
-const test = require('tape') - const inherits = require('inherits') const { Stream } = require('../../lib/ours/index') -test('pipe cleanup', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(27) if (/^v0\.8\./.test(process.version)) { @@ -121,4 +121,6 @@ test('pipe cleanup', function (t) { t.equal(w.listeners('end').length, 0) t.equal(w.listeners('close').length, 0) d.end() -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-pipe-cleanup' diff --git a/test/browser/test-stream-pipe-error-handling.js b/test/browser/test-stream-pipe-error-handling.js index 0e4bdb182e..826ec775ae 100644 --- a/test/browser/test-stream-pipe-error-handling.js +++ b/test/browser/test-stream-pipe-error-handling.js @@ -1,98 +1,104 @@ 'use strict' -const test = require('tape') - const { Readable, Writable, Stream } = require('../../lib/ours/index') -test('Error Listener Catches', function (t) { - t.plan(1) - const source = new Stream() - const dest = new Stream() +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') - source._read = function () {} +module.exports = function (test) { + test('Error Listener Catches', function (t) { + t.plan(1) + const source = new Stream() + const dest = new Stream() - source.pipe(dest) - let gotErr = null - source.on('error', function (err) { - gotErr = err - }) - const err = new Error('This stream turned into bacon.') - source.emit('error', err) - t.strictEqual(gotErr, err) -}) -test('Error WithoutListener Throws', function (t) { - t.plan(1) - const source = new Stream() - const dest = new Stream() - - source._read = function () {} - - source.pipe(dest) - const err = new Error('This stream turned into bacon.') - let gotErr = null - - try { - source.emit('error', err) - } catch (e) { - gotErr = e - } - - t.strictEqual(gotErr, err) -}) -test('Error With Removed Listener Throws', function (t) { - t.plan(2) - const onerror = global.onerror - const r = new Readable() - const w = new Writable() - let removed = false - let caught = false - - global.onerror = () => { - t.notOk(caught) - global.onerror = onerror - } - - r._read = function () { - setTimeout(function () { - t.ok(removed) - w.emit('error', new Error('fail')) - }) - } - - w.on('error', myOnError) - r.pipe(w) - w.removeListener('error', myOnError) - removed = true - - function myOnError(er) { - caught = true - } -}) -test('Error Listener Catches When Wrong Listener Is Removed', function (t) { - t.plan(2) - const r = new Readable() - const w = new Writable() - let removed = false - let caught = false - - r._read = function () { - setTimeout(function () { - t.ok(removed) - w.emit('error', new Error('fail')) + source._read = function () {} + + source.pipe(dest) + let gotErr = null + source.on('error', function (err) { + gotErr = err }) - } + const err = new Error('This stream turned into bacon.') + source.emit('error', err) + t.strictEqual(gotErr, err) + }) + test('Error WithoutListener Throws', function (t) { + t.plan(1) + const source = new Stream() + const dest = new Stream() + + source._read = function () {} - w.on('error', myOnError) + source.pipe(dest) + const err = new Error('This stream turned into bacon.') + let gotErr = null - w._write = function () {} + try { + source.emit('error', err) + } catch (e) { + gotErr = e + } - r.pipe(w) // Removing some OTHER random listener should not do anything + t.strictEqual(gotErr, err) + }) + test('Error With Removed Listener Throws', function (t) { + t.plan(2) + const 
onerror = global.onerror + const r = new Readable() + const w = new Writable() + let removed = false + let caught = false + + global.onerror = () => { + t.notOk(caught) + global.onerror = onerror + return true + } + + r._read = function () { + setTimeout(function () { + t.ok(removed) + w.emit('error', new Error('fail')) + }) + } + + w.on('error', myOnError) + r.pipe(w) + w.removeListener('error', myOnError) + removed = true + + function myOnError(er) { + caught = true + } + }) + test('Error Listener Catches When Wrong Listener Is Removed', function (t) { + t.plan(2) + const r = new Readable() + const w = new Writable() + let removed = false + let caught = false + + r._read = function () { + setTimeout(function () { + t.ok(removed) + w.emit('error', new Error('fail')) + }) + } + + w.on('error', myOnError) - w.removeListener('error', function () {}) - removed = true + w._write = function () {} + + r.pipe(w) // Removing some OTHER random listener should not do anything + + w.removeListener('error', function () {}) + removed = true + + function myOnError(er) { + t.notOk(caught) + caught = true + } + }) +} - function myOnError(er) { - t.notOk(caught) - caught = true - } -}) +module.exports[kReadableStreamSuiteName] = 'stream-pipe-error-handling' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream-pipe-event.js b/test/browser/test-stream-pipe-event.js index e173829cf7..4ec67cecc2 100644 --- a/test/browser/test-stream-pipe-event.js +++ b/test/browser/test-stream-pipe-event.js @@ -1,12 +1,12 @@ 'use strict' -const test = require('tape') - const inherits = require('inherits') const { Stream } = require('../../lib/ours/index') -test('pipe event', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(1) function Writable() { @@ -33,4 +33,6 @@ test('pipe event', function (t) { r.pipe(w) t.ok(passed) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-pipe-event' diff --git a/test/browser/test-stream-pipe-without-listenerCount.js b/test/browser/test-stream-pipe-without-listenerCount.js index 9b9790560f..ecd414a2f4 100644 --- a/test/browser/test-stream-pipe-without-listenerCount.js +++ b/test/browser/test-stream-pipe-without-listenerCount.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Stream } = require('../../lib/ours/index') -test('pipe without listenerCount on read', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(1) const r = new Stream({ read: function () {} @@ -15,4 +15,6 @@ test('pipe without listenerCount on read', function (t) { r.emit('error', new Error('Readable Error')) }) t.throws(() => r.pipe(w), 'TypeError: this.listenerCount is not a function') -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-pipe-without-listenerCount' diff --git a/test/browser/test-stream-pipeline.js b/test/browser/test-stream-pipeline.js index ff20df05ae..f1e2c18b33 100644 --- a/test/browser/test-stream-pipeline.js +++ b/test/browser/test-stream-pipeline.js @@ -1,97 +1,102 @@ 'use strict' -const test = require('tape') - const { Readable, Writable, pipeline } = require('../../lib/ours/index') -test('pipeline', function (t) { - t.plan(3) - let finished = false - const processed = [] - const expected = [Buffer.from('a'), Buffer.from('b'), Buffer.from('c')] - const read = new Readable({ - read: function read() {} - }) - const write = new Writable({ - write: function write(data, enc, cb) { - 
processed.push(data) - cb() +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + +module.exports = function (test) { + test('pipeline', function (t) { + t.plan(3) + let finished = false + const processed = [] + const expected = [Buffer.from('a'), Buffer.from('b'), Buffer.from('c')] + const read = new Readable({ + read: function read() {} + }) + const write = new Writable({ + write: function write(data, enc, cb) { + processed.push(data) + cb() + } + }) + write.on('finish', function () { + finished = true + }) + + for (let i = 0; i < expected.length; i++) { + read.push(expected[i]) } - }) - write.on('finish', function () { - finished = true - }) - for (let i = 0; i < expected.length; i++) { - read.push(expected[i]) - } + read.push(null) + pipeline(read, write, (err) => { + t.ifErr(err) + t.ok(finished) + t.deepEqual(processed, expected) + }) + }) + test('pipeline missing args', function (t) { + t.plan(3) - read.push(null) - pipeline(read, write, (err) => { - t.ifErr(err) - t.ok(finished) - t.deepEqual(processed, expected) - }) -}) -test('pipeline missing args', function (t) { - t.plan(3) + const _read = new Readable({ + read: function read() {} + }) - const _read = new Readable({ - read: function read() {} - }) + t.throws(function () { + pipeline(_read, function () {}) + }) + t.throws(function () { + pipeline(function () {}) + }) + t.throws(function () { + pipeline() + }) + }) + test('pipeline error', function (t) { + t.plan(1) - t.throws(function () { - pipeline(_read, function () {}) - }) - t.throws(function () { - pipeline(function () {}) - }) - t.throws(function () { - pipeline() - }) -}) -test('pipeline error', function (t) { - t.plan(1) + const _read2 = new Readable({ + read: function read() {} + }) - const _read2 = new Readable({ - read: function read() {} - }) + const _write = new Writable({ + write: function write(data, enc, cb) { + cb() + } + }) - const _write = new Writable({ - write: function write(data, enc, cb) { - cb() - } - }) + _read2.push('data') - _read2.push('data') + setImmediate(function () { + return _read2.destroy() + }) + pipeline(_read2, _write, (err) => { + t.equal(err.message, 'Premature close') + }) + }) + test('pipeline destroy', function (t) { + t.plan(2) - setImmediate(function () { - return _read2.destroy() - }) - pipeline(_read2, _write, (err) => { - t.equal(err.message, 'Premature close') - }) -}) -test('pipeline destroy', function (t) { - t.plan(2) + const _read3 = new Readable({ + read: function read() {} + }) - const _read3 = new Readable({ - read: function read() {} - }) + const _write2 = new Writable({ + write: function write(data, enc, cb) { + cb() + } + }) - const _write2 = new Writable({ - write: function write(data, enc, cb) { - cb() - } - }) + _read3.push('data') - _read3.push('data') + setImmediate(function () { + return _read3.destroy(new Error('kaboom')) + }) + const dst = pipeline(_read3, _write2, (err) => { + t.equal(err.message, 'kaboom') + }) + t.equal(dst, _write2) + }) +} - setImmediate(function () { - return _read3.destroy(new Error('kaboom')) - }) - const dst = pipeline(_read3, _write2, (err) => { - t.equal(err.message, 'kaboom') - }) - t.equal(dst, _write2) -}) +module.exports[kReadableStreamSuiteName] = 'stream-pipeline' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream-push-order.js b/test/browser/test-stream-push-order.js index 5bb19aad90..430182c3c9 100644 --- a/test/browser/test-stream-push-order.js +++ 
b/test/browser/test-stream-push-order.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Readable } = require('../../lib/ours/index') -test('push order', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(1) const s = new Readable({ highWaterMark: 20, @@ -28,4 +28,6 @@ test('push order', function (t) { setTimeout(function () { t.equals(s._readableState.buffer.join(','), '1,2,3,4,5,6') }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-push-order' diff --git a/test/browser/test-stream-push-strings.js b/test/browser/test-stream-push-strings.js index 4cb45d9e68..fb20f3cd39 100644 --- a/test/browser/test-stream-push-strings.js +++ b/test/browser/test-stream-push-strings.js @@ -1,12 +1,12 @@ 'use strict' -const test = require('tape') - const inherits = require('inherits') const { Readable } = require('../../lib/ours/index') -test('push strings', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(2) function MyStream(options) { @@ -58,4 +58,6 @@ test('push strings', function (t) { t.equal(ms._chunks, -1) t.deepEqual(results, expect) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-push-strings' diff --git a/test/browser/test-stream-readable-constructor-set-methods.js b/test/browser/test-stream-readable-constructor-set-methods.js index be77969f44..8461661d9d 100644 --- a/test/browser/test-stream-readable-constructor-set-methods.js +++ b/test/browser/test-stream-readable-constructor-set-methods.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Readable } = require('../../lib/ours/index') -test('readable constructor set methods', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(2) let _readCalled = false @@ -21,4 +21,6 @@ test('readable constructor set methods', function (t) { t.equal(r._read, _read) t.ok(_readCalled) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-readable-constructor-set-methods' diff --git a/test/browser/test-stream-readable-event.js b/test/browser/test-stream-readable-event.js index 124f077dda..8784698a30 100644 --- a/test/browser/test-stream-readable-event.js +++ b/test/browser/test-stream-readable-event.js @@ -1,91 +1,96 @@ 'use strict' -const test = require('tape') - const { Readable } = require('../../lib/ours/index') -test('readable events - first', (t) => { - t.plan(3) // First test, not reading when the readable is added. - // make sure that on('readable', ...) triggers a readable event. +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') - const r = new Readable({ - highWaterMark: 3 - }) - let _readCalled = false +module.exports = function (test) { + test('readable events - first', (t) => { + t.plan(3) // First test, not reading when the readable is added. + // make sure that on('readable', ...) triggers a readable event. - r._read = function (n) { - _readCalled = true - } // This triggers a 'readable' event, which is lost. 
+ const r = new Readable({ + highWaterMark: 3 + }) + let _readCalled = false - r.push(Buffer.from('blerg')) - let caughtReadable = false - setTimeout(function () { - // we're testing what we think we are - t.notOk(r._readableState.reading) - r.on('readable', function () { - caughtReadable = true - setTimeout(function () { - // we're testing what we think we are - t.notOk(_readCalled) - t.ok(caughtReadable) + r._read = function (n) { + _readCalled = true + } // This triggers a 'readable' event, which is lost. + + r.push(Buffer.from('blerg')) + let caughtReadable = false + setTimeout(function () { + // we're testing what we think we are + t.notOk(r._readableState.reading) + r.on('readable', function () { + caughtReadable = true + setTimeout(function () { + // we're testing what we think we are + t.notOk(_readCalled) + t.ok(caughtReadable) + }) }) }) }) -}) -test('readable events - second', (t) => { - t.plan(3) // second test, make sure that readable is re-emitted if there's - // already a length, while it IS reading. + test('readable events - second', (t) => { + t.plan(3) // second test, make sure that readable is re-emitted if there's + // already a length, while it IS reading. - const r = new Readable({ - highWaterMark: 3 - }) - let _readCalled = false + const r = new Readable({ + highWaterMark: 3 + }) + let _readCalled = false - r._read = function (n) { - _readCalled = true - } // This triggers a 'readable' event, which is lost. + r._read = function (n) { + _readCalled = true + } // This triggers a 'readable' event, which is lost. - r.push(Buffer.from('bl')) - let caughtReadable = false - setTimeout(function () { - // assert we're testing what we think we are - t.ok(r._readableState.reading) - r.on('readable', function () { - caughtReadable = true - setTimeout(function () { - // we're testing what we think we are - t.ok(_readCalled) - t.ok(caughtReadable) + r.push(Buffer.from('bl')) + let caughtReadable = false + setTimeout(function () { + // assert we're testing what we think we are + t.ok(r._readableState.reading) + r.on('readable', function () { + caughtReadable = true + setTimeout(function () { + // we're testing what we think we are + t.ok(_readCalled) + t.ok(caughtReadable) + }) }) }) }) -}) -test('readable events - third', (t) => { - t.plan(3) // Third test, not reading when the stream has not passed - // the highWaterMark but *has* reached EOF. + test('readable events - third', (t) => { + t.plan(3) // Third test, not reading when the stream has not passed + // the highWaterMark but *has* reached EOF. - const r = new Readable({ - highWaterMark: 30 - }) - let _readCalled = false + const r = new Readable({ + highWaterMark: 30 + }) + let _readCalled = false - r._read = function (n) { - _readCalled = true - } // This triggers a 'readable' event, which is lost. + r._read = function (n) { + _readCalled = true + } // This triggers a 'readable' event, which is lost. 
- r.push(Buffer.from('blerg')) - r.push(null) - let caughtReadable = false - setTimeout(function () { - // assert we're testing what we think we are - t.notOk(r._readableState.reading) - r.on('readable', function () { - caughtReadable = true - setTimeout(function () { - // we're testing what we think we are - t.notOk(_readCalled) - t.ok(caughtReadable) + r.push(Buffer.from('blerg')) + r.push(null) + let caughtReadable = false + setTimeout(function () { + // assert we're testing what we think we are + t.notOk(r._readableState.reading) + r.on('readable', function () { + caughtReadable = true + setTimeout(function () { + // we're testing what we think we are + t.notOk(_readCalled) + t.ok(caughtReadable) + }) }) }) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-readable-event' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream-sync-write.js b/test/browser/test-stream-sync-write.js index 9195853a0d..2ab9b4e512 100644 --- a/test/browser/test-stream-sync-write.js +++ b/test/browser/test-stream-sync-write.js @@ -1,12 +1,12 @@ 'use strict' -const test = require('tape') - const inherits = require('inherits') const { Writable } = require('../../lib/ours/index') -test('should bea ble to write sync', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(2) let internalCalls = 0 let externalCalls = 0 @@ -47,4 +47,6 @@ test('should bea ble to write sync', function (t) { t.equal(internalCalls, 2000) t.equal(externalCalls, 2000) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-sync-write' diff --git a/test/browser/test-stream-transform-constructor-set-methods.js b/test/browser/test-stream-transform-constructor-set-methods.js index d9ff9e39fd..257224c122 100644 --- a/test/browser/test-stream-transform-constructor-set-methods.js +++ b/test/browser/test-stream-transform-constructor-set-methods.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Transform } = require('../../lib/ours/index') -test('transform constructor set methods', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(4) let _transformCalled = false @@ -32,4 +32,6 @@ test('transform constructor set methods', function (t) { t.ok(_transformCalled) t.ok(_flushCalled) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-transform-constructor-set-methods' diff --git a/test/browser/test-stream-transform-objectmode-falsey-value.js b/test/browser/test-stream-transform-objectmode-falsey-value.js index 2d043fc3df..69a9876497 100644 --- a/test/browser/test-stream-transform-objectmode-falsey-value.js +++ b/test/browser/test-stream-transform-objectmode-falsey-value.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { PassThrough } = require('../../lib/ours/index') -test('transform objectmode falsey value', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(13) const src = new PassThrough({ objectMode: true @@ -34,4 +34,6 @@ test('transform objectmode falsey value', function (t) { src.write(i++) } }, 10) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-transform-objectmode-falsey-value' diff --git a/test/browser/test-stream-transform-split-objectmode.js b/test/browser/test-stream-transform-split-objectmode.js index 256b86cc29..551dfcbe8b 100644 --- a/test/browser/test-stream-transform-split-objectmode.js +++ 
b/test/browser/test-stream-transform-split-objectmode.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Transform } = require('../../lib/ours/index') -test('transform split objectmode', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(10) const parser = new Transform({ readableObjectMode: true @@ -53,4 +53,6 @@ test('transform split objectmode', function (t) { setImmediate(function () { serializer.end() }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-transform-split-objectmode' diff --git a/test/browser/test-stream-unshift-empty-chunk.js b/test/browser/test-stream-unshift-empty-chunk.js index dea52a22f5..e0494e2554 100644 --- a/test/browser/test-stream-unshift-empty-chunk.js +++ b/test/browser/test-stream-unshift-empty-chunk.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Readable } = require('../../lib/ours/index') -test('unshift empty chunk', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(1) const r = new Readable() let nChunks = 10 @@ -58,4 +58,6 @@ test('unshift empty chunk', function (t) { r.on('end', function () { t.deepEqual(seen, expect) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-unshift-empty-chunk' diff --git a/test/browser/test-stream-unshift-read-race.js b/test/browser/test-stream-unshift-read-race.js index 68983e643f..3e68b3cfdd 100644 --- a/test/browser/test-stream-unshift-read-race.js +++ b/test/browser/test-stream-unshift-read-race.js @@ -5,11 +5,11 @@ // 3. push() after the EOF signaling null is an error. // 4. _read() is not called after pushing the EOF null chunk. -const test = require('tape') - const stream = require('../../lib/ours/index') -test('unshift read race', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(139) const hwm = 10 const r = stream.Readable({ @@ -61,9 +61,10 @@ test('unshift read race', function (t) { w.end() const onerror = global.onerror - global.onerror = (_u1, _u2, _u3, _u4, gotErr) => { + global.onerror = () => { t.ok(true) global.onerror = onerror + return true } r.push(Buffer.allocUnsafe(1)) @@ -126,4 +127,6 @@ test('unshift read race', function (t) { t.equal(written.length, 18) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-unshift-read-race' diff --git a/test/browser/test-stream-writable-change-default-encoding.js b/test/browser/test-stream-writable-change-default-encoding.js index 89652f8e8e..54426e56d3 100644 --- a/test/browser/test-stream-writable-change-default-encoding.js +++ b/test/browser/test-stream-writable-change-default-encoding.js @@ -1,11 +1,11 @@ 'use strict' -const test = require('tape') - const inherits = require('inherits') const stream = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + inherits(MyWritable, stream.Writable) MyWritable.prototype._write = function (chunk, encoding, callback) { @@ -18,55 +18,60 @@ function MyWritable(fn, options) { this.fn = fn } -test('defaultCondingIsUtf8', (t) => { - t.plan(1) - const m = new MyWritable( - function (isBuffer, type, enc) { - t.equal(enc, 'utf8') - }, - { - decodeStrings: false - } - ) - m.write('foo') - m.end() -}) -test('changeDefaultEncodingToAscii', (t) => { - t.plan(1) - const m = new MyWritable( - function (isBuffer, type, enc) { - t.equal(enc, 'ascii') - }, - { - decodeStrings: false - } 
- ) - m.setDefaultEncoding('ascii') - m.write('bar') - m.end() -}) -test('changeDefaultEncodingToInvalidValue', (t) => { - t.plan(1) - t.throws(function () { - const m = new MyWritable(function (isBuffer, type, enc) {}, { - decodeStrings: false - }) - m.setDefaultEncoding({}) +module.exports = function (test) { + test('defaultCondingIsUtf8', (t) => { + t.plan(1) + const m = new MyWritable( + function (isBuffer, type, enc) { + t.equal(enc, 'utf8') + }, + { + decodeStrings: false + } + ) + m.write('foo') + m.end() + }) + test('changeDefaultEncodingToAscii', (t) => { + t.plan(1) + const m = new MyWritable( + function (isBuffer, type, enc) { + t.equal(enc, 'ascii') + }, + { + decodeStrings: false + } + ) + m.setDefaultEncoding('ascii') m.write('bar') m.end() - }, TypeError) -}) -test('checkVairableCaseEncoding', (t) => { - t.plan(1) - const m = new MyWritable( - function (isBuffer, type, enc) { - t.equal(enc, 'ascii') - }, - { - decodeStrings: false - } - ) - m.setDefaultEncoding('AsCii') - m.write('bar') - m.end() -}) + }) + test('changeDefaultEncodingToInvalidValue', (t) => { + t.plan(1) + t.throws(function () { + const m = new MyWritable(function (isBuffer, type, enc) {}, { + decodeStrings: false + }) + m.setDefaultEncoding({}) + m.write('bar') + m.end() + }, TypeError) + }) + test('checkVairableCaseEncoding', (t) => { + t.plan(1) + const m = new MyWritable( + function (isBuffer, type, enc) { + t.equal(enc, 'ascii') + }, + { + decodeStrings: false + } + ) + m.setDefaultEncoding('AsCii') + m.write('bar') + m.end() + }) +} + +module.exports[kReadableStreamSuiteName] = 'stream-writable-change-default-encoding' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream-writable-constructor-set-methods.js b/test/browser/test-stream-writable-constructor-set-methods.js index 952b46c1d5..d8beab46b4 100644 --- a/test/browser/test-stream-writable-constructor-set-methods.js +++ b/test/browser/test-stream-writable-constructor-set-methods.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Writable } = require('../../lib/ours/index') -test('writable constructor set methods', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(5) let _writeCalled = false @@ -38,4 +38,6 @@ test('writable constructor set methods', function (t) { t.equal(dLength, 2) t.ok(_writevCalled) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream-writable-constructor-set-methods' diff --git a/test/browser/test-stream-writable-decoded-encoding.js b/test/browser/test-stream-writable-decoded-encoding.js index c4b39346e6..521b71fe6c 100644 --- a/test/browser/test-stream-writable-decoded-encoding.js +++ b/test/browser/test-stream-writable-decoded-encoding.js @@ -1,11 +1,11 @@ 'use strict' -const test = require('tape') - const inherits = require('inherits') const stream = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + function MyWritable(fn, options) { stream.Writable.call(this, options) this.fn = fn @@ -18,33 +18,38 @@ MyWritable.prototype._write = function (chunk, encoding, callback) { callback() } -test('decodeStringsTrue', (t) => { - t.plan(3) - const m = new MyWritable( - function (isBuffer, type, enc) { - t.ok(isBuffer) - t.equal(type, 'object') - t.equal(enc, 'buffer') // console.log('ok - decoded string is decoded'); - }, - { - decodeStrings: true - } - ) - m.write('some-text', 'utf8') - m.end() -}) 
-test('decodeStringsFalse', (t) => { - t.plan(3) - const m = new MyWritable( - function (isBuffer, type, enc) { - t.notOk(isBuffer) - t.equal(type, 'string') - t.equal(enc, 'utf8') // console.log('ok - un-decoded string is not decoded'); - }, - { - decodeStrings: false - } - ) - m.write('some-text', 'utf8') - m.end() -}) +module.exports = function (test) { + test('decodeStringsTrue', (t) => { + t.plan(3) + const m = new MyWritable( + function (isBuffer, type, enc) { + t.ok(isBuffer) + t.equal(type, 'object') + t.equal(enc, 'buffer') // console.log('ok - decoded string is decoded'); + }, + { + decodeStrings: true + } + ) + m.write('some-text', 'utf8') + m.end() + }) + test('decodeStringsFalse', (t) => { + t.plan(3) + const m = new MyWritable( + function (isBuffer, type, enc) { + t.notOk(isBuffer) + t.equal(type, 'string') + t.equal(enc, 'utf8') // console.log('ok - un-decoded string is not decoded'); + }, + { + decodeStrings: false + } + ) + m.write('some-text', 'utf8') + m.end() + }) +} + +module.exports[kReadableStreamSuiteName] = 'stream-writable-decoded-encoding' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream-writev.js b/test/browser/test-stream-writev.js index a85cafe98f..f8966ee1d9 100644 --- a/test/browser/test-stream-writev.js +++ b/test/browser/test-stream-writev.js @@ -1,9 +1,9 @@ 'use strict' -const test = require('tape') - const stream = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + const queue = [] for (let decode = 0; decode < 2; decode++) { @@ -129,7 +129,12 @@ function runTest(decode, uncork, multi) { } } -for (let i = 0; i < queue.length; i++) { - const tr = queue[i] - test('round ' + i, runTest(tr[0], tr[1], tr[2])) +module.exports = function (test) { + for (let i = 0; i < queue.length; i++) { + const tr = queue[i] + test('round ' + i, runTest(tr[0], tr[1], tr[2])) + } } + +module.exports[kReadableStreamSuiteName] = 'stream-writev' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream2-base64-single-char-read-end.js b/test/browser/test-stream2-base64-single-char-read-end.js index 916ddd2b74..10f8378a9d 100644 --- a/test/browser/test-stream2-base64-single-char-read-end.js +++ b/test/browser/test-stream2-base64-single-char-read-end.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Readable, Writable } = require('../../lib/ours/index') -test('base64 single char read end', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(1) const src = new Readable({ encoding: 'base64' @@ -36,4 +36,6 @@ test('base64 single char read end', function (t) { const timeout = setTimeout(function () { t.fail('timed out waiting for _write') }, 100) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-base64-single-char-read-end' diff --git a/test/browser/test-stream2-compatibility.js b/test/browser/test-stream2-compatibility.js index 7b61cfbd6c..940f8d1b0e 100644 --- a/test/browser/test-stream2-compatibility.js +++ b/test/browser/test-stream2-compatibility.js @@ -1,12 +1,12 @@ 'use strict' -const test = require('tape') - const inherits = require('inherits') const { Readable } = require('../../lib/ours/index') -test('compatibility', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(1) let ondataCalled = 0 @@ -32,4 +32,6 @@ test('compatibility', function (t) { 
t.equal(ondataCalled, 1) }) new TestReader().read() -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-compatibility' diff --git a/test/browser/test-stream2-large-read-stall.js b/test/browser/test-stream2-large-read-stall.js index 10d50e2a70..ee7ee033f5 100644 --- a/test/browser/test-stream2-large-read-stall.js +++ b/test/browser/test-stream2-large-read-stall.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Readable } = require('../../lib/ours/index') -test('large object read stall', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(1) // If everything aligns so that you do a read(n) of exactly the // remaining buffer, then make sure that 'end' still emits. @@ -19,10 +19,11 @@ test('large object read stall', function (t) { r._read = push r.on('readable', function () { false && console.error('>> readable') + let ret do { false && console.error(' > read(%d)', READSIZE) - var ret = r.read(READSIZE) + ret = r.read(READSIZE) false && console.error(' < %j (%d remain)', ret && ret.length, rs.length) } while (ret && ret.length === READSIZE) @@ -52,4 +53,6 @@ test('large object read stall', function (t) { } // start the flow r.read(0) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-large-read-stall' diff --git a/test/browser/test-stream2-objects.js b/test/browser/test-stream2-objects.js index 3bd2bac61e..679636a6ad 100644 --- a/test/browser/test-stream2-objects.js +++ b/test/browser/test-stream2-objects.js @@ -1,9 +1,15 @@ 'use strict' -const test = require('tape') - const { Readable, Writable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + +function forEach(xs, f) { + for (let i = 0, l = xs.length; i < l; i++) { + f(xs[i], i) + } +} + function toArray(callback) { const stream = new Writable({ objectMode: true @@ -35,305 +41,304 @@ function fromArray(list) { function noop() {} -test('can read objects from stream', function (t) { - t.plan(3) - const r = fromArray([ - { +module.exports = function (test) { + test('can read objects from stream', function (t) { + t.plan(3) + const r = fromArray([ + { + one: '1' + }, + { + two: '2' + } + ]) + const v1 = r.read() + const v2 = r.read() + const v3 = r.read() + t.deepEqual(v1, { one: '1' - }, - { + }) + t.deepEqual(v2, { two: '2' - } - ]) - const v1 = r.read() - const v2 = r.read() - const v3 = r.read() - t.deepEqual(v1, { - one: '1' + }) + t.deepEqual(v3, null) }) - t.deepEqual(v2, { - two: '2' + test('can pipe objects into stream', function (t) { + t.plan(1) + const r = fromArray([ + { + one: '1' + }, + { + two: '2' + } + ]) + r.pipe( + toArray(function (list) { + t.deepEqual(list, [ + { + one: '1' + }, + { + two: '2' + } + ]) + }) + ) }) - t.deepEqual(v3, null) -}) -test('can pipe objects into stream', function (t) { - t.plan(1) - const r = fromArray([ - { + test('read(n) is ignored', function (t) { + t.plan(1) + const r = fromArray([ + { + one: '1' + }, + { + two: '2' + } + ]) + const value = r.read(2) + t.deepEqual(value, { one: '1' - }, - { - two: '2' - } - ]) - r.pipe( - toArray(function (list) { - t.deepEqual(list, [ - { - one: '1' - }, - { - two: '2' - } - ]) }) - ) -}) -test('read(n) is ignored', function (t) { - t.plan(1) - const r = fromArray([ - { - one: '1' - }, - { - two: '2' - } - ]) - const value = r.read(2) - t.deepEqual(value, { - one: '1' - }) -}) -test('can read objects from _read (sync)', function (t) { - t.plan(1) - const r = new 
Readable({ - objectMode: true }) - const list = [ - { - one: '1' - }, - { - two: '2' - } - ] + test('can read objects from _read (sync)', function (t) { + t.plan(1) + const r = new Readable({ + objectMode: true + }) + const list = [ + { + one: '1' + }, + { + two: '2' + } + ] - r._read = function (n) { - const item = list.shift() - r.push(item || null) - } + r._read = function (n) { + const item = list.shift() + r.push(item || null) + } - r.pipe( - toArray(function (list) { - t.deepEqual(list, [ - { - one: '1' - }, - { - two: '2' - } - ]) - }) - ) -}) -test('can read objects from _read (async)', function (t) { - t.plan(1) - const r = new Readable({ - objectMode: true + r.pipe( + toArray(function (list) { + t.deepEqual(list, [ + { + one: '1' + }, + { + two: '2' + } + ]) + }) + ) }) - const list = [ - { - one: '1' - }, - { - two: '2' + test('can read objects from _read (async)', function (t) { + t.plan(1) + const r = new Readable({ + objectMode: true + }) + const list = [ + { + one: '1' + }, + { + two: '2' + } + ] + + r._read = function (n) { + const item = list.shift() + process.nextTick(function () { + r.push(item || null) + }) } - ] - r._read = function (n) { - const item = list.shift() - process.nextTick(function () { - r.push(item || null) + r.pipe( + toArray(function (list) { + t.deepEqual(list, [ + { + one: '1' + }, + { + two: '2' + } + ]) + }) + ) + }) + test('can read strings as objects', function (t) { + t.plan(1) + const r = new Readable({ + objectMode: true }) - } - - r.pipe( - toArray(function (list) { - t.deepEqual(list, [ - { - one: '1' - }, - { - two: '2' - } - ]) + r._read = noop + const list = ['one', 'two', 'three'] + forEach(list, function (str) { + r.push(str) }) - ) -}) -test('can read strings as objects', function (t) { - t.plan(1) - const r = new Readable({ - objectMode: true - }) - r._read = noop - const list = ['one', 'two', 'three'] - forEach(list, function (str) { - r.push(str) + r.push(null) + r.pipe( + toArray(function (array) { + t.deepEqual(array, list) + }) + ) }) - r.push(null) - r.pipe( - toArray(function (array) { - t.deepEqual(array, list) + test('read(0) for object streams', function (t) { + t.plan(1) + const r = new Readable({ + objectMode: true }) - ) -}) -test('read(0) for object streams', function (t) { - t.plan(1) - const r = new Readable({ - objectMode: true + r._read = noop + r.push('foobar') + r.push(null) + r.read(0) + r.pipe( + toArray(function (array) { + t.deepEqual(array, ['foobar']) + }) + ) }) - r._read = noop - r.push('foobar') - r.push(null) - r.read(0) - r.pipe( - toArray(function (array) { - t.deepEqual(array, ['foobar']) + test('falsey values', function (t) { + t.plan(1) + const r = new Readable({ + objectMode: true }) - ) -}) -test('falsey values', function (t) { - t.plan(1) - const r = new Readable({ - objectMode: true + r._read = noop + r.push(false) + r.push(0) + r.push('') + r.push(null) + r.pipe( + toArray(function (array) { + t.deepEqual(array, [false, 0, '']) + }) + ) }) - r._read = noop - r.push(false) - r.push(0) - r.push('') - r.push(null) - r.pipe( - toArray(function (array) { - t.deepEqual(array, [false, 0, '']) + test('high watermark _read', function (t) { + t.plan(5) + const r = new Readable({ + highWaterMark: 6, + objectMode: true }) - ) -}) -test('high watermark _read', function (t) { - t.plan(5) - const r = new Readable({ - highWaterMark: 6, - objectMode: true - }) - let calls = 0 - const list = ['1', '2', '3', '4', '5', '6', '7', '8'] + let calls = 0 + const list = ['1', '2', '3', '4', '5', '6', '7', '8'] - r._read = 
function (n) { - calls++ - } + r._read = function (n) { + calls++ + } - forEach(list, function (c) { - r.push(c) - }) - const v = r.read() - t.equal(calls, 0) - t.equal(v, '1') - const v2 = r.read() - t.equal(v2, '2') - const v3 = r.read() - t.equal(v3, '3') - t.equal(calls, 1) -}) -test('high watermark push', function (t) { - t.plan(6) - const r = new Readable({ - highWaterMark: 6, - objectMode: true + forEach(list, function (c) { + r.push(c) + }) + const v = r.read() + t.equal(calls, 0) + t.equal(v, '1') + const v2 = r.read() + t.equal(v2, '2') + const v3 = r.read() + t.equal(v3, '3') + t.equal(calls, 1) }) + test('high watermark push', function (t) { + t.plan(6) + const r = new Readable({ + highWaterMark: 6, + objectMode: true + }) - r._read = function (n) {} + r._read = function (n) {} - for (let i = 0; i < 6; i++) { - const bool = r.push(i) - t.equal(bool, i !== 5) - } -}) -test('can write objects to stream', function (t) { - t.plan(1) - const w = new Writable({ - objectMode: true + for (let i = 0; i < 6; i++) { + const bool = r.push(i) + t.equal(bool, i !== 5) + } }) + test('can write objects to stream', function (t) { + t.plan(1) + const w = new Writable({ + objectMode: true + }) + + w._write = function (chunk, encoding, cb) { + t.deepEqual(chunk, { + foo: 'bar' + }) + cb() + } - w._write = function (chunk, encoding, cb) { - t.deepEqual(chunk, { + w.on('finish', function () {}) + w.write({ foo: 'bar' }) - cb() - } - - w.on('finish', function () {}) - w.write({ - foo: 'bar' + w.end() }) - w.end() -}) -test('can write multiple objects to stream', function (t) { - t.plan(1) - const w = new Writable({ - objectMode: true - }) - const list = [] + test('can write multiple objects to stream', function (t) { + t.plan(1) + const w = new Writable({ + objectMode: true + }) + const list = [] - w._write = function (chunk, encoding, cb) { - list.push(chunk) - cb() - } + w._write = function (chunk, encoding, cb) { + list.push(chunk) + cb() + } - w.on('finish', function () { - t.deepEqual(list, [0, 1, 2, 3, 4]) - }) - w.write(0) - w.write(1) - w.write(2) - w.write(3) - w.write(4) - w.end() -}) -test('can write strings as objects', function (t) { - t.plan(1) - const w = new Writable({ - objectMode: true + w.on('finish', function () { + t.deepEqual(list, [0, 1, 2, 3, 4]) + }) + w.write(0) + w.write(1) + w.write(2) + w.write(3) + w.write(4) + w.end() }) - const list = [] + test('can write strings as objects', function (t) { + t.plan(1) + const w = new Writable({ + objectMode: true + }) + const list = [] - w._write = function (chunk, encoding, cb) { - list.push(chunk) - process.nextTick(cb) - } + w._write = function (chunk, encoding, cb) { + list.push(chunk) + process.nextTick(cb) + } - w.on('finish', function () { - t.deepEqual(list, ['0', '1', '2', '3', '4']) - }) - w.write('0') - w.write('1') - w.write('2') - w.write('3') - w.write('4') - w.end() -}) -test('buffers finish until cb is called', function (t) { - t.plan(2) - const w = new Writable({ - objectMode: true + w.on('finish', function () { + t.deepEqual(list, ['0', '1', '2', '3', '4']) + }) + w.write('0') + w.write('1') + w.write('2') + w.write('3') + w.write('4') + w.end() }) - let called = false - - w._write = function (chunk, encoding, cb) { - t.equal(chunk, 'foo') - process.nextTick(function () { - called = true - cb() + test('buffers finish until cb is called', function (t) { + t.plan(2) + const w = new Writable({ + objectMode: true }) - } + let called = false - w.on('finish', function () { - t.equal(called, true) - }) - w.write('foo') - 
w.end() -}) + w._write = function (chunk, encoding, cb) { + t.equal(chunk, 'foo') + process.nextTick(function () { + called = true + cb() + }) + } -function forEach(xs, f) { - for (let i = 0, l = xs.length; i < l; i++) { - f(xs[i], i) - } + w.on('finish', function () { + t.equal(called, true) + }) + w.write('foo') + w.end() + }) } + +module.exports[kReadableStreamSuiteName] = 'stream2-objects' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream2-pipe-error-handling.js b/test/browser/test-stream2-pipe-error-handling.js index e4c74506b9..2dd3e31821 100644 --- a/test/browser/test-stream2-pipe-error-handling.js +++ b/test/browser/test-stream2-pipe-error-handling.js @@ -1,85 +1,91 @@ 'use strict' -const test = require('tape') - const stream = require('../../lib/ours/index') -test('Error Listener Catches', function (t) { - t.plan(3) - let count = 1000 - const source = new stream.Readable() - - source._read = function (n) { - n = Math.min(count, n) - count -= n - source.push(Buffer.alloc(n)) - } - - let unpipedDest - - source.unpipe = function (dest) { - unpipedDest = dest - stream.Readable.prototype.unpipe.call(this, dest) - } - - const dest = new stream.Writable() - - dest._write = function (chunk, encoding, cb) { - cb() - } - - source.pipe(dest) - let gotErr = null - dest.on('error', function (err) { - gotErr = err - }) - let unpipedSource - dest.on('unpipe', function (src) { - unpipedSource = src - }) - const err = new Error('This stream turned into bacon.') - dest.emit('error', err) - t.strictEqual(gotErr, err) - t.strictEqual(unpipedSource, source) - t.strictEqual(unpipedDest, dest) -}) -test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) { - t.plan(3) - let count = 1000 - const source = new stream.Readable() - - source._read = function (n) { - n = Math.min(count, n) - count -= n - source.push(Buffer.alloc(n)) - } - - let unpipedDest - - source.unpipe = function (dest) { - unpipedDest = dest - stream.Readable.prototype.unpipe.call(this, dest) - } - - const dest = new stream.Writable() - - dest._write = function (chunk, encoding, cb) { - cb() - } - - source.pipe(dest) - let unpipedSource - dest.on('unpipe', function (src) { - unpipedSource = src - }) - const err = new Error('This stream turned into bacon.') - const onerror = global.onerror - dest.emit('error', err) - - global.onerror = (_u1, _u2, _u3, _u4, gotErr) => { +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + +module.exports = function (test) { + test('Error Listener Catches', function (t) { + t.plan(3) + let count = 1000 + const source = new stream.Readable() + + source._read = function (n) { + n = Math.min(count, n) + count -= n + source.push(Buffer.alloc(n)) + } + + let unpipedDest + + source.unpipe = function (dest) { + unpipedDest = dest + stream.Readable.prototype.unpipe.call(this, dest) + } + + const dest = new stream.Writable() + + dest._write = function (chunk, encoding, cb) { + cb() + } + + source.pipe(dest) + let gotErr = null + dest.on('error', function (err) { + gotErr = err + }) + let unpipedSource + dest.on('unpipe', function (src) { + unpipedSource = src + }) + const err = new Error('This stream turned into bacon.') + dest.emit('error', err) t.strictEqual(gotErr, err) t.strictEqual(unpipedSource, source) t.strictEqual(unpipedDest, dest) - global.onerror = onerror - } -}) + }) + test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) { + t.plan(3) + let count = 1000 + 
const source = new stream.Readable() + + source._read = function (n) { + n = Math.min(count, n) + count -= n + source.push(Buffer.alloc(n)) + } + + let unpipedDest + + source.unpipe = function (dest) { + unpipedDest = dest + stream.Readable.prototype.unpipe.call(this, dest) + } + + const dest = new stream.Writable() + + dest._write = function (chunk, encoding, cb) { + cb() + } + + source.pipe(dest) + let unpipedSource + dest.on('unpipe', function (src) { + unpipedSource = src + }) + const err = new Error('This stream turned into bacon.') + const onerror = global.onerror + dest.emit('error', err) + + global.onerror = () => { + t.ok(true) + t.strictEqual(unpipedSource, source) + t.strictEqual(unpipedDest, dest) + global.onerror = onerror + return true + } + }) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-pipe-error-handling' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream2-pipe-error-once-listener.js b/test/browser/test-stream2-pipe-error-once-listener.js index da7020bcdd..c5696778a5 100644 --- a/test/browser/test-stream2-pipe-error-once-listener.js +++ b/test/browser/test-stream2-pipe-error-once-listener.js @@ -1,12 +1,12 @@ 'use strict' -const test = require('tape') - const inherits = require('inherits') const stream = require('../../lib/ours/index') -test('pipe error once listener', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(1) const Read = function () { @@ -38,4 +38,6 @@ test('pipe error once listener', function (t) { t.ok(true) }) read.pipe(write) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-pipe-error-once-listener' diff --git a/test/browser/test-stream2-push.js b/test/browser/test-stream2-push.js index 5e6cdfb18e..0c3307f516 100644 --- a/test/browser/test-stream2-push.js +++ b/test/browser/test-stream2-push.js @@ -1,12 +1,12 @@ 'use strict' -const test = require('tape') - const { EventEmitter: EE } = require('events') const { Readable, Writable } = require('../../lib/ours/index') -test('push', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(33) const stream = new Readable({ highWaterMark: 16, @@ -109,4 +109,6 @@ test('push', function (t) { t.ok(ended) }) } -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-push' diff --git a/test/browser/test-stream2-readable-empty-buffer-no-eof.js b/test/browser/test-stream2-readable-empty-buffer-no-eof.js index e1ab001ded..1a7b611b36 100644 --- a/test/browser/test-stream2-readable-empty-buffer-no-eof.js +++ b/test/browser/test-stream2-readable-empty-buffer-no-eof.js @@ -1,107 +1,112 @@ 'use strict' -const test = require('tape') - const { Readable } = require('../../lib/ours/index') -test('readable empty buffer no eof 1', function (t) { - t.plan(1) - const r = new Readable() // should not end when we get a Buffer(0) or '' as the _read result - // that just means that there is *temporarily* no data, but to go - // ahead and try again later. - // - // note that this is very unusual. it only works for crypto streams - // because the other side of the stream will call read(0) to cycle - // data through openssl. that's why we set the timeouts to call - // r.read(0) again later, otherwise there is no more work being done - // and the process just exits. 
- - const buf = Buffer.alloc(5) - buf.fill('x') - let reads = 5 - - r._read = function (n) { - switch (reads--) { - case 0: - return r.push(null) - // EOF - - case 1: - return r.push(buf) - - case 2: - setTimeout(r.read.bind(r, 0), 50) - return r.push(Buffer.alloc(0)) - // Not-EOF! - - case 3: - setTimeout(r.read.bind(r, 0), 50) - return process.nextTick(function () { - return r.push(Buffer.alloc(0)) - }) - - case 4: - setTimeout(r.read.bind(r, 0), 50) - return setTimeout(function () { - return r.push(Buffer.alloc(0)) - }) - - case 5: - return setTimeout(function () { +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + +module.exports = function (test) { + test('readable empty buffer no eof 1', function (t) { + t.plan(1) + const r = new Readable() // should not end when we get a Buffer(0) or '' as the _read result + // that just means that there is *temporarily* no data, but to go + // ahead and try again later. + // + // note that this is very unusual. it only works for crypto streams + // because the other side of the stream will call read(0) to cycle + // data through openssl. that's why we set the timeouts to call + // r.read(0) again later, otherwise there is no more work being done + // and the process just exits. + + const buf = Buffer.alloc(5) + buf.fill('x') + let reads = 5 + + r._read = function (n) { + switch (reads--) { + case 0: + return r.push(null) + // EOF + + case 1: return r.push(buf) - }) - default: - throw new Error('unreachable') + case 2: + setTimeout(r.read.bind(r, 0), 50) + return r.push(Buffer.alloc(0)) + // Not-EOF! + + case 3: + setTimeout(r.read.bind(r, 0), 50) + return process.nextTick(function () { + return r.push(Buffer.alloc(0)) + }) + + case 4: + setTimeout(r.read.bind(r, 0), 50) + return setTimeout(function () { + return r.push(Buffer.alloc(0)) + }) + + case 5: + return setTimeout(function () { + return r.push(buf) + }) + + default: + throw new Error('unreachable') + } } - } - const results = [] + const results = [] - function flow() { - let chunk + function flow() { + let chunk - while ((chunk = r.read()) !== null) { - results.push(chunk + '') + while ((chunk = r.read()) !== null) { + results.push(chunk + '') + } } - } - r.on('readable', flow) - r.on('end', function () { - results.push('EOF') - t.deepEqual(results, ['xxxxx', 'xxxxx', 'EOF']) - }) - flow() -}) -test('readable empty buffer no eof 2', function (t) { - t.plan(1) - const r = new Readable({ - encoding: 'base64' + r.on('readable', flow) + r.on('end', function () { + results.push('EOF') + t.deepEqual(results, ['xxxxx', 'xxxxx', 'EOF']) + }) + flow() }) - let reads = 5 - - r._read = function (n) { - if (!reads--) { - return r.push(null) // EOF - } else { - return r.push(Buffer.from('x')) + test('readable empty buffer no eof 2', function (t) { + t.plan(1) + const r = new Readable({ + encoding: 'base64' + }) + let reads = 5 + + r._read = function (n) { + if (!reads--) { + return r.push(null) // EOF + } else { + return r.push(Buffer.from('x')) + } } - } - const results = [] + const results = [] - function flow() { - let chunk + function flow() { + let chunk - while ((chunk = r.read()) !== null) { - results.push(chunk + '') + while ((chunk = r.read()) !== null) { + results.push(chunk + '') + } } - } - r.on('readable', flow) - r.on('end', function () { - results.push('EOF') - t.deepEqual(results, ['eHh4', 'eHg=', 'EOF']) + r.on('readable', flow) + r.on('end', function () { + results.push('EOF') + t.deepEqual(results, ['eHh4', 'eHg=', 'EOF']) + }) + flow() 
}) - flow() -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-readable-empty-buffer-no-eof' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream2-readable-from-list.js b/test/browser/test-stream2-readable-from-list.js index a2957c2be4..92c607d86b 100644 --- a/test/browser/test-stream2-readable-from-list.js +++ b/test/browser/test-stream2-readable-from-list.js @@ -1,11 +1,11 @@ 'use strict' -const test = require('tape') - const { _fromList: fromList } = require('../../lib/_stream_readable') const BufferList = require('../../lib/internal/streams/buffer_list') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + function bufferListFromArray(arr) { const bl = new BufferList() @@ -16,69 +16,74 @@ function bufferListFromArray(arr) { return bl } -test('buffers', function (t) { - t.plan(5) - let list = [Buffer.from('foog'), Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')] - list = bufferListFromArray(list) // read more than the first element. - - let ret = fromList(6, { - buffer: list, - length: 16 - }) - t.equal(ret.toString(), 'foogba') // read exactly the first element. - - ret = fromList(2, { - buffer: list, - length: 10 - }) - t.equal(ret.toString(), 'rk') // read less than the first element. - - ret = fromList(2, { - buffer: list, - length: 8 - }) - t.equal(ret.toString(), 'ba') // read more than we have. - - ret = fromList(100, { - buffer: list, - length: 6 - }) - t.equal(ret.toString(), 'zykuel') // all consumed. - - t.same(list, new BufferList()) -}) -test('strings', function (t) { - t.plan(5) - let list = ['foog', 'bark', 'bazy', 'kuel'] - list = bufferListFromArray(list) // read more than the first element. - - let ret = fromList(6, { - buffer: list, - length: 16, - decoder: true - }) - t.equal(ret, 'foogba') // read exactly the first element. - - ret = fromList(2, { - buffer: list, - length: 10, - decoder: true +module.exports = function (test) { + test('buffers', function (t) { + t.plan(5) + let list = [Buffer.from('foog'), Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')] + list = bufferListFromArray(list) // read more than the first element. + + let ret = fromList(6, { + buffer: list, + length: 16 + }) + t.equal(ret.toString(), 'foogba') // read exactly the first element. + + ret = fromList(2, { + buffer: list, + length: 10 + }) + t.equal(ret.toString(), 'rk') // read less than the first element. + + ret = fromList(2, { + buffer: list, + length: 8 + }) + t.equal(ret.toString(), 'ba') // read more than we have. + + ret = fromList(100, { + buffer: list, + length: 6 + }) + t.equal(ret.toString(), 'zykuel') // all consumed. + + t.same(list, new BufferList()) }) - t.equal(ret, 'rk') // read less than the first element. - - ret = fromList(2, { - buffer: list, - length: 8, - decoder: true - }) - t.equal(ret, 'ba') // read more than we have. - - ret = fromList(100, { - buffer: list, - length: 6, - decoder: true + test('strings', function (t) { + t.plan(5) + let list = ['foog', 'bark', 'bazy', 'kuel'] + list = bufferListFromArray(list) // read more than the first element. + + let ret = fromList(6, { + buffer: list, + length: 16, + decoder: true + }) + t.equal(ret, 'foogba') // read exactly the first element. + + ret = fromList(2, { + buffer: list, + length: 10, + decoder: true + }) + t.equal(ret, 'rk') // read less than the first element. + + ret = fromList(2, { + buffer: list, + length: 8, + decoder: true + }) + t.equal(ret, 'ba') // read more than we have. 
+ + ret = fromList(100, { + buffer: list, + length: 6, + decoder: true + }) + t.equal(ret, 'zykuel') // all consumed. + + t.same(list, new BufferList()) }) - t.equal(ret, 'zykuel') // all consumed. +} - t.same(list, new BufferList()) -}) +module.exports[kReadableStreamSuiteName] = 'stream2-readable-from-list' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream2-readable-legacy-drain.js b/test/browser/test-stream2-readable-legacy-drain.js index 92d73eefde..4bff8d7afc 100644 --- a/test/browser/test-stream2-readable-legacy-drain.js +++ b/test/browser/test-stream2-readable-legacy-drain.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Stream, Readable } = require('../../lib/ours/index') -test('readable legacy drain', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(3) const r = new Readable() const N = 256 @@ -44,4 +44,6 @@ test('readable legacy drain', function (t) { } r.pipe(w) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-readable-legacy-drain' diff --git a/test/browser/test-stream2-readable-non-empty-end.js b/test/browser/test-stream2-readable-non-empty-end.js index 032a9ae21c..7a3e56b773 100644 --- a/test/browser/test-stream2-readable-non-empty-end.js +++ b/test/browser/test-stream2-readable-non-empty-end.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Readable } = require('../../lib/ours/index') -test('non empty end', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(4) let len = 0 const chunks = new Array(10) @@ -58,4 +58,6 @@ test('non empty end', function (t) { r = test.read() t.equal(r, null) } -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-readable-non-empty-end' diff --git a/test/browser/test-stream2-readable-wrap-empty.js b/test/browser/test-stream2-readable-wrap-empty.js index 021bd4fdb8..40b4b8db87 100644 --- a/test/browser/test-stream2-readable-wrap-empty.js +++ b/test/browser/test-stream2-readable-wrap-empty.js @@ -1,12 +1,12 @@ 'use strict' -const test = require('tape') - const { EventEmitter: EE } = require('events') const Readable = require('../../lib/ours/index') -test('wrap empty', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(1) const oldStream = new EE() @@ -21,4 +21,6 @@ test('wrap empty', function (t) { t.ok(true, 'ended') }) oldStream.emit('end') -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-readable-wrap-empty' diff --git a/test/browser/test-stream2-readable-wrap.js b/test/browser/test-stream2-readable-wrap.js index c26d40f74f..73a21b400a 100644 --- a/test/browser/test-stream2-readable-wrap.js +++ b/test/browser/test-stream2-readable-wrap.js @@ -1,112 +1,117 @@ 'use strict' -const test = require('tape') - const { EventEmitter: EE } = require('events') const { Readable, Writable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + let run = 0 -function runTest(highWaterMark, objectMode, produce) { - test('run #' + ++run, (t) => { - t.plan(4) - const old = new EE() - const r = new Readable({ - highWaterMark: highWaterMark, - objectMode: objectMode - }) - t.equal(r, r.wrap(old)) - let ended = false - r.on('end', function () { - ended = true - }) +module.exports = function (test) { + function runTest(highWaterMark, objectMode, produce) { + test('run 
#' + ++run, (t) => { + t.plan(4) + const old = new EE() + const r = new Readable({ + highWaterMark, + objectMode + }) + t.equal(r, r.wrap(old)) + let ended = false + r.on('end', function () { + ended = true + }) + + old.pause = function () { + // console.error('old.pause()'); + old.emit('pause') + flowing = false + } - old.pause = function () { - // console.error('old.pause()'); - old.emit('pause') - flowing = false - } + old.resume = function () { + // console.error('old.resume()'); + old.emit('resume') + flow() + } - old.resume = function () { - // console.error('old.resume()'); - old.emit('resume') - flow() - } + let flowing + let chunks = 10 + let oldEnded = false + const expected = [] - let flowing - let chunks = 10 - let oldEnded = false - const expected = [] + function flow() { + flowing = true // eslint-disable-next-line no-unmodified-loop-condition - function flow() { - flowing = true // eslint-disable-next-line no-unmodified-loop-condition + while (flowing && chunks-- > 0) { + const item = produce() + expected.push(item) // console.log('old.emit', chunks, flowing); - while (flowing && chunks-- > 0) { - const item = produce() - expected.push(item) // console.log('old.emit', chunks, flowing); + old.emit('data', item) // console.log('after emit', chunks, flowing); + } - old.emit('data', item) // console.log('after emit', chunks, flowing); + if (chunks <= 0) { + oldEnded = true // console.log('old end', chunks, flowing); + + old.emit('end') + } } - if (chunks <= 0) { - oldEnded = true // console.log('old end', chunks, flowing); + const w = new Writable({ + highWaterMark: highWaterMark * 2, + objectMode + }) + const written = [] - old.emit('end') + w._write = function (chunk, encoding, cb) { + // console.log('_write', chunk); + written.push(chunk) + setTimeout(cb) } - } - const w = new Writable({ - highWaterMark: highWaterMark * 2, - objectMode: objectMode - }) - const written = [] - - w._write = function (chunk, encoding, cb) { - // console.log('_write', chunk); - written.push(chunk) - setTimeout(cb) - } + w.on('finish', function () { + performAsserts() + }) + r.pipe(w) + flow() - w.on('finish', function () { - performAsserts() + function performAsserts() { + t.ok(ended) + t.ok(oldEnded) + t.deepEqual(written, expected) + } }) - r.pipe(w) - flow() + } - function performAsserts() { - t.ok(ended) - t.ok(oldEnded) - t.deepEqual(written, expected) + runTest(100, false, function () { + return Buffer.alloc(100) + }) + runTest(10, false, function () { + return Buffer.from('xxxxxxxxxx') + }) + runTest(1, true, function () { + return { + foo: 'bar' } }) + const objectChunks = [ + 5, + 'a', + false, + 0, + '', + 'xyz', + { + x: 4 + }, + 7, + [], + 555 + ] + runTest(1, true, function () { + return objectChunks.shift() + }) } -runTest(100, false, function () { - return Buffer.alloc(100) -}) -runTest(10, false, function () { - return Buffer.from('xxxxxxxxxx') -}) -runTest(1, true, function () { - return { - foo: 'bar' - } -}) -const objectChunks = [ - 5, - 'a', - false, - 0, - '', - 'xyz', - { - x: 4 - }, - 7, - [], - 555 -] -runTest(1, true, function () { - return objectChunks.shift() -}) +module.exports[kReadableStreamSuiteName] = 'stream2-readable-wrap' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream2-set-encoding.js b/test/browser/test-stream2-set-encoding.js index c395de7579..c1ef6cd2eb 100644 --- a/test/browser/test-stream2-set-encoding.js +++ b/test/browser/test-stream2-set-encoding.js @@ -1,11 +1,11 @@ 'use strict' -const test = 
require('tape') - const inherits = require('inherits') const { Readable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + inherits(TestReader, Readable) function TestReader(n, opts) { @@ -41,278 +41,283 @@ TestReader.prototype._read = function (n) { ) } -test('setEncoding utf8', function (t) { - t.plan(1) - const tr = new TestReader(100) - tr.setEncoding('utf8') - const out = [] - const expect = [ - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa' - ] - tr.on('readable', function flow() { - let chunk +module.exports = function (test) { + test('setEncoding utf8', function (t) { + t.plan(1) + const tr = new TestReader(100) + tr.setEncoding('utf8') + const out = [] + const expect = [ + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa' + ] + tr.on('readable', function flow() { + let chunk - while ((chunk = tr.read(10)) !== null) { - out.push(chunk) - } - }) - tr.on('end', function () { - t.same(out, expect) + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + tr.on('end', function () { + t.same(out, expect) + }) }) -}) -test('setEncoding hex', function (t) { - t.plan(1) - const tr = new TestReader(100) - tr.setEncoding('hex') - const out = [] - const expect = [ - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161' - ] - tr.on('readable', function flow() { - let chunk + test('setEncoding hex', function (t) { + t.plan(1) + const tr = new TestReader(100) + tr.setEncoding('hex') + const out = [] + const expect = [ + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161' + ] + tr.on('readable', function flow() { + let chunk - while ((chunk = tr.read(10)) !== null) { - out.push(chunk) - } - }) - tr.on('end', function () { - t.same(out, expect) + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + tr.on('end', function () { + t.same(out, expect) + }) }) -}) -test('setEncoding hex with read(13)', function (t) { - t.plan(1) - const tr = new TestReader(100) - tr.setEncoding('hex') - const out = [] - const expect = [ - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '16161' - ] - tr.on('readable', function flow() { - // console.log('readable once'); - let chunk + test('setEncoding hex with read(13)', function (t) { + t.plan(1) + const tr = new TestReader(100) + tr.setEncoding('hex') + const out = [] + const expect = [ + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + 
'1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '16161' + ] + tr.on('readable', function flow() { + // console.log('readable once'); + let chunk - while ((chunk = tr.read(13)) !== null) { - out.push(chunk) - } - }) - tr.on('end', function () { - // console.log('END'); - t.same(out, expect) + while ((chunk = tr.read(13)) !== null) { + out.push(chunk) + } + }) + tr.on('end', function () { + // console.log('END'); + t.same(out, expect) + }) }) -}) -test('setEncoding base64', function (t) { - t.plan(1) - const tr = new TestReader(100) - tr.setEncoding('base64') - const out = [] - const expect = [ - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYQ==' - ] - tr.on('readable', function flow() { - let chunk + test('setEncoding base64', function (t) { + t.plan(1) + const tr = new TestReader(100) + tr.setEncoding('base64') + const out = [] + const expect = [ + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYQ==' + ] + tr.on('readable', function flow() { + let chunk - while ((chunk = tr.read(10)) !== null) { - out.push(chunk) - } - }) - tr.on('end', function () { - t.same(out, expect) - }) -}) -test('encoding: utf8', function (t) { - t.plan(1) - const tr = new TestReader(100, { - encoding: 'utf8' + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + tr.on('end', function () { + t.same(out, expect) + }) }) - const out = [] - const expect = [ - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa', - 'aaaaaaaaaa' - ] - tr.on('readable', function flow() { - let chunk + test('encoding: utf8', function (t) { + t.plan(1) + const tr = new TestReader(100, { + encoding: 'utf8' + }) + const out = [] + const expect = [ + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa', + 'aaaaaaaaaa' + ] + tr.on('readable', function flow() { + let chunk - while ((chunk = tr.read(10)) !== null) { - out.push(chunk) - } - }) - tr.on('end', function () { - t.same(out, expect) - }) -}) -test('encoding: hex', function (t) { - t.plan(1) - const tr = new TestReader(100, { - encoding: 'hex' + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + tr.on('end', function () { + t.same(out, expect) + }) }) - const out = [] - const expect = [ - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161', - '6161616161' - ] - tr.on('readable', function flow() { - let chunk + test('encoding: hex', function (t) { + t.plan(1) + const tr = new TestReader(100, { + encoding: 'hex' + }) + const out = [] + const expect = [ + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161', + '6161616161' + ] + 
tr.on('readable', function flow() { + let chunk - while ((chunk = tr.read(10)) !== null) { - out.push(chunk) - } - }) - tr.on('end', function () { - t.same(out, expect) - }) -}) -test('encoding: hex with read(13)', function (t) { - t.plan(1) - const tr = new TestReader(100, { - encoding: 'hex' + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + tr.on('end', function () { + t.same(out, expect) + }) }) - const out = [] - const expect = [ - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '1616161616161', - '6161616161616', - '16161' - ] - tr.on('readable', function flow() { - let chunk + test('encoding: hex with read(13)', function (t) { + t.plan(1) + const tr = new TestReader(100, { + encoding: 'hex' + }) + const out = [] + const expect = [ + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '1616161616161', + '6161616161616', + '16161' + ] + tr.on('readable', function flow() { + let chunk - while ((chunk = tr.read(13)) !== null) { - out.push(chunk) - } - }) - tr.on('end', function () { - t.same(out, expect) - }) -}) -test('encoding: base64', function (t) { - t.plan(1) - const tr = new TestReader(100, { - encoding: 'base64' + while ((chunk = tr.read(13)) !== null) { + out.push(chunk) + } + }) + tr.on('end', function () { + t.same(out, expect) + }) }) - const out = [] - const expect = [ - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYWFhYWFh', - 'YWFhYWFhYW', - 'FhYQ==' - ] - tr.on('readable', function flow() { - let chunk + test('encoding: base64', function (t) { + t.plan(1) + const tr = new TestReader(100, { + encoding: 'base64' + }) + const out = [] + const expect = [ + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYWFhYWFh', + 'YWFhYWFhYW', + 'FhYQ==' + ] + tr.on('readable', function flow() { + let chunk - while ((chunk = tr.read(10)) !== null) { - out.push(chunk) - } + while ((chunk = tr.read(10)) !== null) { + out.push(chunk) + } + }) + tr.on('end', function () { + t.same(out, expect) + }) }) - tr.on('end', function () { - t.same(out, expect) + test('chainable', function (t) { + t.plan(1) + const tr = new TestReader(100) + t.equal(tr.setEncoding('utf8'), tr) }) -}) -test('chainable', function (t) { - t.plan(1) - const tr = new TestReader(100) - t.equal(tr.setEncoding('utf8'), tr) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-set-encoding' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream2-transform.js b/test/browser/test-stream2-transform.js index 9237ef7694..613e4b1334 100644 --- a/test/browser/test-stream2-transform.js +++ b/test/browser/test-stream2-transform.js @@ -1,474 +1,479 @@ 'use strict' -const test = require('tape') - const { PassThrough, Transform } = require('../../lib/ours/index') -test('writable side consumption', function (t) { - t.plan(3) - const tx = new Transform({ - highWaterMark: 10 - }) - let transformed = 0 +const { 
kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') - tx._transform = function (chunk, encoding, cb) { - transformed += chunk.length - tx.push(chunk) - cb() - } +module.exports = function (test) { + test('writable side consumption', function (t) { + t.plan(3) + const tx = new Transform({ + highWaterMark: 10 + }) + let transformed = 0 - for (let i = 1; i <= 10; i++) { - tx.write(Buffer.alloc(i)) - } + tx._transform = function (chunk, encoding, cb) { + transformed += chunk.length + tx.push(chunk) + cb() + } - tx.end() - t.equal(tx._readableState.length, 10) - t.equal(transformed, 10) - t.same( - tx._writableState.getBuffer().map(function (c) { - return c.chunk.length - }), - [5, 6, 7, 8, 9, 10] - ) -}) -test('passthrough', function (t) { - t.plan(4) - const pt = new PassThrough() - pt.write(Buffer.from('foog')) - pt.write(Buffer.from('bark')) - pt.write(Buffer.from('bazy')) - pt.write(Buffer.from('kuel')) - pt.end() - t.equal(pt.read(5).toString(), 'foogb') - t.equal(pt.read(5).toString(), 'arkba') - t.equal(pt.read(5).toString(), 'zykue') - t.equal(pt.read(5).toString(), 'l') -}) -test('object passthrough', function (t) { - t.plan(7) - const pt = new PassThrough({ - objectMode: true + for (let i = 1; i <= 10; i++) { + tx.write(Buffer.alloc(i)) + } + + tx.end() + t.equal(tx._readableState.length, 10) + t.equal(transformed, 10) + t.same( + tx._writableState.getBuffer().map(function (c) { + return c.chunk.length + }), + [5, 6, 7, 8, 9, 10] + ) }) - pt.write(1) - pt.write(true) - pt.write(false) - pt.write(0) - pt.write('foo') - pt.write('') - pt.write({ - a: 'b' + test('passthrough', function (t) { + t.plan(4) + const pt = new PassThrough() + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5).toString(), 'zykue') + t.equal(pt.read(5).toString(), 'l') }) - pt.end() - t.equal(pt.read(), 1) - t.equal(pt.read(), true) - t.equal(pt.read(), false) - t.equal(pt.read(), 0) - t.equal(pt.read(), 'foo') - t.equal(pt.read(), '') - t.same(pt.read(), { - a: 'b' + test('object passthrough', function (t) { + t.plan(7) + const pt = new PassThrough({ + objectMode: true + }) + pt.write(1) + pt.write(true) + pt.write(false) + pt.write(0) + pt.write('foo') + pt.write('') + pt.write({ + a: 'b' + }) + pt.end() + t.equal(pt.read(), 1) + t.equal(pt.read(), true) + t.equal(pt.read(), false) + t.equal(pt.read(), 0) + t.equal(pt.read(), 'foo') + t.equal(pt.read(), '') + t.same(pt.read(), { + a: 'b' + }) }) -}) -test('simple transform', function (t) { - t.plan(4) - const pt = new Transform() - - pt._transform = function (c, e, cb) { - const ret = Buffer.alloc(c.length) - ret.fill('x') - pt.push(ret) - cb() - } + test('simple transform', function (t) { + t.plan(4) + const pt = new Transform() + + pt._transform = function (c, e, cb) { + const ret = Buffer.alloc(c.length) + ret.fill('x') + pt.push(ret) + cb() + } - pt.write(Buffer.from('foog')) - pt.write(Buffer.from('bark')) - pt.write(Buffer.from('bazy')) - pt.write(Buffer.from('kuel')) - pt.end() - t.equal(pt.read(5).toString(), 'xxxxx') - t.equal(pt.read(5).toString(), 'xxxxx') - t.equal(pt.read(5).toString(), 'xxxxx') - t.equal(pt.read(5).toString(), 'x') -}) -test('simple object transform', function (t) { - t.plan(7) - const pt = new Transform({ - objectMode: true + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + 
pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + t.equal(pt.read(5).toString(), 'xxxxx') + t.equal(pt.read(5).toString(), 'xxxxx') + t.equal(pt.read(5).toString(), 'xxxxx') + t.equal(pt.read(5).toString(), 'x') }) + test('simple object transform', function (t) { + t.plan(7) + const pt = new Transform({ + objectMode: true + }) - pt._transform = function (c, e, cb) { - pt.push(JSON.stringify(c)) - cb() - } - - pt.write(1) - pt.write(true) - pt.write(false) - pt.write(0) - pt.write('foo') - pt.write('') - pt.write({ - a: 'b' - }) - pt.end() - t.equal(pt.read(), '1') - t.equal(pt.read(), 'true') - t.equal(pt.read(), 'false') - t.equal(pt.read(), '0') - t.equal(pt.read(), '"foo"') - t.equal(pt.read(), '""') - t.equal(pt.read(), '{"a":"b"}') -}) -test('async passthrough', function (t) { - t.plan(4) - const pt = new Transform() - - pt._transform = function (chunk, encoding, cb) { - setTimeout(function () { - pt.push(chunk) + pt._transform = function (c, e, cb) { + pt.push(JSON.stringify(c)) cb() - }, 10) - } + } - pt.write(Buffer.from('foog')) - pt.write(Buffer.from('bark')) - pt.write(Buffer.from('bazy')) - pt.write(Buffer.from('kuel')) - pt.end() - pt.on('finish', function () { - t.equal(pt.read(5).toString(), 'foogb') - t.equal(pt.read(5).toString(), 'arkba') - t.equal(pt.read(5).toString(), 'zykue') - t.equal(pt.read(5).toString(), 'l') + pt.write(1) + pt.write(true) + pt.write(false) + pt.write(0) + pt.write('foo') + pt.write('') + pt.write({ + a: 'b' + }) + pt.end() + t.equal(pt.read(), '1') + t.equal(pt.read(), 'true') + t.equal(pt.read(), 'false') + t.equal(pt.read(), '0') + t.equal(pt.read(), '"foo"') + t.equal(pt.read(), '""') + t.equal(pt.read(), '{"a":"b"}') }) -}) -test('assymetric transform (expand)', function (t) { - t.plan(7) - const pt = new Transform() // emit each chunk 2 times. + test('async passthrough', function (t) { + t.plan(4) + const pt = new Transform() - pt._transform = function (chunk, encoding, cb) { - setTimeout(function () { - pt.push(chunk) + pt._transform = function (chunk, encoding, cb) { setTimeout(function () { pt.push(chunk) cb() }, 10) - }, 10) - } + } - pt.write(Buffer.from('foog')) - pt.write(Buffer.from('bark')) - pt.write(Buffer.from('bazy')) - pt.write(Buffer.from('kuel')) - pt.end() - pt.on('finish', function () { - t.equal(pt.read(5).toString(), 'foogf') - t.equal(pt.read(5).toString(), 'oogba') - t.equal(pt.read(5).toString(), 'rkbar') - t.equal(pt.read(5).toString(), 'kbazy') - t.equal(pt.read(5).toString(), 'bazyk') - t.equal(pt.read(5).toString(), 'uelku') - t.equal(pt.read(5).toString(), 'el') + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + pt.on('finish', function () { + t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5).toString(), 'zykue') + t.equal(pt.read(5).toString(), 'l') + }) }) -}) -test('assymetric transform (compress)', function (t) { - t.plan(3) - const pt = new Transform() // each output is the first char of 3 consecutive chunks, - // or whatever's left. + test('assymetric transform (expand)', function (t) { + t.plan(7) + const pt = new Transform() // emit each chunk 2 times. 
- pt.state = '' - - pt._transform = function (chunk, encoding, cb) { - if (!chunk) { - chunk = '' + pt._transform = function (chunk, encoding, cb) { + setTimeout(function () { + pt.push(chunk) + setTimeout(function () { + pt.push(chunk) + cb() + }, 10) + }, 10) } - const s = chunk.toString() - setTimeout( - function () { - this.state += s.charAt(0) + pt.write(Buffer.from('foog')) + pt.write(Buffer.from('bark')) + pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('kuel')) + pt.end() + pt.on('finish', function () { + t.equal(pt.read(5).toString(), 'foogf') + t.equal(pt.read(5).toString(), 'oogba') + t.equal(pt.read(5).toString(), 'rkbar') + t.equal(pt.read(5).toString(), 'kbazy') + t.equal(pt.read(5).toString(), 'bazyk') + t.equal(pt.read(5).toString(), 'uelku') + t.equal(pt.read(5).toString(), 'el') + }) + }) + test('assymetric transform (compress)', function (t) { + t.plan(3) + const pt = new Transform() // each output is the first char of 3 consecutive chunks, + // or whatever's left. - if (this.state.length === 3) { - pt.push(Buffer.from(this.state)) - this.state = '' - } + pt.state = '' - cb() - }.bind(this), - 10 - ) - } + pt._transform = function (chunk, encoding, cb) { + if (!chunk) { + chunk = '' + } - pt._flush = function (cb) { - // just output whatever we have. - pt.push(Buffer.from(this.state)) - this.state = '' - cb() - } + const s = chunk.toString() + setTimeout( + function () { + this.state += s.charAt(0) - pt.write(Buffer.from('aaaa')) - pt.write(Buffer.from('bbbb')) - pt.write(Buffer.from('cccc')) - pt.write(Buffer.from('dddd')) - pt.write(Buffer.from('eeee')) - pt.write(Buffer.from('aaaa')) - pt.write(Buffer.from('bbbb')) - pt.write(Buffer.from('cccc')) - pt.write(Buffer.from('dddd')) - pt.write(Buffer.from('eeee')) - pt.write(Buffer.from('aaaa')) - pt.write(Buffer.from('bbbb')) - pt.write(Buffer.from('cccc')) - pt.write(Buffer.from('dddd')) - pt.end() // 'abcdeabcdeabcd' - - pt.on('finish', function () { - t.equal(pt.read(5).toString(), 'abcde') - t.equal(pt.read(5).toString(), 'abcde') - t.equal(pt.read(5).toString(), 'abcd') - }) -}) // this tests for a stall when data is written to a full stream -// that has empty transforms. - -test('complex transform', function (t) { - t.plan(2) - let count = 0 - let saved = null - const pt = new Transform({ - highWaterMark: 3 - }) + if (this.state.length === 3) { + pt.push(Buffer.from(this.state)) + this.state = '' + } - pt._transform = function (c, e, cb) { - if (count++ === 1) { - saved = c - } else { - if (saved) { - pt.push(saved) - saved = null - } + cb() + }.bind(this), + 10 + ) + } - pt.push(c) + pt._flush = function (cb) { + // just output whatever we have. + pt.push(Buffer.from(this.state)) + this.state = '' + cb() } - cb() - } + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.write(Buffer.from('eeee')) + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.write(Buffer.from('eeee')) + pt.write(Buffer.from('aaaa')) + pt.write(Buffer.from('bbbb')) + pt.write(Buffer.from('cccc')) + pt.write(Buffer.from('dddd')) + pt.end() // 'abcdeabcdeabcd' + + pt.on('finish', function () { + t.equal(pt.read(5).toString(), 'abcde') + t.equal(pt.read(5).toString(), 'abcde') + t.equal(pt.read(5).toString(), 'abcd') + }) + }) // this tests for a stall when data is written to a full stream + // that has empty transforms. 
+ + test('complex transform', function (t) { + t.plan(2) + let count = 0 + let saved = null + const pt = new Transform({ + highWaterMark: 3 + }) - pt.once('readable', function () { - process.nextTick(function () { - pt.write(Buffer.from('d')) - pt.write(Buffer.from('ef'), function () { - pt.end() + pt._transform = function (c, e, cb) { + if (count++ === 1) { + saved = c + } else { + if (saved) { + pt.push(saved) + saved = null + } + + pt.push(c) + } + + cb() + } + + pt.once('readable', function () { + process.nextTick(function () { + pt.write(Buffer.from('d')) + pt.write(Buffer.from('ef'), function () { + pt.end() + }) + t.equal(pt.read().toString(), 'abcdef') + t.equal(pt.read(), null) }) - t.equal(pt.read().toString(), 'abcdef') - t.equal(pt.read(), null) }) + pt.write(Buffer.from('abc')) }) - pt.write(Buffer.from('abc')) -}) -test('passthrough event emission', function (t) { - t.plan(11) - const pt = new PassThrough() - let emits = 0 - pt.on('readable', function () { - // console.error('>>> emit readable %d', emits); - emits++ - }) - pt.write(Buffer.from('foog')) // console.error('need emit 0'); - - pt.write(Buffer.from('bark')) - setTimeout(() => { - // console.error('should have emitted readable now 1 === %d', emits) - t.equal(emits, 1) - t.equal(pt.read(5).toString(), 'foogb') - t.equal(pt.read(5) + '', 'null') // console.error('need emit 1'); + test('passthrough event emission', function (t) { + t.plan(11) + const pt = new PassThrough() + let emits = 0 + pt.on('readable', function () { + // console.error('>>> emit readable %d', emits); + emits++ + }) + pt.write(Buffer.from('foog')) // console.error('need emit 0'); - pt.write(Buffer.from('bazy')) // console.error('should have emitted, but not again'); + pt.write(Buffer.from('bark')) + setTimeout(() => { + // console.error('should have emitted readable now 1 === %d', emits) + t.equal(emits, 1) + t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5) + '', 'null') // console.error('need emit 1'); - pt.write(Buffer.from('kuel')) // console.error('should have emitted readable now 2 === %d', emits); + pt.write(Buffer.from('bazy')) // console.error('should have emitted, but not again'); - setTimeout(() => { - t.equal(emits, 2) - t.equal(pt.read(5).toString(), 'arkba') - t.equal(pt.read(5).toString(), 'zykue') - t.equal(pt.read(5), null) // console.error('need emit 2'); + pt.write(Buffer.from('kuel')) // console.error('should have emitted readable now 2 === %d', emits); - pt.end() setTimeout(() => { - t.equal(emits, 3) - t.equal(pt.read(5).toString(), 'l') - t.equal(pt.read(5), null) // console.error('should not have emitted again'); + t.equal(emits, 2) + t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5).toString(), 'zykue') + t.equal(pt.read(5), null) // console.error('need emit 2'); + + pt.end() + setTimeout(() => { + t.equal(emits, 3) + t.equal(pt.read(5).toString(), 'l') + t.equal(pt.read(5), null) // console.error('should not have emitted again'); - t.equal(emits, 3) + t.equal(emits, 3) + }) }) }) }) -}) -test('passthrough event emission reordered', function (t) { - t.plan(10) - const pt = new PassThrough() - let emits = 0 - pt.on('readable', function () { - // console.error('emit readable', emits); - emits++ - }) - pt.write(Buffer.from('foog')) // console.error('need emit 0'); - - pt.write(Buffer.from('bark')) - setTimeout(() => { - // console.error('should have emitted readable now 1 === %d', emits); - t.equal(emits, 1) - t.equal(pt.read(5).toString(), 'foogb') - t.equal(pt.read(5), null) // console.error('need emit 
1'); + test('passthrough event emission reordered', function (t) { + t.plan(10) + const pt = new PassThrough() + let emits = 0 + pt.on('readable', function () { + // console.error('emit readable', emits); + emits++ + }) + pt.write(Buffer.from('foog')) // console.error('need emit 0'); - pt.once('readable', function () { - t.equal(pt.read(5).toString(), 'arkba') - t.equal(pt.read(5), null) // console.error('need emit 2'); + pt.write(Buffer.from('bark')) + setTimeout(() => { + // console.error('should have emitted readable now 1 === %d', emits); + t.equal(emits, 1) + t.equal(pt.read(5).toString(), 'foogb') + t.equal(pt.read(5), null) // console.error('need emit 1'); pt.once('readable', function () { - t.equal(pt.read(5).toString(), 'zykue') - t.equal(pt.read(5), null) + t.equal(pt.read(5).toString(), 'arkba') + t.equal(pt.read(5), null) // console.error('need emit 2'); + pt.once('readable', function () { - t.equal(pt.read(5).toString(), 'l') + t.equal(pt.read(5).toString(), 'zykue') t.equal(pt.read(5), null) - t.equal(emits, 4) + pt.once('readable', function () { + t.equal(pt.read(5).toString(), 'l') + t.equal(pt.read(5), null) + t.equal(emits, 4) + }) + pt.end() }) - pt.end() + pt.write(Buffer.from('kuel')) }) - pt.write(Buffer.from('kuel')) + pt.write(Buffer.from('bazy')) }) - pt.write(Buffer.from('bazy')) - }) -}) -test('passthrough facaded', function (t) { - t.plan(1) // console.error('passthrough facaded'); - - const pt = new PassThrough() - const datas = [] - pt.on('data', function (chunk) { - datas.push(chunk.toString()) - }) - pt.on('end', function () { - t.same(datas, ['foog', 'bark', 'bazy', 'kuel']) }) - pt.write(Buffer.from('foog')) - setTimeout(function () { - pt.write(Buffer.from('bark')) + test('passthrough facaded', function (t) { + t.plan(1) // console.error('passthrough facaded'); + + const pt = new PassThrough() + const datas = [] + pt.on('data', function (chunk) { + datas.push(chunk.toString()) + }) + pt.on('end', function () { + t.same(datas, ['foog', 'bark', 'bazy', 'kuel']) + }) + pt.write(Buffer.from('foog')) setTimeout(function () { - pt.write(Buffer.from('bazy')) + pt.write(Buffer.from('bark')) setTimeout(function () { - pt.write(Buffer.from('kuel')) + pt.write(Buffer.from('bazy')) setTimeout(function () { - pt.end() + pt.write(Buffer.from('kuel')) + setTimeout(function () { + pt.end() + }, 10) }, 10) }, 10) }, 10) - }, 10) -}) -test('object transform (json parse)', function (t) { - t.plan(5) // console.error('json parse stream'); - - const jp = new Transform({ - objectMode: true }) + test('object transform (json parse)', function (t) { + t.plan(5) // console.error('json parse stream'); - jp._transform = function (data, encoding, cb) { - try { - jp.push(JSON.parse(data)) - cb() - } catch (er) { - cb(er) - } - } // anything except null/undefined is fine. - // those are "magic" in the stream API, because they signal EOF. - - const objects = [ - { - foo: 'bar' - }, - 100, - 'string', - { - nested: { - things: [ - { - foo: 'bar' - }, - 100, - 'string' - ] + const jp = new Transform({ + objectMode: true + }) + + jp._transform = function (data, encoding, cb) { + try { + jp.push(JSON.parse(data)) + cb() + } catch (er) { + cb(er) } - } - ] - let ended = false - jp.on('end', function () { - ended = true - }) - forEach(objects, function (obj) { - jp.write(JSON.stringify(obj)) - const res = jp.read() - t.same(res, obj) - }) - jp.end() // read one more time to get the 'end' event + } // anything except null/undefined is fine. 
+ // those are "magic" in the stream API, because they signal EOF. + + const objects = [ + { + foo: 'bar' + }, + 100, + 'string', + { + nested: { + things: [ + { + foo: 'bar' + }, + 100, + 'string' + ] + } + } + ] + let ended = false + jp.on('end', function () { + ended = true + }) + forEach(objects, function (obj) { + jp.write(JSON.stringify(obj)) + const res = jp.read() + t.same(res, obj) + }) + jp.end() // read one more time to get the 'end' event - jp.read() - process.nextTick(function () { - t.ok(ended) + jp.read() + process.nextTick(function () { + t.ok(ended) + }) }) -}) -test('object transform (json stringify)', function (t) { - t.plan(5) // console.error('json parse stream'); + test('object transform (json stringify)', function (t) { + t.plan(5) // console.error('json parse stream'); - const js = new Transform({ - objectMode: true - }) + const js = new Transform({ + objectMode: true + }) - js._transform = function (data, encoding, cb) { - try { - js.push(JSON.stringify(data)) - cb() - } catch (er) { - cb(er) - } - } // anything except null/undefined is fine. - // those are "magic" in the stream API, because they signal EOF. - - const objects = [ - { - foo: 'bar' - }, - 100, - 'string', - { - nested: { - things: [ - { - foo: 'bar' - }, - 100, - 'string' - ] + js._transform = function (data, encoding, cb) { + try { + js.push(JSON.stringify(data)) + cb() + } catch (er) { + cb(er) } - } - ] - let ended = false - js.on('end', function () { - ended = true - }) - forEach(objects, function (obj) { - js.write(obj) - const res = js.read() - t.equal(res, JSON.stringify(obj)) - }) - js.end() // read one more time to get the 'end' event + } // anything except null/undefined is fine. + // those are "magic" in the stream API, because they signal EOF. + + const objects = [ + { + foo: 'bar' + }, + 100, + 'string', + { + nested: { + things: [ + { + foo: 'bar' + }, + 100, + 'string' + ] + } + } + ] + let ended = false + js.on('end', function () { + ended = true + }) + forEach(objects, function (obj) { + js.write(obj) + const res = js.read() + t.equal(res, JSON.stringify(obj)) + }) + js.end() // read one more time to get the 'end' event - js.read() - process.nextTick(function () { - t.ok(ended) + js.read() + process.nextTick(function () { + t.ok(ended) + }) }) -}) -function forEach(xs, f) { - for (let i = 0, l = xs.length; i < l; i++) { - f(xs[i], i) + function forEach(xs, f) { + for (let i = 0, l = xs.length; i < l; i++) { + f(xs[i], i) + } } } + +module.exports[kReadableStreamSuiteName] = 'stream2-transform' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream2-unpipe-drain.js b/test/browser/test-stream2-unpipe-drain.js index 4adff6c03d..52742520a6 100644 --- a/test/browser/test-stream2-unpipe-drain.js +++ b/test/browser/test-stream2-unpipe-drain.js @@ -1,14 +1,14 @@ 'use strict' -const test = require('tape') - const crypto = require('crypto') const inherits = require('inherits') const stream = require('../../lib/ours/index') -test('unpipe drain', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { try { crypto.randomBytes(9) } catch (_) { @@ -61,4 +61,6 @@ test('unpipe drain', function (t) { t.equal(src1.reads, 2) t.equal(src2.reads, 1) }) -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream2-unpipe-drain' diff --git a/test/browser/test-stream2-writable.js b/test/browser/test-stream2-writable.js index a04d7c6eb3..f80e77a49e 100644 --- a/test/browser/test-stream2-writable.js +++ 
b/test/browser/test-stream2-writable.js @@ -1,11 +1,11 @@ 'use strict' -const test = require('tape') - const inherits = require('inherits') const { Duplex, Writable } = require('../../lib/ours/index') +const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') + inherits(TestWriter, Writable) function TestWriter() { @@ -49,354 +49,359 @@ if (!process.stdout) { process.stdout = new Processstdout() } -test('write fast', function (t) { - t.plan(1) - const tw = new TestWriter({ - highWaterMark: 100 - }) - tw.on('finish', function () { - t.same(tw.buffer, chunks, 'got chunks in the right order') - }) - forEach(chunks, function (chunk) { - // screw backpressure. Just buffer it all up. - tw.write(chunk) - }) - tw.end() -}) -test('write slow', function (t) { - t.plan(1) - const tw = new TestWriter({ - highWaterMark: 100 - }) - tw.on('finish', function () { - t.same(tw.buffer, chunks, 'got chunks in the right order') +module.exports = function (test) { + test('write fast', function (t) { + t.plan(1) + const tw = new TestWriter({ + highWaterMark: 100 + }) + tw.on('finish', function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + }) + forEach(chunks, function (chunk) { + // screw backpressure. Just buffer it all up. + tw.write(chunk) + }) + tw.end() }) - let i = 0 + test('write slow', function (t) { + t.plan(1) + const tw = new TestWriter({ + highWaterMark: 100 + }) + tw.on('finish', function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + }) + let i = 0 - ;(function W() { - tw.write(chunks[i++]) + ;(function W() { + tw.write(chunks[i++]) - if (i < chunks.length) { - setTimeout(W, 10) - } else { - tw.end() - } - })() -}) -test('write backpressure', function (t) { - t.plan(19) - const tw = new TestWriter({ - highWaterMark: 50 - }) - let drains = 0 - tw.on('finish', function () { - t.same(tw.buffer, chunks, 'got chunks in the right order') - t.equal(drains, 17) - }) - tw.on('drain', function () { - drains++ + if (i < chunks.length) { + setTimeout(W, 10) + } else { + tw.end() + } + })() }) - let i = 0 + test('write backpressure', function (t) { + t.plan(19) + const tw = new TestWriter({ + highWaterMark: 50 + }) + let drains = 0 + tw.on('finish', function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + t.equal(drains, 17) + }) + tw.on('drain', function () { + drains++ + }) + let i = 0 - ;(function W() { - let ret + ;(function W() { + let ret - do { - ret = tw.write(chunks[i++]) - } while (ret !== false && i < chunks.length) + do { + ret = tw.write(chunks[i++]) + } while (ret !== false && i < chunks.length) - if (i < chunks.length) { - t.ok(tw._writableState.length >= 50) - tw.once('drain', W) - } else { - tw.end() - } - })() -}) -test('write bufferize', function (t) { - t.plan(50) - const tw = new TestWriter({ - highWaterMark: 100 - }) - const encodings = [ - 'hex', - 'utf8', - 'utf-8', - 'ascii', - 'binary', - 'base64', - 'ucs2', - 'ucs-2', - 'utf16le', - 'utf-16le', - undefined - ] - tw.on('finish', function () { - forEach(chunks, function (chunk, i) { - const actual = Buffer.from(tw.buffer[i]) - chunk = Buffer.from(chunk) // Some combination of encoding and length result in the last byte replaced by two extra null bytes - - if (actual[actual.length - 1] === 0) { - chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) + if (i < chunks.length) { + t.ok(tw._writableState.length >= 50) + tw.once('drain', W) + } else { + tw.end() } + })() + }) + test('write bufferize', function (t) { + 
t.plan(50) + const tw = new TestWriter({ + highWaterMark: 100 + }) + const encodings = [ + 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined + ] + tw.on('finish', function () { + forEach(chunks, function (chunk, i) { + const actual = Buffer.from(tw.buffer[i]) + chunk = Buffer.from(chunk) // Some combination of encoding and length result in the last byte replaced by two extra null bytes + + if (actual[actual.length - 1] === 0) { + chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) + } - t.same(actual, chunk, 'got the expected chunks ' + i) + t.same(actual, chunk, 'got the expected chunks ' + i) + }) }) + forEach(chunks, function (chunk, i) { + const enc = encodings[i % encodings.length] + chunk = Buffer.from(chunk) + tw.write(chunk.toString(enc), enc) + }) + tw.end() }) - forEach(chunks, function (chunk, i) { - const enc = encodings[i % encodings.length] - chunk = Buffer.from(chunk) - tw.write(chunk.toString(enc), enc) - }) - tw.end() -}) -test('write no bufferize', function (t) { - t.plan(100) - const tw = new TestWriter({ - highWaterMark: 100, - decodeStrings: false - }) - - tw._write = function (chunk, encoding, cb) { - t.equals(typeof chunk, 'string') - chunk = Buffer.from(chunk, encoding) - return TestWriter.prototype._write.call(this, chunk, encoding, cb) - } + test('write no bufferize', function (t) { + t.plan(100) + const tw = new TestWriter({ + highWaterMark: 100, + decodeStrings: false + }) - const encodings = [ - 'hex', - 'utf8', - 'utf-8', - 'ascii', - 'binary', - 'base64', - 'ucs2', - 'ucs-2', - 'utf16le', - 'utf-16le', - undefined - ] - tw.on('finish', function () { - forEach(chunks, function (chunk, i) { - const actual = Buffer.from(tw.buffer[i]) - chunk = Buffer.from(chunk) // Some combination of encoding and length result in the last byte replaced by two extra null bytes + tw._write = function (chunk, encoding, cb) { + t.equals(typeof chunk, 'string') + chunk = Buffer.from(chunk, encoding) + return TestWriter.prototype._write.call(this, chunk, encoding, cb) + } - if (actual[actual.length - 1] === 0) { - chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) - } + const encodings = [ + 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined + ] + tw.on('finish', function () { + forEach(chunks, function (chunk, i) { + const actual = Buffer.from(tw.buffer[i]) + chunk = Buffer.from(chunk) // Some combination of encoding and length result in the last byte replaced by two extra null bytes + + if (actual[actual.length - 1] === 0) { + chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) + } - t.same(actual, chunk, 'got the expected chunks ' + i) + t.same(actual, chunk, 'got the expected chunks ' + i) + }) }) - }) - forEach(chunks, function (chunk, i) { - const enc = encodings[i % encodings.length] - chunk = Buffer.from(chunk) - tw.write(chunk.toString(enc), enc) - }) - tw.end() -}) -test('write callbacks', function (t) { - t.plan(2) - const callbacks = chunks - .map(function (chunk, i) { - return [ - i, - function (er) { - callbacks._called[i] = chunk - } - ] - }) - .reduce(function (set, x) { - set['callback-' + x[0]] = x[1] - return set - }, {}) - callbacks._called = [] - const tw = new TestWriter({ - highWaterMark: 100 - }) - tw.on('finish', function () { - process.nextTick(function () { - t.same(tw.buffer, chunks, 'got chunks in the right order') - 
t.same(callbacks._called, chunks, 'called all callbacks') + forEach(chunks, function (chunk, i) { + const enc = encodings[i % encodings.length] + chunk = Buffer.from(chunk) + tw.write(chunk.toString(enc), enc) }) + tw.end() + }) + test('write callbacks', function (t) { + t.plan(2) + const callbacks = chunks + .map(function (chunk, i) { + return [ + i, + function (er) { + callbacks._called[i] = chunk + } + ] + }) + .reduce(function (set, x) { + set['callback-' + x[0]] = x[1] + return set + }, {}) + callbacks._called = [] + const tw = new TestWriter({ + highWaterMark: 100 + }) + tw.on('finish', function () { + process.nextTick(function () { + t.same(tw.buffer, chunks, 'got chunks in the right order') + t.same(callbacks._called, chunks, 'called all callbacks') + }) + }) + forEach(chunks, function (chunk, i) { + tw.write(chunk, callbacks['callback-' + i]) + }) + tw.end() }) - forEach(chunks, function (chunk, i) { - tw.write(chunk, callbacks['callback-' + i]) - }) - tw.end() -}) -test('end callback', function (t) { - t.plan(1) - const tw = new TestWriter() - tw.end(() => { - t.ok(true) - }) -}) -test('end callback with chunk', function (t) { - t.plan(1) - const tw = new TestWriter() - tw.end(Buffer.from('hello world'), () => { - t.ok(true) + test('end callback', function (t) { + t.plan(1) + const tw = new TestWriter() + tw.end(() => { + t.ok(true) + }) }) -}) -test('end callback with chunk and encoding', function (t) { - t.plan(1) - const tw = new TestWriter() - tw.end('hello world', 'ascii', () => { - t.ok(true) + test('end callback with chunk', function (t) { + t.plan(1) + const tw = new TestWriter() + tw.end(Buffer.from('hello world'), () => { + t.ok(true) + }) }) -}) -test('end callback after .write() call', function (t) { - t.plan(1) - const tw = new TestWriter() - tw.write(Buffer.from('hello world')) - tw.end(() => { - t.ok(true) + test('end callback with chunk and encoding', function (t) { + t.plan(1) + const tw = new TestWriter() + tw.end('hello world', 'ascii', () => { + t.ok(true) + }) }) -}) -test('end callback called after write callback', function (t) { - t.plan(1) - const tw = new TestWriter() - let writeCalledback = false - tw.write(Buffer.from('hello world'), function () { - writeCalledback = true + test('end callback after .write() call', function (t) { + t.plan(1) + const tw = new TestWriter() + tw.write(Buffer.from('hello world')) + tw.end(() => { + t.ok(true) + }) }) - tw.end(function () { - t.equal(writeCalledback, true) + test('end callback called after write callback', function (t) { + t.plan(1) + const tw = new TestWriter() + let writeCalledback = false + tw.write(Buffer.from('hello world'), function () { + writeCalledback = true + }) + tw.end(function () { + t.equal(writeCalledback, true) + }) }) -}) -test('encoding should be ignored for buffers', function (t) { - t.plan(1) - const tw = new Writable() - const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb' - - tw._write = function (chunk, encoding, cb) { - t.equal(chunk.toString('hex'), hex) - } + test('encoding should be ignored for buffers', function (t) { + t.plan(1) + const tw = new Writable() + const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb' - const buf = Buffer.from(hex, 'hex') - tw.write(buf, 'binary') -}) -test('writables are not pipable', function (t) { - t.plan(1) - const w = new Writable({ - autoDestroy: false + tw._write = function (chunk, encoding, cb) { + t.equal(chunk.toString('hex'), hex) + } + + const buf = Buffer.from(hex, 'hex') + tw.write(buf, 'binary') }) + test('writables are not pipable', function (t) 
{ + t.plan(1) + const w = new Writable({ + autoDestroy: false + }) - w._write = function () {} + w._write = function () {} - let gotError = false - w.on('error', function (er) { - gotError = true + let gotError = false + w.on('error', function (er) { + gotError = true + }) + w.pipe(process.stdout) + t.ok(gotError) }) - w.pipe(process.stdout) - t.ok(gotError) -}) -test('duplexes are pipable', function (t) { - t.plan(1) - const d = new Duplex() + test('duplexes are pipable', function (t) { + t.plan(1) + const d = new Duplex() - d._read = function () {} + d._read = function () {} - d._write = function () {} + d._write = function () {} - let gotError = false - d.on('error', function (er) { - gotError = true - }) - d.pipe(process.stdout) - t.notOk(gotError) -}) -test('end(chunk) two times is an error', function (t) { - t.plan(2) - const w = new Writable() - - w._write = function () {} - - let gotError = false - w.on('error', function (er) { - gotError = true - t.equal(er.message, 'write after end') + let gotError = false + d.on('error', function (er) { + gotError = true + }) + d.pipe(process.stdout) + t.notOk(gotError) }) - w.end('this is the end') - w.end('and so is this') - process.nextTick(function () { - t.ok(gotError) + test('end(chunk) two times is an error', function (t) { + t.plan(2) + const w = new Writable() + + w._write = function () {} + + let gotError = false + w.on('error', function (er) { + gotError = true + t.equal(er.message, 'write after end') + }) + w.end('this is the end') + w.end('and so is this') + process.nextTick(function () { + t.ok(gotError) + }) }) -}) -test('dont end while writing', function (t) { - t.plan(2) - const w = new Writable() - let wrote = false - - w._write = function (chunk, e, cb) { - t.notOk(this.writing) - wrote = true - this.writing = true - setTimeout(function () { - this.writing = false - cb() + test('dont end while writing', function (t) { + t.plan(2) + const w = new Writable() + let wrote = false + + w._write = function (chunk, e, cb) { + t.notOk(this.writing) + wrote = true + this.writing = true + setTimeout(function () { + this.writing = false + cb() + }) + } + + w.on('finish', function () { + t.ok(wrote) }) - } + w.write(Buffer.alloc(0)) + w.end() + }) + test('finish does not come before write cb', function (t) { + t.plan(1) + const w = new Writable() + let writeCb = false + + w._write = function (chunk, e, cb) { + setTimeout(function () { + writeCb = true + cb() + }, 10) + } - w.on('finish', function () { - t.ok(wrote) + w.on('finish', function () { + t.ok(writeCb) + }) + w.write(Buffer.alloc(0)) + w.end() }) - w.write(Buffer.alloc(0)) - w.end() -}) -test('finish does not come before write cb', function (t) { - t.plan(1) - const w = new Writable() - let writeCb = false - - w._write = function (chunk, e, cb) { - setTimeout(function () { - writeCb = true + test('finish does not come before sync _write cb', function (t) { + t.plan(1) + const w = new Writable() + let writeCb = false + + w._write = function (chunk, e, cb) { cb() - }, 10) - } + } - w.on('finish', function () { - t.ok(writeCb) + w.on('finish', function () { + t.ok(writeCb) + }) + w.write(Buffer.alloc(0), function (er) { + writeCb = true + }) + w.end() }) - w.write(Buffer.alloc(0)) - w.end() -}) -test('finish does not come before sync _write cb', function (t) { - t.plan(1) - const w = new Writable() - let writeCb = false - - w._write = function (chunk, e, cb) { - cb() - } + test('finish is emitted if last chunk is empty', function (t) { + t.plan(1) + const w = new Writable() - 
w.on('finish', function () { - t.ok(writeCb) - }) - w.write(Buffer.alloc(0), function (er) { - writeCb = true - }) - w.end() -}) -test('finish is emitted if last chunk is empty', function (t) { - t.plan(1) - const w = new Writable() - - w._write = function (chunk, e, cb) { - process.nextTick(cb) - } + w._write = function (chunk, e, cb) { + process.nextTick(cb) + } - w.on('finish', () => { - t.ok(true) + w.on('finish', () => { + t.ok(true) + }) + w.write(Buffer.alloc(1)) + w.end(Buffer.alloc(0)) }) - w.write(Buffer.alloc(1)) - w.end(Buffer.alloc(0)) -}) -function forEach(xs, f) { - for (let i = 0, l = xs.length; i < l; i++) { - f(xs[i], i) + function forEach(xs, f) { + for (let i = 0, l = xs.length; i < l; i++) { + f(xs[i], i) + } } } + +module.exports[kReadableStreamSuiteName] = 'stream2-writable' +module.exports[kReadableStreamSuiteHasMultipleTests] = true diff --git a/test/browser/test-stream3-pause-then-read.js b/test/browser/test-stream3-pause-then-read.js index f2f33b6026..a2399f6184 100644 --- a/test/browser/test-stream3-pause-then-read.js +++ b/test/browser/test-stream3-pause-then-read.js @@ -1,10 +1,10 @@ 'use strict' -const test = require('tape') - const { Readable, Writable } = require('../../lib/ours/index') -test('pause then read', function (t) { +const { kReadableStreamSuiteName } = require('./symbols') + +module.exports = function (t) { t.plan(7) const totalChunks = 100 const chunkSize = 99 @@ -47,6 +47,7 @@ test('pause then read', function (t) { function readn(n, then) { // console.error('read %d', n); expectEndingData -= n + ;(function read() { const c = r.read(n) @@ -152,4 +153,6 @@ test('pause then read', function (t) { }) r.pipe(w) } -}) +} + +module.exports[kReadableStreamSuiteName] = 'stream3-pause-then-read' diff --git a/test/common/fixtures.mjs b/test/common/fixtures.mjs index 372fabf88d..d6f7f6c092 100644 --- a/test/common/fixtures.mjs +++ b/test/common/fixtures.mjs @@ -1,5 +1,17 @@ -import fixtures from './fixtures.js' +import fixtures from './fixtures.js'; -const { fixturesDir, path, fileURL, readSync, readKey } = fixtures +const { + fixturesDir, + path, + fileURL, + readSync, + readKey, +} = fixtures; -export { fixturesDir, path, fileURL, readSync, readKey } +export { + fixturesDir, + path, + fileURL, + readSync, + readKey, +}; diff --git a/test/common/index.js b/test/common/index.js index c5c369f574..1c30eab135 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -280,7 +280,7 @@ function platformTimeout(ms) { } let knownGlobals = [ - typeof AggregateError !== 'undefined' ? AggregateError : require('aggregate-error'), + typeof AggregateError !== 'undefined' ? AggregateError : require('../../lib/ours/util').AggregateError, typeof AbortController !== 'undefined' ? AbortController : require('abort-controller').AbortController, typeof AbortSignal !== 'undefined' ? AbortSignal : require('abort-controller').AbortSignal, typeof EventTarget !== 'undefined' ? 
EventTarget : require('event-target-shim').EventTarget, @@ -978,34 +978,3 @@ module.exports = new Proxy(common, { return obj[prop] } }) -/* replacement start */ - -if (typeof Blob === 'undefined') { - let { Blob } = require('buffer') - - if (typeof Blob === 'undefined') { - Blob = require('blob-polyfill').Blob - } - - globalThis.Blob = Blob - allowGlobals(Blob) -} - -if (typeof EventTarget === 'undefined') { - globalThis.EventTarget = require('event-target-shim').EventTarget -} - -if (typeof AbortController === 'undefined') { - globalThis.AbortController = require('abort-controller').AbortController -} - -if (typeof AbortSignal === 'undefined') { - globalThis.AbortSignal = require('abort-controller').AbortSignal - - globalThis.AbortSignal.abort = function () { - const controller = new AbortController() - controller.abort() - return controller.signal - } -} -/* replacement end */ diff --git a/test/common/index.mjs b/test/common/index.mjs index 85c2548b71..ec181dcacb 100644 --- a/test/common/index.mjs +++ b/test/common/index.mjs @@ -1,7 +1,7 @@ -import { createRequire } from 'module' +import { createRequire } from 'module'; -const require = createRequire(import.meta.url) -const common = require('./index.js') +const require = createRequire(import.meta.url); +const common = require('./index.js'); const { isMainThread, @@ -47,7 +47,7 @@ const { getBufferSources, getTTYfd, runWithInvalidFD -} = common +} = common; export { isMainThread, @@ -94,4 +94,4 @@ export { getTTYfd, runWithInvalidFD, createRequire -} +}; diff --git a/test/parallel/test-stream-add-abort-signal.js b/test/parallel/test-stream-add-abort-signal.js index c8a5e131f2..976c72b206 100644 --- a/test/parallel/test-stream-add-abort-signal.js +++ b/test/parallel/test-stream-add-abort-signal.js @@ -1,5 +1,21 @@ +/* replacement start */ +const AbortController = globalThis.AbortController || require('abort-controller').AbortController + +const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal + +const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget + +if (typeof AbortSignal.abort !== 'function') { + AbortSignal.abort = function () { + const controller = new AbortController() + controller.abort() + return controller.signal + } +} +/* replacement end */ // Flags: --expose-internals -'use strict' + +;('use strict') const tap = require('tap') diff --git a/test/parallel/test-stream-asIndexedPairs.mjs b/test/parallel/test-stream-asIndexedPairs.mjs index 35919114a9..a103920eef 100644 --- a/test/parallel/test-stream-asIndexedPairs.mjs +++ b/test/parallel/test-stream-asIndexedPairs.mjs @@ -1,82 +1,64 @@ -import '../common/index.mjs' -import { Readable } from '../../lib/ours/index.js' -import { deepStrictEqual, rejects, throws } from 'assert' -import tap from 'tap' +import '../common/index.mjs'; +import { Readable }from '../../lib/ours/index.js'; +import { deepStrictEqual, rejects, throws } from 'assert'; +import tap from 'tap'; { // asIndexedPairs with a synchronous stream - const pairs = await Readable.from([1, 2, 3]).asIndexedPairs().toArray() - deepStrictEqual(pairs, [ - [0, 1], - [1, 2], - [2, 3] - ]) - const empty = await Readable.from([]).asIndexedPairs().toArray() - deepStrictEqual(empty, []) + const pairs = await Readable.from([1, 2, 3]).asIndexedPairs().toArray(); + deepStrictEqual(pairs, [[0, 1], [1, 2], [2, 3]]); + const empty = await Readable.from([]).asIndexedPairs().toArray(); + deepStrictEqual(empty, []); } { // asIndexedPairs works an asynchronous streams - const 
asyncFrom = (...args) => Readable.from(...args).map(async (x) => x) - const pairs = await asyncFrom([1, 2, 3]).asIndexedPairs().toArray() - deepStrictEqual(pairs, [ - [0, 1], - [1, 2], - [2, 3] - ]) - const empty = await asyncFrom([]).asIndexedPairs().toArray() - deepStrictEqual(empty, []) + const asyncFrom = (...args) => Readable.from(...args).map(async (x) => x); + const pairs = await asyncFrom([1, 2, 3]).asIndexedPairs().toArray(); + deepStrictEqual(pairs, [[0, 1], [1, 2], [2, 3]]); + const empty = await asyncFrom([]).asIndexedPairs().toArray(); + deepStrictEqual(empty, []); } { // Does not enumerate an infinite stream - const infinite = () => - Readable.from( - (async function* () { - while (true) yield 1 - })() - ) - const pairs = await infinite().asIndexedPairs().take(3).toArray() - deepStrictEqual(pairs, [ - [0, 1], - [1, 1], - [2, 1] - ]) - const empty = await infinite().asIndexedPairs().take(0).toArray() - deepStrictEqual(empty, []) + const infinite = () => Readable.from(async function* () { + while (true) yield 1; + }()); + const pairs = await infinite().asIndexedPairs().take(3).toArray(); + deepStrictEqual(pairs, [[0, 1], [1, 1], [2, 1]]); + const empty = await infinite().asIndexedPairs().take(0).toArray(); + deepStrictEqual(empty, []); } { // AbortSignal - await rejects( - async () => { - const ac = new AbortController() - const { signal } = ac - const p = Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray() - ac.abort() - await p - }, - { name: 'AbortError' } - ) + await rejects(async () => { + const ac = new AbortController(); + const { signal } = ac; + const p = Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray(); + ac.abort(); + await p; + }, { name: 'AbortError' }); await rejects(async () => { - const signal = AbortSignal.abort() - await Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray() - }, /AbortError/) + const signal = AbortSignal.abort(); + await Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray(); + }, /AbortError/); } { // Error cases - throws(() => Readable.from([1]).asIndexedPairs(1), /ERR_INVALID_ARG_TYPE/) - throws(() => Readable.from([1]).asIndexedPairs({ signal: true }), /ERR_INVALID_ARG_TYPE/) + throws(() => Readable.from([1]).asIndexedPairs(1), /ERR_INVALID_ARG_TYPE/); + throws(() => Readable.from([1]).asIndexedPairs({ signal: true }), /ERR_INVALID_ARG_TYPE/); } -/* replacement start */ -process.on('beforeExit', (code) => { - if (code === 0) { - tap.pass('test succeeded') - } else { - tap.fail(`test failed - exited code ${code}`) - } -}) -/* replacement end */ + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-drop-take.js b/test/parallel/test-stream-drop-take.js index f919c70f52..a80d54b8e4 100644 --- a/test/parallel/test-stream-drop-take.js +++ b/test/parallel/test-stream-drop-take.js @@ -1,4 +1,20 @@ -'use strict' +/* replacement start */ +const AbortController = globalThis.AbortController || require('abort-controller').AbortController + +const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal + +const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget + +if (typeof AbortSignal.abort !== 'function') { + AbortSignal.abort = function () { + const controller = new AbortController() + controller.abort() + return controller.signal + } +} +/* replacement end */ + +;('use 
strict') const tap = require('tap') @@ -39,6 +55,7 @@ const naturals = () => deepStrictEqual(await from([1, 2]).drop(0).toArray(), [1, 2]) deepStrictEqual(await from([1, 2]).take(0).toArray(), []) })().then(common.mustCall()) // Asynchronous streams + ;(async () => { deepStrictEqual(await fromAsync([1, 2, 3]).drop(2).toArray(), [3]) deepStrictEqual(await fromAsync([1, 2, 3]).take(1).toArray(), [1]) @@ -49,6 +66,7 @@ const naturals = () => deepStrictEqual(await fromAsync([1, 2]).take(0).toArray(), []) })().then(common.mustCall()) // Infinite streams // Asynchronous streams + ;(async () => { deepStrictEqual(await naturals().take(1).toArray(), [1]) deepStrictEqual(await naturals().drop(1).take(1).toArray(), [2]) diff --git a/test/parallel/test-stream-duplex-destroy.js b/test/parallel/test-stream-duplex-destroy.js index 67dd7c80dc..52a4f3a03a 100644 --- a/test/parallel/test-stream-duplex-destroy.js +++ b/test/parallel/test-stream-duplex-destroy.js @@ -1,4 +1,20 @@ -'use strict' +/* replacement start */ +const AbortController = globalThis.AbortController || require('abort-controller').AbortController + +const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal + +const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget + +if (typeof AbortSignal.abort !== 'function') { + AbortSignal.abort = function () { + const controller = new AbortController() + controller.abort() + return controller.signal + } +} +/* replacement end */ + +;('use strict') const tap = require('tap') diff --git a/test/parallel/test-stream-duplex-from.js b/test/parallel/test-stream-duplex-from.js index 8eff213a46..6c10108046 100644 --- a/test/parallel/test-stream-duplex-from.js +++ b/test/parallel/test-stream-duplex-from.js @@ -13,11 +13,7 @@ const assert = require('assert') const { Duplex, Readable, Writable, pipeline } = require('../../lib/ours/index') -let { Blob } = require('buffer') - -if (typeof Blob === 'undefined') { - Blob = require('blob-polyfill').Blob -} +const Blob = globalThis.Blob || require('buffer').Blob { const d = Duplex.from({ @@ -202,7 +198,7 @@ if (typeof Blob === 'undefined') { assert.strictEqual(Duplex.from(duplex), duplex) } // Ensure that Duplex.from works for blobs -{ +if (typeof Blob !== 'undefined') { const blob = new Blob(['blob']) const expectedByteLength = blob.size const duplex = Duplex.from(blob) diff --git a/test/parallel/test-stream-filter.js b/test/parallel/test-stream-filter.js index 5ed6fb1046..73571bf4e5 100644 --- a/test/parallel/test-stream-filter.js +++ b/test/parallel/test-stream-filter.js @@ -1,4 +1,20 @@ -'use strict' +/* replacement start */ +const AbortController = globalThis.AbortController || require('abort-controller').AbortController + +const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal + +const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget + +if (typeof AbortSignal.abort !== 'function') { + AbortSignal.abort = function () { + const controller = new AbortController() + controller.abort() + return controller.signal + } +} +/* replacement end */ + +;('use strict') const tap = require('tap') diff --git a/test/parallel/test-stream-finished.js b/test/parallel/test-stream-finished.js index 5f659f667e..2b9d9c400d 100644 --- a/test/parallel/test-stream-finished.js +++ b/test/parallel/test-stream-finished.js @@ -1,4 +1,20 @@ -'use strict' +/* replacement start */ +const AbortController = globalThis.AbortController || require('abort-controller').AbortController 
+ +const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal + +const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget + +if (typeof AbortSignal.abort !== 'function') { + AbortSignal.abort = function () { + const controller = new AbortController() + controller.abort() + return controller.signal + } +} +/* replacement end */ + +;('use strict') const tap = require('tap') diff --git a/test/parallel/test-stream-flatMap.js b/test/parallel/test-stream-flatMap.js index d653682276..af775d1ba8 100644 --- a/test/parallel/test-stream-flatMap.js +++ b/test/parallel/test-stream-flatMap.js @@ -1,4 +1,20 @@ -'use strict' +/* replacement start */ +const AbortController = globalThis.AbortController || require('abort-controller').AbortController + +const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal + +const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget + +if (typeof AbortSignal.abort !== 'function') { + AbortSignal.abort = function () { + const controller = new AbortController() + controller.abort() + return controller.signal + } +} +/* replacement end */ + +;('use strict') const tap = require('tap') @@ -75,6 +91,7 @@ function oneTo5() { .toArray() assert.deepStrictEqual(result, [1, 1, 2, 2, 3, 3, 4, 4, 5, 5]) })().then(common.mustCall()) // flatMap works on an objectMode stream where mappign returns a stream + ;(async () => { const result = await oneTo5() .flatMap(() => { diff --git a/test/parallel/test-stream-forEach.js b/test/parallel/test-stream-forEach.js index 083864f06f..726327a51a 100644 --- a/test/parallel/test-stream-forEach.js +++ b/test/parallel/test-stream-forEach.js @@ -1,4 +1,20 @@ -'use strict' +/* replacement start */ +const AbortController = globalThis.AbortController || require('abort-controller').AbortController + +const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal + +const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget + +if (typeof AbortSignal.abort !== 'function') { + AbortSignal.abort = function () { + const controller = new AbortController() + controller.abort() + return controller.signal + } +} +/* replacement end */ + +;('use strict') const tap = require('tap') diff --git a/test/parallel/test-stream-iterator-helpers-test262-tests.mjs b/test/parallel/test-stream-iterator-helpers-test262-tests.mjs index 8231f80ced..9f09abeab6 100644 --- a/test/parallel/test-stream-iterator-helpers-test262-tests.mjs +++ b/test/parallel/test-stream-iterator-helpers-test262-tests.mjs @@ -1,7 +1,7 @@ -import { mustCall } from '../common/index.mjs' -import { Readable } from '../../lib/ours/index.js' -import assert from 'assert' -import tap from 'tap' +import { mustCall } from '../common/index.mjs'; +import { Readable }from '../../lib/ours/index.js'; +import assert from 'assert'; +import tap from 'tap'; // These tests are manually ported from the draft PR for the test262 test suite // Authored by Rick Waldron in https://github.com/tc39/test262/pull/2818/files @@ -46,131 +46,134 @@ import tap from 'tap' // * Ecma International Standards hereafter means Ecma International Standards // as well as Ecma Technical Reports + // Note all the tests that check AsyncIterator's prototype itself and things // that happen before stream conversion were not ported. 
{ // asIndexedPairs/is-function - assert.strictEqual(typeof Readable.prototype.asIndexedPairs, 'function') + assert.strictEqual(typeof Readable.prototype.asIndexedPairs, 'function'); // asIndexedPairs/indexed-pairs.js - const iterator = Readable.from([0, 1]) - const indexedPairs = iterator.asIndexedPairs() + const iterator = Readable.from([0, 1]); + const indexedPairs = iterator.asIndexedPairs(); for await (const [i, v] of indexedPairs) { - assert.strictEqual(i, v) + assert.strictEqual(i, v); } // asIndexedPairs/length.js - assert.strictEqual(Readable.prototype.asIndexedPairs.length, 0) + assert.strictEqual(Readable.prototype.asIndexedPairs.length, 0); // asIndexedPairs/name.js - assert.strictEqual(Readable.prototype.asIndexedPairs.name, 'asIndexedPairs') - const descriptor = Object.getOwnPropertyDescriptor(Readable.prototype, 'asIndexedPairs') - assert.strictEqual(descriptor.enumerable, false) - assert.strictEqual(descriptor.configurable, true) - assert.strictEqual(descriptor.writable, true) + assert.strictEqual(Readable.prototype.asIndexedPairs.name, 'asIndexedPairs'); + const descriptor = Object.getOwnPropertyDescriptor( + Readable.prototype, + 'asIndexedPairs' + ); + assert.strictEqual(descriptor.enumerable, false); + assert.strictEqual(descriptor.configurable, true); + assert.strictEqual(descriptor.writable, true); } { // drop/length - assert.strictEqual(Readable.prototype.drop.length, 1) - const descriptor = Object.getOwnPropertyDescriptor(Readable.prototype, 'drop') - assert.strictEqual(descriptor.enumerable, false) - assert.strictEqual(descriptor.configurable, true) - assert.strictEqual(descriptor.writable, true) + assert.strictEqual(Readable.prototype.drop.length, 1); + const descriptor = Object.getOwnPropertyDescriptor( + Readable.prototype, + 'drop' + ); + assert.strictEqual(descriptor.enumerable, false); + assert.strictEqual(descriptor.configurable, true); + assert.strictEqual(descriptor.writable, true); // drop/limit-equals-total - const iterator = Readable.from([1, 2]).drop(2) - const result = await iterator[Symbol.asyncIterator]().next() - assert.deepStrictEqual(result, { done: true, value: undefined }) + const iterator = Readable.from([1, 2]).drop(2); + const result = await iterator[Symbol.asyncIterator]().next(); + assert.deepStrictEqual(result, { done: true, value: undefined }); // drop/limit-greater-than-total.js - const iterator2 = Readable.from([1, 2]).drop(3) - const result2 = await iterator2[Symbol.asyncIterator]().next() - assert.deepStrictEqual(result2, { done: true, value: undefined }) + const iterator2 = Readable.from([1, 2]).drop(3); + const result2 = await iterator2[Symbol.asyncIterator]().next(); + assert.deepStrictEqual(result2, { done: true, value: undefined }); // drop/limit-less-than-total.js - const iterator3 = Readable.from([1, 2]).drop(1) - const result3 = await iterator3[Symbol.asyncIterator]().next() - assert.deepStrictEqual(result3, { done: false, value: 2 }) + const iterator3 = Readable.from([1, 2]).drop(1); + const result3 = await iterator3[Symbol.asyncIterator]().next(); + assert.deepStrictEqual(result3, { done: false, value: 2 }); // drop/limit-rangeerror - assert.throws(() => Readable.from([1]).drop(-1), RangeError) + assert.throws(() => Readable.from([1]).drop(-1), RangeError); assert.throws(() => { Readable.from([1]).drop({ valueOf() { - throw new Error('boom') + throw new Error('boom'); } - }) - }, /boom/) + }); + }, /boom/); // drop/limit-tointeger - const two = await Readable.from([1, 2]) - .drop({ valueOf: () => 1 }) - .toArray() - 
assert.deepStrictEqual(two, [2]) + const two = await Readable.from([1, 2]).drop({ valueOf: () => 1 }).toArray(); + assert.deepStrictEqual(two, [2]); // drop/name - assert.strictEqual(Readable.prototype.drop.name, 'drop') + assert.strictEqual(Readable.prototype.drop.name, 'drop'); // drop/non-constructible - assert.throws(() => new Readable.prototype.drop(1), TypeError) + assert.throws(() => new Readable.prototype.drop(1), TypeError); // drop/proto - const proto = Object.getPrototypeOf(Readable.prototype.drop) - assert.strictEqual(proto, Function.prototype) + const proto = Object.getPrototypeOf(Readable.prototype.drop); + assert.strictEqual(proto, Function.prototype); } { // every/abrupt-iterator-close - const stream = Readable.from([1, 2, 3]) - const e = new Error() - await assert.rejects( - stream.every( - mustCall(() => { - throw e - }, 1) - ), - e - ) + const stream = Readable.from([1, 2, 3]); + const e = new Error(); + await assert.rejects(stream.every(mustCall(() => { + throw e; + }, 1)), e); } { // every/callable-fn - await assert.rejects(Readable.from([1, 2]).every({}), TypeError) + await assert.rejects(Readable.from([1, 2]).every({}), TypeError); } { // every/callable - Readable.prototype.every.call(Readable.from([]), () => {}) + Readable.prototype.every.call(Readable.from([]), () => {}); // eslint-disable-next-line array-callback-return - Readable.from([]).every(() => {}) + Readable.from([]).every(() => {}); assert.throws(() => { - const r = Readable.from([]) - new r.every(() => {}) - }, TypeError) + const r = Readable.from([]); + new r.every(() => {}); + }, TypeError); } { // every/false - const iterator = Readable.from([1, 2, 3]) - const result = await iterator.every((v) => v === 1) - assert.strictEqual(result, false) + const iterator = Readable.from([1, 2, 3]); + const result = await iterator.every((v) => v === 1); + assert.strictEqual(result, false); } { // every/every - const iterator = Readable.from([1, 2, 3]) - const result = await iterator.every((v) => true) - assert.strictEqual(result, true) + const iterator = Readable.from([1, 2, 3]); + const result = await iterator.every((v) => true); + assert.strictEqual(result, true); } { // every/is-function - assert.strictEqual(typeof Readable.prototype.every, 'function') + assert.strictEqual(typeof Readable.prototype.every, 'function'); } { // every/length - assert.strictEqual(Readable.prototype.every.length, 1) + assert.strictEqual(Readable.prototype.every.length, 1); // every/name - assert.strictEqual(Readable.prototype.every.name, 'every') + assert.strictEqual(Readable.prototype.every.name, 'every'); // every/propdesc - const descriptor = Object.getOwnPropertyDescriptor(Readable.prototype, 'every') - assert.strictEqual(descriptor.enumerable, false) - assert.strictEqual(descriptor.configurable, true) - assert.strictEqual(descriptor.writable, true) + const descriptor = Object.getOwnPropertyDescriptor( + Readable.prototype, + 'every' + ); + assert.strictEqual(descriptor.enumerable, false); + assert.strictEqual(descriptor.configurable, true); + assert.strictEqual(descriptor.writable, true); } -/* replacement start */ -process.on('beforeExit', (code) => { - if (code === 0) { - tap.pass('test succeeded') - } else { - tap.fail(`test failed - exited code ${code}`) - } -}) -/* replacement end */ + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git 
a/test/parallel/test-stream-readable-destroy.js b/test/parallel/test-stream-readable-destroy.js index c29282cdbc..1823b1e651 100644 --- a/test/parallel/test-stream-readable-destroy.js +++ b/test/parallel/test-stream-readable-destroy.js @@ -1,4 +1,20 @@ -'use strict' +/* replacement start */ +const AbortController = globalThis.AbortController || require('abort-controller').AbortController + +const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal + +const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget + +if (typeof AbortSignal.abort !== 'function') { + AbortSignal.abort = function () { + const controller = new AbortController() + controller.abort() + return controller.signal + } +} +/* replacement end */ + +;('use strict') const tap = require('tap') diff --git a/test/parallel/test-stream-reduce.js b/test/parallel/test-stream-reduce.js index a13eaa627d..4c4f600041 100644 --- a/test/parallel/test-stream-reduce.js +++ b/test/parallel/test-stream-reduce.js @@ -1,4 +1,20 @@ -'use strict' +/* replacement start */ +const AbortController = globalThis.AbortController || require('abort-controller').AbortController + +const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal + +const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget + +if (typeof AbortSignal.abort !== 'function') { + AbortSignal.abort = function () { + const controller = new AbortController() + controller.abort() + return controller.signal + } +} +/* replacement end */ + +;('use strict') const tap = require('tap') diff --git a/test/parallel/test-stream-some-find-every.mjs b/test/parallel/test-stream-some-find-every.mjs index 30298d0d07..34c8e2a8a2 100644 --- a/test/parallel/test-stream-some-find-every.mjs +++ b/test/parallel/test-stream-some-find-every.mjs @@ -1,215 +1,183 @@ -import * as common from '../common/index.mjs' -import { setTimeout } from 'timers/promises' -import { Readable } from '../../lib/ours/index.js' -import assert from 'assert' -import tap from 'tap' +import * as common from '../common/index.mjs'; +import { setTimeout } from 'timers/promises'; +import { Readable }from '../../lib/ours/index.js'; +import assert from 'assert'; +import tap from 'tap'; + function oneTo5() { - return Readable.from([1, 2, 3, 4, 5]) + return Readable.from([1, 2, 3, 4, 5]); } function oneTo5Async() { return oneTo5().map(async (x) => { - await Promise.resolve() - return x - }) + await Promise.resolve(); + return x; + }); } { // Some, find, and every work with a synchronous stream and predicate - assert.strictEqual(await oneTo5().some((x) => x > 3), true) - assert.strictEqual(await oneTo5().every((x) => x > 3), false) - assert.strictEqual(await oneTo5().find((x) => x > 3), 4) - assert.strictEqual(await oneTo5().some((x) => x > 6), false) - assert.strictEqual(await oneTo5().every((x) => x < 6), true) - assert.strictEqual(await oneTo5().find((x) => x > 6), undefined) - assert.strictEqual(await Readable.from([]).some(() => true), false) - assert.strictEqual(await Readable.from([]).every(() => true), true) - assert.strictEqual(await Readable.from([]).find(() => true), undefined) + assert.strictEqual(await oneTo5().some((x) => x > 3), true); + assert.strictEqual(await oneTo5().every((x) => x > 3), false); + assert.strictEqual(await oneTo5().find((x) => x > 3), 4); + assert.strictEqual(await oneTo5().some((x) => x > 6), false); + assert.strictEqual(await oneTo5().every((x) => x < 6), true); + assert.strictEqual(await 
oneTo5().find((x) => x > 6), undefined); + assert.strictEqual(await Readable.from([]).some(() => true), false); + assert.strictEqual(await Readable.from([]).every(() => true), true); + assert.strictEqual(await Readable.from([]).find(() => true), undefined); } { // Some, find, and every work with an asynchronous stream and synchronous predicate - assert.strictEqual(await oneTo5Async().some((x) => x > 3), true) - assert.strictEqual(await oneTo5Async().every((x) => x > 3), false) - assert.strictEqual(await oneTo5Async().find((x) => x > 3), 4) - assert.strictEqual(await oneTo5Async().some((x) => x > 6), false) - assert.strictEqual(await oneTo5Async().every((x) => x < 6), true) - assert.strictEqual(await oneTo5Async().find((x) => x > 6), undefined) + assert.strictEqual(await oneTo5Async().some((x) => x > 3), true); + assert.strictEqual(await oneTo5Async().every((x) => x > 3), false); + assert.strictEqual(await oneTo5Async().find((x) => x > 3), 4); + assert.strictEqual(await oneTo5Async().some((x) => x > 6), false); + assert.strictEqual(await oneTo5Async().every((x) => x < 6), true); + assert.strictEqual(await oneTo5Async().find((x) => x > 6), undefined); } { // Some, find, and every work on synchronous streams with an asynchronous predicate - assert.strictEqual(await oneTo5().some(async (x) => x > 3), true) - assert.strictEqual(await oneTo5().every(async (x) => x > 3), false) - assert.strictEqual(await oneTo5().find(async (x) => x > 3), 4) - assert.strictEqual(await oneTo5().some(async (x) => x > 6), false) - assert.strictEqual(await oneTo5().every(async (x) => x < 6), true) - assert.strictEqual(await oneTo5().find(async (x) => x > 6), undefined) + assert.strictEqual(await oneTo5().some(async (x) => x > 3), true); + assert.strictEqual(await oneTo5().every(async (x) => x > 3), false); + assert.strictEqual(await oneTo5().find(async (x) => x > 3), 4); + assert.strictEqual(await oneTo5().some(async (x) => x > 6), false); + assert.strictEqual(await oneTo5().every(async (x) => x < 6), true); + assert.strictEqual(await oneTo5().find(async (x) => x > 6), undefined); } { // Some, find, and every work on asynchronous streams with an asynchronous predicate - assert.strictEqual(await oneTo5Async().some(async (x) => x > 3), true) - assert.strictEqual(await oneTo5Async().every(async (x) => x > 3), false) - assert.strictEqual(await oneTo5Async().find(async (x) => x > 3), 4) - assert.strictEqual(await oneTo5Async().some(async (x) => x > 6), false) - assert.strictEqual(await oneTo5Async().every(async (x) => x < 6), true) - assert.strictEqual(await oneTo5Async().find(async (x) => x > 6), undefined) + assert.strictEqual(await oneTo5Async().some(async (x) => x > 3), true); + assert.strictEqual(await oneTo5Async().every(async (x) => x > 3), false); + assert.strictEqual(await oneTo5Async().find(async (x) => x > 3), 4); + assert.strictEqual(await oneTo5Async().some(async (x) => x > 6), false); + assert.strictEqual(await oneTo5Async().every(async (x) => x < 6), true); + assert.strictEqual(await oneTo5Async().find(async (x) => x > 6), undefined); } { async function checkDestroyed(stream) { - await setTimeout() - assert.strictEqual(stream.destroyed, true) + await setTimeout(); + assert.strictEqual(stream.destroyed, true); } { // Some, find, and every short circuit - const someStream = oneTo5() - await someStream.some(common.mustCall((x) => x > 2, 3)) - await checkDestroyed(someStream) + const someStream = oneTo5(); + await someStream.some(common.mustCall((x) => x > 2, 3)); + await checkDestroyed(someStream); - const 
everyStream = oneTo5() - await everyStream.every(common.mustCall((x) => x < 3, 3)) - await checkDestroyed(everyStream) + const everyStream = oneTo5(); + await everyStream.every(common.mustCall((x) => x < 3, 3)); + await checkDestroyed(everyStream); - const findStream = oneTo5() - await findStream.find(common.mustCall((x) => x > 1, 2)) - await checkDestroyed(findStream) + const findStream = oneTo5(); + await findStream.find(common.mustCall((x) => x > 1, 2)); + await checkDestroyed(findStream); // When short circuit isn't possible the whole stream is iterated - await oneTo5().some(common.mustCall(() => false, 5)) - await oneTo5().every(common.mustCall(() => true, 5)) - await oneTo5().find(common.mustCall(() => false, 5)) + await oneTo5().some(common.mustCall(() => false, 5)); + await oneTo5().every(common.mustCall(() => true, 5)); + await oneTo5().find(common.mustCall(() => false, 5)); } { // Some, find, and every short circuit async stream/predicate - const someStream = oneTo5Async() - await someStream.some(common.mustCall(async (x) => x > 2, 3)) - await checkDestroyed(someStream) + const someStream = oneTo5Async(); + await someStream.some(common.mustCall(async (x) => x > 2, 3)); + await checkDestroyed(someStream); - const everyStream = oneTo5Async() - await everyStream.every(common.mustCall(async (x) => x < 3, 3)) - await checkDestroyed(everyStream) + const everyStream = oneTo5Async(); + await everyStream.every(common.mustCall(async (x) => x < 3, 3)); + await checkDestroyed(everyStream); - const findStream = oneTo5Async() - await findStream.find(common.mustCall(async (x) => x > 1, 2)) - await checkDestroyed(findStream) + const findStream = oneTo5Async(); + await findStream.find(common.mustCall(async (x) => x > 1, 2)); + await checkDestroyed(findStream); // When short circuit isn't possible the whole stream is iterated - await oneTo5Async().some(common.mustCall(async () => false, 5)) - await oneTo5Async().every(common.mustCall(async () => true, 5)) - await oneTo5Async().find(common.mustCall(async () => false, 5)) + await oneTo5Async().some(common.mustCall(async () => false, 5)); + await oneTo5Async().every(common.mustCall(async () => true, 5)); + await oneTo5Async().find(common.mustCall(async () => false, 5)); } } { // Concurrency doesn't affect which value is found. 
- const found = await Readable.from([1, 2]).find( - async (val) => { - if (val === 1) { - await setTimeout(100) - } - return true - }, - { concurrency: 2 } - ) - assert.strictEqual(found, 1) + const found = await Readable.from([1, 2]).find(async (val) => { + if (val === 1) { + await setTimeout(100); + } + return true; + }, { concurrency: 2 }); + assert.strictEqual(found, 1); } { // Support for AbortSignal for (const op of ['some', 'every', 'find']) { { - const ac = new AbortController() - assert - .rejects( - Readable.from([1, 2, 3])[op](() => new Promise(() => {}), { signal: ac.signal }), - { - name: 'AbortError' - }, - `${op} should abort correctly with sync abort` - ) - .then(common.mustCall()) - ac.abort() + const ac = new AbortController(); + assert.rejects(Readable.from([1, 2, 3])[op]( + () => new Promise(() => { }), + { signal: ac.signal } + ), { + name: 'AbortError', + }, `${op} should abort correctly with sync abort`).then(common.mustCall()); + ac.abort(); } { // Support for pre-aborted AbortSignal - assert - .rejects( - Readable.from([1, 2, 3])[op](() => new Promise(() => {}), { signal: AbortSignal.abort() }), - { - name: 'AbortError' - }, - `${op} should abort with pre-aborted abort controller` - ) - .then(common.mustCall()) + assert.rejects(Readable.from([1, 2, 3])[op]( + () => new Promise(() => { }), + { signal: AbortSignal.abort() } + ), { + name: 'AbortError', + }, `${op} should abort with pre-aborted abort controller`).then(common.mustCall()); } } } { // Error cases for (const op of ['some', 'every', 'find']) { - assert - .rejects( - async () => { - await Readable.from([1])[op](1) - }, - /ERR_INVALID_ARG_TYPE/, - `${op} should throw for invalid function` - ) - .then(common.mustCall()) - assert - .rejects( - async () => { - await Readable.from([1])[op]((x) => x, { - concurrency: 'Foo' - }) - }, - /ERR_OUT_OF_RANGE/, - `${op} should throw for invalid concurrency` - ) - .then(common.mustCall()) - assert - .rejects( - async () => { - await Readable.from([1])[op]((x) => x, 1) - }, - /ERR_INVALID_ARG_TYPE/, - `${op} should throw for invalid concurrency` - ) - .then(common.mustCall()) - assert - .rejects( - async () => { - await Readable.from([1])[op]((x) => x, { - signal: true - }) - }, - /ERR_INVALID_ARG_TYPE/, - `${op} should throw for invalid signal` - ) - .then(common.mustCall()) + assert.rejects(async () => { + await Readable.from([1])[op](1); + }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid function`).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1])[op]((x) => x, { + concurrency: 'Foo' + }); + }, /ERR_OUT_OF_RANGE/, `${op} should throw for invalid concurrency`).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1])[op]((x) => x, 1); + }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid concurrency`).then(common.mustCall()); + assert.rejects(async () => { + await Readable.from([1])[op]((x) => x, { + signal: true + }); + }, /ERR_INVALID_ARG_TYPE/, `${op} should throw for invalid signal`).then(common.mustCall()); } } { for (const op of ['some', 'every', 'find']) { - const stream = oneTo5() + const stream = oneTo5(); Object.defineProperty(stream, 'map', { - value: common.mustNotCall(() => {}) - }) + value: common.mustNotCall(() => {}), + }); // Check that map isn't getting called. 
- stream[op](() => {}) + stream[op](() => {}); } } -/* replacement start */ -process.on('beforeExit', (code) => { - if (code === 0) { - tap.pass('test succeeded') - } else { - tap.fail(`test failed - exited code ${code}`) - } -}) -/* replacement end */ + /* replacement start */ + process.on('beforeExit', (code) => { + if(code === 0) { + tap.pass('test succeeded'); + } else { + tap.fail(`test failed - exited code ${code}`); + } + }); + /* replacement end */ diff --git a/test/parallel/test-stream-toArray.js b/test/parallel/test-stream-toArray.js index 7ad66bf0fd..8f3e9b6e76 100644 --- a/test/parallel/test-stream-toArray.js +++ b/test/parallel/test-stream-toArray.js @@ -1,4 +1,20 @@ -'use strict' +/* replacement start */ +const AbortController = globalThis.AbortController || require('abort-controller').AbortController + +const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal + +const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget + +if (typeof AbortSignal.abort !== 'function') { + AbortSignal.abort = function () { + const controller = new AbortController() + controller.abort() + return controller.signal + } +} +/* replacement end */ + +;('use strict') const tap = require('tap') diff --git a/test/parallel/test-stream-transform-split-highwatermark.js b/test/parallel/test-stream-transform-split-highwatermark.js index 2f7f9ee3e7..8eac3eda60 100644 --- a/test/parallel/test-stream-transform-split-highwatermark.js +++ b/test/parallel/test-stream-transform-split-highwatermark.js @@ -66,6 +66,7 @@ testTransform(0, 0, { readableHighWaterMark: 666, writableHighWaterMark: 777 }) // Test undefined, null + ;[undefined, null].forEach((v) => { testTransform(DEFAULT, DEFAULT, { readableHighWaterMark: v diff --git a/test/parallel/test-stream-writable-change-default-encoding.js b/test/parallel/test-stream-writable-change-default-encoding.js index 78e4546458..4f4b1b697e 100644 --- a/test/parallel/test-stream-writable-change-default-encoding.js +++ b/test/parallel/test-stream-writable-change-default-encoding.js @@ -57,6 +57,7 @@ class MyWritable extends stream.Writable { m.write('foo') m.end() })() + ;(function changeDefaultEncodingToAscii() { const m = new MyWritable( function (isBuffer, type, enc) { @@ -86,6 +87,7 @@ assert.throws( message: 'Unknown encoding: {}' } ) + ;(function checkVariableCaseEncoding() { const m = new MyWritable( function (isBuffer, type, enc) { diff --git a/test/parallel/test-stream-writable-destroy.js b/test/parallel/test-stream-writable-destroy.js index 5edecba31a..8c6ea069d1 100644 --- a/test/parallel/test-stream-writable-destroy.js +++ b/test/parallel/test-stream-writable-destroy.js @@ -1,4 +1,20 @@ -'use strict' +/* replacement start */ +const AbortController = globalThis.AbortController || require('abort-controller').AbortController + +const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal + +const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget + +if (typeof AbortSignal.abort !== 'function') { + AbortSignal.abort = function () { + const controller = new AbortController() + controller.abort() + return controller.signal + } +} +/* replacement end */ + +;('use strict') const tap = require('tap') diff --git a/test/parallel/test-stream3-pause-then-read.js b/test/parallel/test-stream3-pause-then-read.js index 11eacc4a09..c62fd3e2d6 100644 --- a/test/parallel/test-stream3-pause-then-read.js +++ b/test/parallel/test-stream3-pause-then-read.js @@ -73,6 +73,7 @@ 
function read100() { function readn(n, then) { silentConsole.error(`read ${n}`) expectEndingData -= n + ;(function read() { const c = r.read(n) silentConsole.error('c', c) From 3ad53b2f634be766b86a13405251d689c59d5816 Mon Sep 17 00:00:00 2001 From: Shogun Date: Fri, 13 May 2022 15:21:32 +0200 Subject: [PATCH 08/19] fix: Fix path on Windows. --- src/test/browser/runner.mjs | 8 ++++---- test/browser/runner.mjs | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/test/browser/runner.mjs b/src/test/browser/runner.mjs index db7f0ac31b..5dd856cc13 100644 --- a/src/test/browser/runner.mjs +++ b/src/test/browser/runner.mjs @@ -1,8 +1,9 @@ -import { dirname, resolve } from 'node:path' +import { resolve } from 'node:path' import { Readable } from 'node:stream' import { chromium, firefox, webkit } from 'playwright' import reporter from 'tap-mocha-reporter' import Parser from 'tap-parser' +import { fileURLToPath } from 'url' const validBrowsers = ['chrome', 'firefox', 'safari', 'edge'] const validBundlers = ['browserify', 'webpack', 'rollup'] @@ -104,6 +105,5 @@ const page = await browser.newPage() setupTape(page, configuration) // Execute the test suite -await page.goto( - `file://${resolve(dirname(new URL(import.meta.url).pathname), `../../tmp/${configuration.bundler}/index.html`)}` -) +const __dirname = fileURLToPath(new URL('.', import.meta.url)) +await page.goto(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/index.html`)}`) diff --git a/test/browser/runner.mjs b/test/browser/runner.mjs index db7f0ac31b..5dd856cc13 100644 --- a/test/browser/runner.mjs +++ b/test/browser/runner.mjs @@ -1,8 +1,9 @@ -import { dirname, resolve } from 'node:path' +import { resolve } from 'node:path' import { Readable } from 'node:stream' import { chromium, firefox, webkit } from 'playwright' import reporter from 'tap-mocha-reporter' import Parser from 'tap-parser' +import { fileURLToPath } from 'url' const validBrowsers = ['chrome', 'firefox', 'safari', 'edge'] const validBundlers = ['browserify', 'webpack', 'rollup'] @@ -104,6 +105,5 @@ const page = await browser.newPage() setupTape(page, configuration) // Execute the test suite -await page.goto( - `file://${resolve(dirname(new URL(import.meta.url).pathname), `../../tmp/${configuration.bundler}/index.html`)}` -) +const __dirname = fileURLToPath(new URL('.', import.meta.url)) +await page.goto(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/index.html`)}`) From a4bcaff80064b93a36e0c44934d4df28d3876cf0 Mon Sep 17 00:00:00 2001 From: Shogun Date: Fri, 13 May 2022 15:30:23 +0200 Subject: [PATCH 09/19] fix: Fixed examples. 
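For context on the runner path fixes above (and the example and build-script updates in the patch that follows): on Windows the pathname of a file URL keeps its leading slash and percent-encodes special characters, so it is not a usable filesystem path, whereas fileURLToPath() returns a native path. A minimal standalone sketch of the difference, using an illustrative path that is not taken from the repository:

// why-fileurltopath.mjs - standalone sketch, illustrative path only
import { fileURLToPath } from 'node:url'

const url = new URL('file:///C:/Users/dev/readable-stream/build/build.mjs')

// URL#pathname keeps the leading slash (and percent-encodes characters such
// as spaces), so joining it with resolve() breaks on Windows.
console.log(url.pathname) // "/C:/Users/dev/readable-stream/build/build.mjs"

// fileURLToPath() returns a platform-native path instead.
console.log(fileURLToPath(url)) // on Windows: "C:\Users\dev\readable-stream\build\build.mjs"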
--- .github/workflows/node.yml | 2 +- build/build.mjs | 4 +++- examples/capslock-type.cjs | 2 +- examples/typer.mjs | 5 +++-- src/test/browser/runner.mjs | 2 +- test/browser/runner.mjs | 2 +- 6 files changed, 10 insertions(+), 7 deletions(-) diff --git a/.github/workflows/node.yml b/.github/workflows/node.yml index a8877354bc..77cabff029 100644 --- a/.github/workflows/node.yml +++ b/.github/workflows/node.yml @@ -10,7 +10,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macOS-latest] - node-version: [12.x, 14.x, 16.x, 17.x] + node-version: [12.x, 14.x, 16.x, 18.x] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/build/build.mjs b/build/build.mjs index c94a886754..8dcc4990e4 100644 --- a/build/build.mjs +++ b/build/build.mjs @@ -4,6 +4,7 @@ import { mkdir, readdir, readFile, rm, writeFile } from 'node:fs/promises' import { dirname, resolve } from 'node:path' import process from 'node:process' import { finished } from 'node:stream/promises' +import { fileURLToPath } from 'node:url' import prettier from 'prettier' import { Parse } from 'tar' import { request } from 'undici' @@ -143,7 +144,8 @@ async function downloadNode(nodeVersion) { } async function main() { - const rootDir = resolve(dirname(new URL(import.meta.url).pathname), '..') + const __dirname = fileURLToPath(new URL('.', import.meta.url)) + const rootDir = resolve(__dirname, '..') if (process.cwd() !== rootDir) { console.error('Please run this from the root directory of readable-stream repository.') diff --git a/examples/capslock-type.cjs b/examples/capslock-type.cjs index 9ea5342ae8..f7b48bdbf2 100644 --- a/examples/capslock-type.cjs +++ b/examples/capslock-type.cjs @@ -1,6 +1,6 @@ 'use strict' -const { Transform } = require('../lib') +const { Transform } = require('../lib/ours/index') class MyStream extends Transform { _transform(chunk, encoding, callback) { diff --git a/examples/typer.mjs b/examples/typer.mjs index e9bed80c38..c063b5d187 100644 --- a/examples/typer.mjs +++ b/examples/typer.mjs @@ -1,8 +1,9 @@ import { createReadStream } from 'node:fs' import process from 'node:process' -import { Readable } from '../lib/index.js' +import { fileURLToPath } from 'node:url' +import { Readable } from '../lib/ours/index.js' -const fst = createReadStream(new URL(import.meta.url).pathname) +const fst = createReadStream(fileURLToPath(new URL(import.meta.url))) const rst = new Readable() rst.wrap(fst) diff --git a/src/test/browser/runner.mjs b/src/test/browser/runner.mjs index 5dd856cc13..d261e51a58 100644 --- a/src/test/browser/runner.mjs +++ b/src/test/browser/runner.mjs @@ -1,9 +1,9 @@ import { resolve } from 'node:path' import { Readable } from 'node:stream' +import { fileURLToPath } from 'node:url' import { chromium, firefox, webkit } from 'playwright' import reporter from 'tap-mocha-reporter' import Parser from 'tap-parser' -import { fileURLToPath } from 'url' const validBrowsers = ['chrome', 'firefox', 'safari', 'edge'] const validBundlers = ['browserify', 'webpack', 'rollup'] diff --git a/test/browser/runner.mjs b/test/browser/runner.mjs index 5dd856cc13..d261e51a58 100644 --- a/test/browser/runner.mjs +++ b/test/browser/runner.mjs @@ -1,9 +1,9 @@ import { resolve } from 'node:path' import { Readable } from 'node:stream' +import { fileURLToPath } from 'node:url' import { chromium, firefox, webkit } from 'playwright' import reporter from 'tap-mocha-reporter' import Parser from 'tap-parser' -import { fileURLToPath } from 'url' const validBrowsers = ['chrome', 'firefox', 'safari', 'edge'] const 
validBundlers = ['browserify', 'webpack', 'rollup'] From a0fcbfe92777d92b487b03661002bb3323a63ba9 Mon Sep 17 00:00:00 2001 From: Shogun Date: Fri, 13 May 2022 16:46:30 +0200 Subject: [PATCH 10/19] feat: Test bundlers. --- .github/workflows/bundlers.yml | 32 +++++++++ .github/workflows/node.yml | 2 +- package.json | 4 +- src/test/browser/fixtures/index.html | 2 +- .../browser/fixtures/rollup.browser.config.js | 2 +- .../browser/fixtures/rollup.node.config.js | 19 +++++ .../{runner.mjs => runner-browser.mjs} | 2 +- src/test/browser/runner-node.mjs | 70 +++++++++++++++++++ src/test/browser/test-browser.js | 15 ++-- src/test/browser/test-stream2-writable.js | 10 +++ test/browser/fixtures/index.html | 2 +- .../browser/fixtures/rollup.browser.config.js | 2 +- test/browser/fixtures/rollup.node.config.js | 18 +++++ .../{runner.mjs => runner-browser.mjs} | 2 +- test/browser/runner-node.mjs | 70 +++++++++++++++++++ test/browser/test-browser.js | 16 +++-- test/browser/test-stream2-writable.js | 8 +++ 17 files changed, 254 insertions(+), 22 deletions(-) create mode 100644 .github/workflows/bundlers.yml create mode 100644 src/test/browser/fixtures/rollup.node.config.js rename src/test/browser/{runner.mjs => runner-browser.mjs} (96%) create mode 100644 src/test/browser/runner-node.mjs create mode 100644 test/browser/fixtures/rollup.node.config.js rename test/browser/{runner.mjs => runner-browser.mjs} (96%) create mode 100644 test/browser/runner-node.mjs diff --git a/.github/workflows/bundlers.yml b/.github/workflows/bundlers.yml new file mode 100644 index 0000000000..1d7d55bfda --- /dev/null +++ b/.github/workflows/bundlers.yml @@ -0,0 +1,32 @@ +name: Bundlers + +on: [push, pull_request] + +jobs: + build: + name: Bundlers + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + node-version: [12.x, 14.x, 16.x, 18.x] + bundler: ['rollup'] + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Use Node.js ${{ matrix.node-version }} on ${{ matrix.os }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + - name: Restore cached dependencies + uses: actions/cache@v3 + with: + path: node_modules + key: node-modules-${{ hashFiles('package.json') }} + - name: Install dependencies + run: npm install + - name: Bundle code + run: npm run test:bundlers:prepare:${{ matrix.bundler }} + - name: Run Tests on Browsers + run: npm run test:bundlers ${{ matrix.bundler }} diff --git a/.github/workflows/node.yml b/.github/workflows/node.yml index 77cabff029..5af0dab0eb 100644 --- a/.github/workflows/node.yml +++ b/.github/workflows/node.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest, windows-latest, macOS-latest] + os: [ubuntu-latest, windows-latest, macos-latest] node-version: [12.x, 14.x, 16.x, 18.x] steps: - name: Checkout diff --git a/package.json b/package.json index ef8eeac8ae..60f28edb16 100644 --- a/package.json +++ b/package.json @@ -36,8 +36,10 @@ "build": "node build/build.mjs", "postbuild": "prettier -w lib test", "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", - "test:browsers": "node test/browser/runner.mjs", + "test:browsers": "node test/browser/runner-browser.mjs", "test:browsers:prepare:rollup": "rm -rf tmp/rollup && rollup -c test/browser/fixtures/rollup.browser.config.js && cp test/browser/fixtures/index.html tmp/rollup", + "test:bundlers": "node test/browser/runner-node.mjs", + "test:bundlers:prepare:rollup": "rm -rf tmp/rollup && 
rollup -c test/browser/fixtures/rollup.node.config.js", "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", "format": "prettier -w src lib test", "lint": "eslint src" diff --git a/src/test/browser/fixtures/index.html b/src/test/browser/fixtures/index.html index 603eef3c50..16b329e8e6 100644 --- a/src/test/browser/fixtures/index.html +++ b/src/test/browser/fixtures/index.html @@ -67,6 +67,6 @@ originalError(message, ...args) } - + diff --git a/src/test/browser/fixtures/rollup.browser.config.js b/src/test/browser/fixtures/rollup.browser.config.js index 4305c839eb..7259c2584c 100644 --- a/src/test/browser/fixtures/rollup.browser.config.js +++ b/src/test/browser/fixtures/rollup.browser.config.js @@ -8,7 +8,7 @@ export default { input: ['test/browser/test-browser.js'], output: { intro: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }', - file: 'tmp/rollup/suite.js', + file: 'tmp/rollup/suite.browser.js', format: 'iife', name: 'readableStreamTestSuite' }, diff --git a/src/test/browser/fixtures/rollup.node.config.js b/src/test/browser/fixtures/rollup.node.config.js new file mode 100644 index 0000000000..7eac856bce --- /dev/null +++ b/src/test/browser/fixtures/rollup.node.config.js @@ -0,0 +1,19 @@ +import commonjs from '@rollup/plugin-commonjs' +import nodeResolve from '@rollup/plugin-node-resolve' + +export default { + input: ['test/browser/test-browser.js'], + output: { + file: 'tmp/rollup/suite.node.js', + format: 'cjs', + name: 'readableStreamTestSuite', + exports: 'auto' + }, + plugins: [ + commonjs(), + nodeResolve({ + browser: false, + preferBuiltins: true + }) + ] +} diff --git a/src/test/browser/runner.mjs b/src/test/browser/runner-browser.mjs similarity index 96% rename from src/test/browser/runner.mjs rename to src/test/browser/runner-browser.mjs index d261e51a58..5707b6ea9f 100644 --- a/src/test/browser/runner.mjs +++ b/src/test/browser/runner-browser.mjs @@ -23,7 +23,7 @@ function parseEnviroment() { } if (!validBrowsers.includes(browser) || !validBundlers.includes(bundler)) { - console.error('Usage: node runner.mjs [chrome|firefox|safari|edge] [browserify|webpack|rollup]') + console.error('Usage: node runner-browser.mjs [chrome|firefox|safari|edge] [browserify|webpack|rollup]') console.error('\nYou can also use the BROWSER and BUNDLER environment variables') process.exit(1) } diff --git a/src/test/browser/runner-node.mjs b/src/test/browser/runner-node.mjs new file mode 100644 index 0000000000..ad00c05f61 --- /dev/null +++ b/src/test/browser/runner-node.mjs @@ -0,0 +1,70 @@ +import { resolve } from 'node:path' +import { Duplex } from 'node:stream' +import { fileURLToPath } from 'node:url' +import reporter from 'tap-mocha-reporter' +import Parser from 'tap-parser' + +const validBundlers = ['browserify', 'webpack', 'rollup'] + +function parseEnviroment() { + const reporter = process.env.SKIP_REPORTER !== 'true' + const bundler = process.argv[2] || process.env.BUNDLER + + if (!validBundlers.includes(bundler)) { + console.error('Usage: node runner-node.mjs [browserify|webpack|rollup]') + console.error('\nYou can also use the BUNDLER environment variable') + process.exit(1) + } + + return { bundler, reporter } +} + +function setupTape(configuration) { + const output = new Duplex({ read() {}, write() {} }) + const parser = new Parser({ strict: true }) + + globalThis.logger = function (message, ...args) { + if (typeof message !== 'string') { + console.log(message, ...args) + return + } + + output.push(message + '\n') + } 
+ + output.pipe(parser) + + if (configuration.reporter) { + output.pipe(reporter('spec')) + } + + process.on('uncaughtException', (err) => { + if (global.onerror) { + global.onerror(err) + } else { + process.removeAllListeners('uncaughtException') + throw err + } + }) + + parser.on('line', (line) => { + if (line === '# readable-stream-finished\n') { + output.push(null) + output.end() + return + } else if (line.startsWith('# not ok')) { + process.exitCode = 1 + } + + if (!configuration.reporter) { + console.log(line.replace(/\n$/, '')) + } + }) +} + +const configuration = parseEnviroment() +setupTape(configuration) + +// Execute the test suite +const __dirname = fileURLToPath(new URL('.', import.meta.url)) +await import(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/suite.node.js`)}`) diff --git a/src/test/browser/test-browser.js b/src/test/browser/test-browser.js index ade5ac9463..a0dbab5e5c 100644 --- a/src/test/browser/test-browser.js +++ b/src/test/browser/test-browser.js @@ -1,5 +1,6 @@ 'use strict' +const logger = globalThis.logger || console.log const tape = require('tape') const { createDeferredPromise } = require('../../lib/ours/util') const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols') @@ -42,7 +43,7 @@ async function test(rootName, fn) { const success = harness._exitCode === 0 messages.push(`${success ? 'ok' : 'not ok'} ${currentIndex} - ${name}`) - console.log(messages.join('\n')) + logger(messages.join('\n')) completed++ if (!success) { @@ -67,14 +68,14 @@ async function runTests(suites) { clearInterval(interval) - console.log(`1..${totalTests}`) - console.log(`# tests ${totalTests}`) - console.log(`# pass ${completed - failed}`) - console.log(`# fail ${failed}`) - console.log(`# ${failed === 0 ? 'ok' : 'not ok'}`) + logger(`1..${totalTests}`) + logger(`# tests ${totalTests}`) + logger(`# pass ${completed - failed}`) + logger(`# fail ${failed}`) + logger(`# ${failed === 0 ? 
'ok' : 'not ok'}`) // This line is used by the playwright script to detect we're done - console.log('# readable-stream-finished') + logger('# readable-stream-finished') }, 100) // Execute each test serially, to avoid side-effects errors when dealing with global error handling diff --git a/src/test/browser/test-stream2-writable.js b/src/test/browser/test-stream2-writable.js index 29de2de572..20d4e9dc5e 100644 --- a/src/test/browser/test-stream2-writable.js +++ b/src/test/browser/test-stream2-writable.js @@ -150,6 +150,11 @@ module.exports = function (test) { chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) } + // In some cases instead there is one byte less + if (actual.length === chunk.length - 1) { + chunk = chunk.slice(0, chunk.length - 1) + } + t.same(actual, chunk, 'got the expected chunks ' + i) }) }) @@ -200,6 +205,11 @@ module.exports = function (test) { chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) } + // In some cases instead there is one byte less + if (actual.length === chunk.length - 1) { + chunk = chunk.slice(0, chunk.length - 1) + } + t.same(actual, chunk, 'got the expected chunks ' + i) }) }) diff --git a/test/browser/fixtures/index.html b/test/browser/fixtures/index.html index 603eef3c50..16b329e8e6 100644 --- a/test/browser/fixtures/index.html +++ b/test/browser/fixtures/index.html @@ -67,6 +67,6 @@ originalError(message, ...args) } - + diff --git a/test/browser/fixtures/rollup.browser.config.js b/test/browser/fixtures/rollup.browser.config.js index 973797de77..d975d9a29c 100644 --- a/test/browser/fixtures/rollup.browser.config.js +++ b/test/browser/fixtures/rollup.browser.config.js @@ -7,7 +7,7 @@ export default { input: ['test/browser/test-browser.js'], output: { intro: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }', - file: 'tmp/rollup/suite.js', + file: 'tmp/rollup/suite.browser.js', format: 'iife', name: 'readableStreamTestSuite' }, diff --git a/test/browser/fixtures/rollup.node.config.js b/test/browser/fixtures/rollup.node.config.js new file mode 100644 index 0000000000..8d26031bf6 --- /dev/null +++ b/test/browser/fixtures/rollup.node.config.js @@ -0,0 +1,18 @@ +import commonjs from '@rollup/plugin-commonjs' +import nodeResolve from '@rollup/plugin-node-resolve' +export default { + input: ['test/browser/test-browser.js'], + output: { + file: 'tmp/rollup/suite.node.js', + format: 'cjs', + name: 'readableStreamTestSuite', + exports: 'auto' + }, + plugins: [ + commonjs(), + nodeResolve({ + browser: false, + preferBuiltins: true + }) + ] +} diff --git a/test/browser/runner.mjs b/test/browser/runner-browser.mjs similarity index 96% rename from test/browser/runner.mjs rename to test/browser/runner-browser.mjs index d261e51a58..5707b6ea9f 100644 --- a/test/browser/runner.mjs +++ b/test/browser/runner-browser.mjs @@ -23,7 +23,7 @@ function parseEnviroment() { } if (!validBrowsers.includes(browser) || !validBundlers.includes(bundler)) { - console.error('Usage: node runner.mjs [chrome|firefox|safari|edge] [browserify|webpack|rollup]') + console.error('Usage: node runner-browser.mjs [chrome|firefox|safari|edge] [browserify|webpack|rollup]') console.error('\nYou can also use the BROWSER and BUNDLER environment variables') process.exit(1) } diff --git a/test/browser/runner-node.mjs b/test/browser/runner-node.mjs new file mode 100644 index 0000000000..ad00c05f61 --- /dev/null +++ b/test/browser/runner-node.mjs @@ -0,0 +1,70 @@ +import { resolve } from 'node:path' +import { Duplex } from 
'node:stream' +import { fileURLToPath } from 'node:url' +import reporter from 'tap-mocha-reporter' +import Parser from 'tap-parser' + +const validBundlers = ['browserify', 'webpack', 'rollup'] + +function parseEnviroment() { + const reporter = process.env.SKIP_REPORTER !== 'true' + const bundler = process.argv[2] || process.env.BUNDLER + + if (!validBundlers.includes(bundler)) { + console.error('Usage: node runner-node.mjs [browserify|webpack|rollup]') + console.error('\nYou can also use the BUNDLER environment variable') + process.exit(1) + } + + return { bundler, reporter } +} + +function setupTape(configuration) { + const output = new Duplex({ read() {}, write() {} }) + const parser = new Parser({ strict: true }) + + globalThis.logger = function (message, ...args) { + if (typeof message !== 'string') { + console.log(message, ...args) + return + } + + output.push(message + '\n') + } + + output.pipe(parser) + + if (configuration.reporter) { + output.pipe(reporter('spec')) + } + + process.on('uncaughtException', (err) => { + if (global.onerror) { + global.onerror(err) + } else { + process.removeAllListeners('uncaughtException') + throw err + } + }) + + parser.on('line', (line) => { + if (line === '# readable-stream-finished\n') { + output.push(null) + output.end() + return + } else if (line.startsWith('# not ok')) { + process.exitCode = 1 + } + + if (!configuration.reporter) { + console.log(line.replace(/\n$/, '')) + } + }) +} + +const configuration = parseEnviroment() +setupTape(configuration) + +// Execute the test suite +const __dirname = fileURLToPath(new URL('.', import.meta.url)) +await import(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/suite.node.js`)}`) diff --git a/test/browser/test-browser.js b/test/browser/test-browser.js index dab1b16de9..b4a22f9c08 100644 --- a/test/browser/test-browser.js +++ b/test/browser/test-browser.js @@ -1,5 +1,7 @@ 'use strict' +const logger = globalThis.logger || console.log + const tape = require('tape') const { createDeferredPromise } = require('../../lib/ours/util') @@ -39,7 +41,7 @@ async function test(rootName, fn) { harness.onFinish(() => { const success = harness._exitCode === 0 messages.push(`${success ? 'ok' : 'not ok'} ${currentIndex} - ${name}`) - console.log(messages.join('\n')) + logger(messages.join('\n')) completed++ if (!success) { @@ -61,13 +63,13 @@ async function runTests(suites) { } clearInterval(interval) - console.log(`1..${totalTests}`) - console.log(`# tests ${totalTests}`) - console.log(`# pass ${completed - failed}`) - console.log(`# fail ${failed}`) - console.log(`# ${failed === 0 ? 'ok' : 'not ok'}`) // This line is used by the playwright script to detect we're done + logger(`1..${totalTests}`) + logger(`# tests ${totalTests}`) + logger(`# pass ${completed - failed}`) + logger(`# fail ${failed}`) + logger(`# ${failed === 0 ? 
'ok' : 'not ok'}`) // This line is used by the playwright script to detect we're done - console.log('# readable-stream-finished') + logger('# readable-stream-finished') }, 100) // Execute each test serially, to avoid side-effects errors when dealing with global error handling for (const suite of suites) { diff --git a/test/browser/test-stream2-writable.js b/test/browser/test-stream2-writable.js index f80e77a49e..0e5c9c7563 100644 --- a/test/browser/test-stream2-writable.js +++ b/test/browser/test-stream2-writable.js @@ -139,6 +139,10 @@ module.exports = function (test) { if (actual[actual.length - 1] === 0) { chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) + } // In some cases instead there is one byte less + + if (actual.length === chunk.length - 1) { + chunk = chunk.slice(0, chunk.length - 1) } t.same(actual, chunk, 'got the expected chunks ' + i) @@ -184,6 +188,10 @@ module.exports = function (test) { if (actual[actual.length - 1] === 0) { chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])]) + } // In some cases instead there is one byte less + + if (actual.length === chunk.length - 1) { + chunk = chunk.slice(0, chunk.length - 1) } t.same(actual, chunk, 'got the expected chunks ' + i) From a6311365cd157c28919bfcf9d05784c3dffd9d1e Mon Sep 17 00:00:00 2001 From: Shogun Date: Sat, 14 May 2022 07:14:33 +0200 Subject: [PATCH 11/19] fix: Fixed node bundle on older versions. --- lib/ours/index.js | 2 +- src/index.js | 2 +- src/test/browser/runner-node.mjs | 17 ++++++++++++----- test/browser/runner-node.mjs | 17 ++++++++++++----- 4 files changed, 26 insertions(+), 12 deletions(-) diff --git a/lib/ours/index.js b/lib/ours/index.js index e365dce318..1a6af8ad86 100644 --- a/lib/ours/index.js +++ b/lib/ours/index.js @@ -3,7 +3,7 @@ const Stream = require('stream') if (Stream && process.env.READABLE_STREAM === 'disable') { - const promises = require('stream/promises') // Explicit export naming is needed for ESM + const promises = Stream.promises // Explicit export naming is needed for ESM module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer module.exports._isUint8Array = Stream._isUint8Array diff --git a/src/index.js b/src/index.js index 6cd12dcfb3..662b7565ac 100644 --- a/src/index.js +++ b/src/index.js @@ -3,7 +3,7 @@ const Stream = require('stream') if (Stream && process.env.READABLE_STREAM === 'disable') { - const promises = require('stream/promises') + const promises = Stream.promises // Explicit export naming is needed for ESM module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer diff --git a/src/test/browser/runner-node.mjs b/src/test/browser/runner-node.mjs index ad00c05f61..d4a9967414 100644 --- a/src/test/browser/runner-node.mjs +++ b/src/test/browser/runner-node.mjs @@ -62,9 +62,16 @@ function setupTape(configuration) { }) } -const configuration = parseEnviroment() -setupTape(configuration) +async function main() { + const configuration = parseEnviroment() + setupTape(configuration) -// Execute the test suite -const __dirname = fileURLToPath(new URL('.', import.meta.url)) -await import(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/suite.node.js`)}`) + // Execute the test suite + const __dirname = fileURLToPath(new URL('.', import.meta.url)) + await import(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/suite.node.js`)}`) +} + +main().catch((e) => { + console.error(e) + process.exit(1) +}) diff --git a/test/browser/runner-node.mjs b/test/browser/runner-node.mjs index 
ad00c05f61..d4a9967414 100644 --- a/test/browser/runner-node.mjs +++ b/test/browser/runner-node.mjs @@ -62,9 +62,16 @@ function setupTape(configuration) { }) } -const configuration = parseEnviroment() -setupTape(configuration) +async function main() { + const configuration = parseEnviroment() + setupTape(configuration) -// Execute the test suite -const __dirname = fileURLToPath(new URL('.', import.meta.url)) -await import(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/suite.node.js`)}`) + // Execute the test suite + const __dirname = fileURLToPath(new URL('.', import.meta.url)) + await import(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/suite.node.js`)}`) +} + +main().catch((e) => { + console.error(e) + process.exit(1) +}) From 13bbfa5a9a440f1ef64bd80421d3343c75821d0f Mon Sep 17 00:00:00 2001 From: Paolo Insogna Date: Mon, 16 May 2022 07:24:45 +0200 Subject: [PATCH 12/19] test: Test against browserify. --- .github/workflows/browsers.yml | 2 +- .github/workflows/bundlers.yml | 2 +- package.json | 3 +++ 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/browsers.yml b/.github/workflows/browsers.yml index 98603d92da..4e7f7f175f 100644 --- a/.github/workflows/browsers.yml +++ b/.github/workflows/browsers.yml @@ -11,7 +11,7 @@ jobs: matrix: os: ['ubuntu-latest', 'windows-latest', 'macos-latest'] browser: ['chrome', 'firefox', 'safari', 'edge'] - bundler: ['rollup'] + bundler: ['browserify', 'rollup'] exclude: - os: ubuntu-latest browser: safari diff --git a/.github/workflows/bundlers.yml b/.github/workflows/bundlers.yml index 1d7d55bfda..1ce694fafe 100644 --- a/.github/workflows/bundlers.yml +++ b/.github/workflows/bundlers.yml @@ -11,7 +11,7 @@ jobs: matrix: os: [ubuntu-latest, windows-latest, macos-latest] node-version: [12.x, 14.x, 16.x, 18.x] - bundler: ['rollup'] + bundler: ['browserify', 'rollup'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/package.json b/package.json index 60f28edb16..7e7fa88f5b 100644 --- a/package.json +++ b/package.json @@ -37,8 +37,10 @@ "postbuild": "prettier -w lib test", "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", "test:browsers": "node test/browser/runner-browser.mjs", + "test:browsers:prepare:browserify": "rm -rf tmp/browserify && mkdir -p tmp/browserify && browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js && cp test/browser/fixtures/index.html tmp/browserify", "test:browsers:prepare:rollup": "rm -rf tmp/rollup && rollup -c test/browser/fixtures/rollup.browser.config.js && cp test/browser/fixtures/index.html tmp/rollup", "test:bundlers": "node test/browser/runner-node.mjs", + "test:bundlers:prepare:browserify": "rm -rf tmp/browserify && mkdir -p tmp/browserify && browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js", "test:bundlers:prepare:rollup": "rm -rf tmp/rollup && rollup -c test/browser/fixtures/rollup.node.config.js", "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", "format": "prettier -w src lib test", @@ -55,6 +57,7 @@ "@rollup/plugin-inject": "^4.0.4", "@rollup/plugin-node-resolve": "^13.3.0", "@sinonjs/fake-timers": "^9.1.2", + "browserify": "^17.0.0", "buffer-es6": "^4.9.3", "c8": "^7.11.2", "eslint": "^8.15.0", From 1d13ee56e4fc68c974f1c8b340c78367769a3cb0 Mon Sep 17 00:00:00 2001 From: Paolo Insogna Date: Mon, 16 May 2022 08:35:49 +0200 Subject: [PATCH 13/19] test: Test against webpack. 
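Both runners added above share the same TAP plumbing: the bundled suite writes through globalThis.logger, the lines are pushed into a stream, and tap-parser (optionally wrapped by tap-mocha-reporter) decides the exit code. A condensed sketch of that flow; the real runners additionally handle reporter selection, uncaught exceptions and the readable-stream-finished sentinel:

import { Duplex } from 'node:stream'
import Parser from 'tap-parser'

const output = new Duplex({ read() {}, write() {} })
const parser = new Parser({ strict: true })
output.pipe(parser)

// The bundled suite calls globalThis.logger() instead of console.log(), so the
// same TAP output is captured whether it runs in a browser page or in Node.
globalThis.logger = (message) => output.push(`${message}\n`)

// Fail the process on a failing TAP line (the real runners check the
// '# not ok' summary comment emitted by test-browser.js instead).
parser.on('line', (line) => {
  if (line.startsWith('not ok')) {
    process.exitCode = 1
  }
})

globalThis.logger('TAP version 13')
globalThis.logger('ok 1 - example assertion')
globalThis.logger('1..1')
output.push(null)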
--- .github/workflows/browsers.yml | 2 +- .github/workflows/bundlers.yml | 2 +- package.json | 10 ++++-- ...er.config.js => rollup.browser.config.mjs} | 0 ....node.config.js => rollup.node.config.mjs} | 0 .../fixtures/webpack.browser.config.mjs | 36 +++++++++++++++++++ .../browser/fixtures/webpack.node.config.mjs | 15 ++++++++ ...er.config.js => rollup.browser.config.mjs} | 1 + ....node.config.js => rollup.node.config.mjs} | 1 + .../fixtures/webpack.browser.config.mjs | 36 +++++++++++++++++++ test/browser/fixtures/webpack.node.config.mjs | 15 ++++++++ 11 files changed, 113 insertions(+), 5 deletions(-) rename src/test/browser/fixtures/{rollup.browser.config.js => rollup.browser.config.mjs} (100%) rename src/test/browser/fixtures/{rollup.node.config.js => rollup.node.config.mjs} (100%) create mode 100644 src/test/browser/fixtures/webpack.browser.config.mjs create mode 100644 src/test/browser/fixtures/webpack.node.config.mjs rename test/browser/fixtures/{rollup.browser.config.js => rollup.browser.config.mjs} (99%) rename test/browser/fixtures/{rollup.node.config.js => rollup.node.config.mjs} (99%) create mode 100644 test/browser/fixtures/webpack.browser.config.mjs create mode 100644 test/browser/fixtures/webpack.node.config.mjs diff --git a/.github/workflows/browsers.yml b/.github/workflows/browsers.yml index 4e7f7f175f..a8f1dafc49 100644 --- a/.github/workflows/browsers.yml +++ b/.github/workflows/browsers.yml @@ -11,7 +11,7 @@ jobs: matrix: os: ['ubuntu-latest', 'windows-latest', 'macos-latest'] browser: ['chrome', 'firefox', 'safari', 'edge'] - bundler: ['browserify', 'rollup'] + bundler: ['browserify', 'rollup', 'webpack'] exclude: - os: ubuntu-latest browser: safari diff --git a/.github/workflows/bundlers.yml b/.github/workflows/bundlers.yml index 1ce694fafe..0ad5178466 100644 --- a/.github/workflows/bundlers.yml +++ b/.github/workflows/bundlers.yml @@ -11,7 +11,7 @@ jobs: matrix: os: [ubuntu-latest, windows-latest, macos-latest] node-version: [12.x, 14.x, 16.x, 18.x] - bundler: ['browserify', 'rollup'] + bundler: ['browserify', 'rollup', 'webpack'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/package.json b/package.json index 7e7fa88f5b..6c62e29953 100644 --- a/package.json +++ b/package.json @@ -38,10 +38,12 @@ "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", "test:browsers": "node test/browser/runner-browser.mjs", "test:browsers:prepare:browserify": "rm -rf tmp/browserify && mkdir -p tmp/browserify && browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js && cp test/browser/fixtures/index.html tmp/browserify", - "test:browsers:prepare:rollup": "rm -rf tmp/rollup && rollup -c test/browser/fixtures/rollup.browser.config.js && cp test/browser/fixtures/index.html tmp/rollup", + "test:browsers:prepare:rollup": "rm -rf tmp/rollup && rollup -c test/browser/fixtures/rollup.browser.config.mjs && cp test/browser/fixtures/index.html tmp/rollup", + "test:browsers:prepare:webpack": "rm -rf tmp/webpack && webpack -c test/browser/fixtures/webpack.browser.config.mjs && cp test/browser/fixtures/index.html tmp/webpack", "test:bundlers": "node test/browser/runner-node.mjs", "test:bundlers:prepare:browserify": "rm -rf tmp/browserify && mkdir -p tmp/browserify && browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js", - "test:bundlers:prepare:rollup": "rm -rf tmp/rollup && rollup -c test/browser/fixtures/rollup.node.config.js", + "test:bundlers:prepare:rollup": "rm -rf tmp/rollup && rollup -c 
test/browser/fixtures/rollup.node.config.mjs", + "test:bundlers:prepare:webpack": "rm -rf tmp/webpack && webpack -c test/browser/fixtures/webpack.node.config.mjs", "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", "format": "prettier -w src lib test", "lint": "eslint src" @@ -74,7 +76,9 @@ "tap-mocha-reporter": "^5.0.3", "tape": "^5.5.3", "tar": "^6.1.11", - "undici": "^5.1.1" + "undici": "^5.1.1", + "webpack": "^5.72.1", + "webpack-cli": "^4.9.2" }, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" diff --git a/src/test/browser/fixtures/rollup.browser.config.js b/src/test/browser/fixtures/rollup.browser.config.mjs similarity index 100% rename from src/test/browser/fixtures/rollup.browser.config.js rename to src/test/browser/fixtures/rollup.browser.config.mjs diff --git a/src/test/browser/fixtures/rollup.node.config.js b/src/test/browser/fixtures/rollup.node.config.mjs similarity index 100% rename from src/test/browser/fixtures/rollup.node.config.js rename to src/test/browser/fixtures/rollup.node.config.mjs diff --git a/src/test/browser/fixtures/webpack.browser.config.mjs b/src/test/browser/fixtures/webpack.browser.config.mjs new file mode 100644 index 0000000000..bfce45fc5e --- /dev/null +++ b/src/test/browser/fixtures/webpack.browser.config.mjs @@ -0,0 +1,36 @@ +import { createRequire } from 'node:module' +import { resolve } from 'node:path' +import { fileURLToPath } from 'node:url' +import webpack from 'webpack' + +const require = createRequire(import.meta.url) +const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../') + +export default { + entry: './test/browser/test-browser.js', + output: { + filename: 'suite.browser.js', + path: resolve(rootDir, 'tmp/webpack') + }, + mode: 'production', + target: 'web', + performance: false, + plugins: [ + new webpack.BannerPlugin({ + banner: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }', + raw: true + }), + new webpack.ProvidePlugin({ + process: require.resolve('process-es6'), + Buffer: [require.resolve('buffer-es6'), 'Buffer'] + }) + ], + resolve: { + aliasFields: ['browser'], + fallback: { + crypto: require.resolve('crypto-browserify'), + path: require.resolve('path-browserify'), + stream: require.resolve('stream-browserify') + } + } +} diff --git a/src/test/browser/fixtures/webpack.node.config.mjs b/src/test/browser/fixtures/webpack.node.config.mjs new file mode 100644 index 0000000000..e3095152d1 --- /dev/null +++ b/src/test/browser/fixtures/webpack.node.config.mjs @@ -0,0 +1,15 @@ +import { resolve } from 'node:path' +import { fileURLToPath } from 'node:url' + +const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../') + +export default { + entry: './test/browser/test-browser.js', + output: { + filename: 'suite.node.js', + path: resolve(rootDir, 'tmp/webpack') + }, + mode: 'production', + target: 'node', + performance: false +} diff --git a/test/browser/fixtures/rollup.browser.config.js b/test/browser/fixtures/rollup.browser.config.mjs similarity index 99% rename from test/browser/fixtures/rollup.browser.config.js rename to test/browser/fixtures/rollup.browser.config.mjs index d975d9a29c..7259c2584c 100644 --- a/test/browser/fixtures/rollup.browser.config.js +++ b/test/browser/fixtures/rollup.browser.config.mjs @@ -3,6 +3,7 @@ import inject from '@rollup/plugin-inject' import nodeResolve from '@rollup/plugin-node-resolve' import { resolve } from 'node:path' import nodePolyfill from 'rollup-plugin-polyfill-node' + 
export default { input: ['test/browser/test-browser.js'], output: { diff --git a/test/browser/fixtures/rollup.node.config.js b/test/browser/fixtures/rollup.node.config.mjs similarity index 99% rename from test/browser/fixtures/rollup.node.config.js rename to test/browser/fixtures/rollup.node.config.mjs index 8d26031bf6..7eac856bce 100644 --- a/test/browser/fixtures/rollup.node.config.js +++ b/test/browser/fixtures/rollup.node.config.mjs @@ -1,5 +1,6 @@ import commonjs from '@rollup/plugin-commonjs' import nodeResolve from '@rollup/plugin-node-resolve' + export default { input: ['test/browser/test-browser.js'], output: { diff --git a/test/browser/fixtures/webpack.browser.config.mjs b/test/browser/fixtures/webpack.browser.config.mjs new file mode 100644 index 0000000000..bfce45fc5e --- /dev/null +++ b/test/browser/fixtures/webpack.browser.config.mjs @@ -0,0 +1,36 @@ +import { createRequire } from 'node:module' +import { resolve } from 'node:path' +import { fileURLToPath } from 'node:url' +import webpack from 'webpack' + +const require = createRequire(import.meta.url) +const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../') + +export default { + entry: './test/browser/test-browser.js', + output: { + filename: 'suite.browser.js', + path: resolve(rootDir, 'tmp/webpack') + }, + mode: 'production', + target: 'web', + performance: false, + plugins: [ + new webpack.BannerPlugin({ + banner: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }', + raw: true + }), + new webpack.ProvidePlugin({ + process: require.resolve('process-es6'), + Buffer: [require.resolve('buffer-es6'), 'Buffer'] + }) + ], + resolve: { + aliasFields: ['browser'], + fallback: { + crypto: require.resolve('crypto-browserify'), + path: require.resolve('path-browserify'), + stream: require.resolve('stream-browserify') + } + } +} diff --git a/test/browser/fixtures/webpack.node.config.mjs b/test/browser/fixtures/webpack.node.config.mjs new file mode 100644 index 0000000000..e3095152d1 --- /dev/null +++ b/test/browser/fixtures/webpack.node.config.mjs @@ -0,0 +1,15 @@ +import { resolve } from 'node:path' +import { fileURLToPath } from 'node:url' + +const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../') + +export default { + entry: './test/browser/test-browser.js', + output: { + filename: 'suite.node.js', + path: resolve(rootDir, 'tmp/webpack') + }, + mode: 'production', + target: 'node', + performance: false +} From 5c750db52c0beb40e70e2c303e2c1da07ca75c86 Mon Sep 17 00:00:00 2001 From: Paolo Insogna Date: Mon, 16 May 2022 09:18:36 +0200 Subject: [PATCH 14/19] test: Improve build scripts. 
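The webpack configuration above, like the rollup setup earlier and the esbuild shims added later in this series, exists because the bundled suite still assumes Node globals and built-ins that browsers do not provide. Roughly, and only as an illustration of what the shims buy:

// Illustrative only: inside the browser bundles these names no longer resolve
// to Node built-ins but to the shims wired up in the bundler configs:
//   Buffer                -> buffer-es6 (ProvidePlugin / inject)
//   process               -> process-es6 (ProvidePlugin / inject)
//   setImmediate          -> the injected setTimeout-based wrapper
//   stream, path, crypto  -> stream-browserify, path-browserify, crypto-browserify
const chunk = Buffer.from('abc')
setImmediate(() => console.log(chunk.length)) // prints 3 in the browser bundle too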
--- .github/workflows/browsers.yml | 2 +- .github/workflows/bundlers.yml | 2 +- package.json | 7 +----- src/test/browser/fixtures/prepare.sh | 36 ++++++++++++++++++++++++++++ test/browser/fixtures/prepare.sh | 36 ++++++++++++++++++++++++++++ 5 files changed, 75 insertions(+), 8 deletions(-) create mode 100644 src/test/browser/fixtures/prepare.sh create mode 100644 test/browser/fixtures/prepare.sh diff --git a/.github/workflows/browsers.yml b/.github/workflows/browsers.yml index a8f1dafc49..125b695bee 100644 --- a/.github/workflows/browsers.yml +++ b/.github/workflows/browsers.yml @@ -34,6 +34,6 @@ jobs: - name: Install Browser run: ./node_modules/.bin/playwright install ${{ fromJSON('{"chrome":"chromium","edge":"msedge","firefox":"firefox","safari":"webkit"}')[matrix.browser] }} - name: Bundle code - run: npm run test:browsers:prepare:${{ matrix.bundler }} + run: npm run test:prepare - name: Run Tests on Browsers run: npm run test:browsers ${{ matrix.browser }} ${{ matrix.bundler }} diff --git a/.github/workflows/bundlers.yml b/.github/workflows/bundlers.yml index 0ad5178466..838adc1984 100644 --- a/.github/workflows/bundlers.yml +++ b/.github/workflows/bundlers.yml @@ -27,6 +27,6 @@ jobs: - name: Install dependencies run: npm install - name: Bundle code - run: npm run test:bundlers:prepare:${{ matrix.bundler }} + run: npm run test:prepare - name: Run Tests on Browsers run: npm run test:bundlers ${{ matrix.bundler }} diff --git a/package.json b/package.json index 6c62e29953..eff6dcd1e8 100644 --- a/package.json +++ b/package.json @@ -36,14 +36,9 @@ "build": "node build/build.mjs", "postbuild": "prettier -w lib test", "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", + "test:prepare": "/bin/bash test/browser/fixtures/prepare.sh", "test:browsers": "node test/browser/runner-browser.mjs", - "test:browsers:prepare:browserify": "rm -rf tmp/browserify && mkdir -p tmp/browserify && browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js && cp test/browser/fixtures/index.html tmp/browserify", - "test:browsers:prepare:rollup": "rm -rf tmp/rollup && rollup -c test/browser/fixtures/rollup.browser.config.mjs && cp test/browser/fixtures/index.html tmp/rollup", - "test:browsers:prepare:webpack": "rm -rf tmp/webpack && webpack -c test/browser/fixtures/webpack.browser.config.mjs && cp test/browser/fixtures/index.html tmp/webpack", "test:bundlers": "node test/browser/runner-node.mjs", - "test:bundlers:prepare:browserify": "rm -rf tmp/browserify && mkdir -p tmp/browserify && browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js", - "test:bundlers:prepare:rollup": "rm -rf tmp/rollup && rollup -c test/browser/fixtures/rollup.node.config.mjs", - "test:bundlers:prepare:webpack": "rm -rf tmp/webpack && webpack -c test/browser/fixtures/webpack.node.config.mjs", "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", "format": "prettier -w src lib test", "lint": "eslint src" diff --git a/src/test/browser/fixtures/prepare.sh b/src/test/browser/fixtures/prepare.sh new file mode 100644 index 0000000000..958d89d1ca --- /dev/null +++ b/src/test/browser/fixtures/prepare.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +set -x -e + +[ "$BUNDLER" == "" ] && BUNDLER=$1 + +if [ "$BUNDLER" != "" ]; then + rm -rf tmp/$BUNDLER + mkdir -p tmp/$BUNDLER + cp test/browser/fixtures/index.html tmp/$BUNDLER +fi + +case $BUNDLER in + browserify) + browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js + browserify 
test/browser/test-browser.js --node -o tmp/browserify/suite.node.js + ;; + esbuild) + ;; + rollup) + rollup -c test/browser/fixtures/rollup.browser.config.mjs + rollup -c test/browser/fixtures/rollup.node.config.mjs + ;; + swc) + ;; + vite) + ;; + webpack) + webpack -c test/browser/fixtures/webpack.browser.config.mjs + webpack -c test/browser/fixtures/webpack.node.config.mjs + ;; + *) + echo "Please set the environment variable BUNDLER to browserify, esbuild, rollup, swc, vite or webpack." + exit 1 + ;; +esac \ No newline at end of file diff --git a/test/browser/fixtures/prepare.sh b/test/browser/fixtures/prepare.sh new file mode 100644 index 0000000000..958d89d1ca --- /dev/null +++ b/test/browser/fixtures/prepare.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +set -x -e + +[ "$BUNDLER" == "" ] && BUNDLER=$1 + +if [ "$BUNDLER" != "" ]; then + rm -rf tmp/$BUNDLER + mkdir -p tmp/$BUNDLER + cp test/browser/fixtures/index.html tmp/$BUNDLER +fi + +case $BUNDLER in + browserify) + browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js + browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js + ;; + esbuild) + ;; + rollup) + rollup -c test/browser/fixtures/rollup.browser.config.mjs + rollup -c test/browser/fixtures/rollup.node.config.mjs + ;; + swc) + ;; + vite) + ;; + webpack) + webpack -c test/browser/fixtures/webpack.browser.config.mjs + webpack -c test/browser/fixtures/webpack.node.config.mjs + ;; + *) + echo "Please set the environment variable BUNDLER to browserify, esbuild, rollup, swc, vite or webpack." + exit 1 + ;; +esac \ No newline at end of file From 62f8636184c64f8fbb18c88d46ee4e69f2c76a4d Mon Sep 17 00:00:00 2001 From: Paolo Insogna Date: Mon, 16 May 2022 16:22:30 +0200 Subject: [PATCH 15/19] test: Test against esbuild. 
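For local runs the flow mirrors the CI jobs above: prepare.sh picks the bundler from its first argument or from the BUNDLER environment variable (for example npm run test:prepare rollup), and the runners then load the matching bundle from tmp/<bundler>/, via npm run test:bundlers rollup for the Node build or npm run test:browsers chrome rollup for the Playwright build. SKIP_REPORTER=true disables the spec reporter in runner-node.mjs and HEADLESS=false opens a visible browser in runner-browser.mjs, as read from the environment in the runners above.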
--- package.json | 2 ++ .../fixtures/esbuild-browsers-shims.mjs | 9 ++++++++ .../fixtures/esbuild.browser.config.mjs | 23 +++++++++++++++++++ .../browser/fixtures/esbuild.node.config.mjs | 8 +++++++ src/test/browser/fixtures/prepare.sh | 8 +++---- src/test/browser/runner-browser.mjs | 4 ++-- src/test/browser/runner-node.mjs | 4 ++-- .../fixtures/esbuild-browsers-shims.mjs | 9 ++++++++ .../fixtures/esbuild.browser.config.mjs | 23 +++++++++++++++++++ test/browser/fixtures/esbuild.node.config.mjs | 8 +++++++ test/browser/fixtures/prepare.sh | 8 +++---- test/browser/runner-browser.mjs | 4 ++-- test/browser/runner-node.mjs | 4 ++-- 13 files changed, 96 insertions(+), 18 deletions(-) create mode 100644 src/test/browser/fixtures/esbuild-browsers-shims.mjs create mode 100644 src/test/browser/fixtures/esbuild.browser.config.mjs create mode 100644 src/test/browser/fixtures/esbuild.node.config.mjs create mode 100644 test/browser/fixtures/esbuild-browsers-shims.mjs create mode 100644 test/browser/fixtures/esbuild.browser.config.mjs create mode 100644 test/browser/fixtures/esbuild.node.config.mjs diff --git a/package.json b/package.json index eff6dcd1e8..008dcadc02 100644 --- a/package.json +++ b/package.json @@ -57,6 +57,8 @@ "browserify": "^17.0.0", "buffer-es6": "^4.9.3", "c8": "^7.11.2", + "esbuild": "^0.14.39", + "esbuild-plugin-alias": "^0.2.1", "eslint": "^8.15.0", "eslint-config-standard": "^17.0.0", "eslint-plugin-import": "^2.26.0", diff --git a/src/test/browser/fixtures/esbuild-browsers-shims.mjs b/src/test/browser/fixtures/esbuild-browsers-shims.mjs new file mode 100644 index 0000000000..9186f40744 --- /dev/null +++ b/src/test/browser/fixtures/esbuild-browsers-shims.mjs @@ -0,0 +1,9 @@ +import * as bufferModule from 'buffer-es6' +import * as processModule from 'process-es6' + +export const process = processModule +export const Buffer = bufferModule.Buffer + +export function setImmediate(fn, ...args) { + setTimeout(() => fn(...args), 1) +} diff --git a/src/test/browser/fixtures/esbuild.browser.config.mjs b/src/test/browser/fixtures/esbuild.browser.config.mjs new file mode 100644 index 0000000000..032f190f07 --- /dev/null +++ b/src/test/browser/fixtures/esbuild.browser.config.mjs @@ -0,0 +1,23 @@ +import { build } from 'esbuild' +import alias from 'esbuild-plugin-alias' +import { createRequire } from 'node:module' + +const require = createRequire(import.meta.url) + +build({ + entryPoints: ['test/browser/test-browser.js'], + outfile: 'tmp/esbuild/suite.browser.js', + bundle: true, + platform: 'browser', + plugins: [ + alias({ + crypto: require.resolve('crypto-browserify'), + path: require.resolve('path-browserify'), + stream: require.resolve('stream-browserify') + }) + ], + define: { + global: 'globalThis' + }, + inject: ['test/browser/fixtures/esbuild-browsers-shims.mjs'] +}).catch(() => process.exit(1)) diff --git a/src/test/browser/fixtures/esbuild.node.config.mjs b/src/test/browser/fixtures/esbuild.node.config.mjs new file mode 100644 index 0000000000..21f70ad284 --- /dev/null +++ b/src/test/browser/fixtures/esbuild.node.config.mjs @@ -0,0 +1,8 @@ +import { build } from 'esbuild' + +build({ + entryPoints: ['test/browser/test-browser.js'], + outfile: 'tmp/esbuild/suite.node.js', + bundle: true, + platform: 'node' +}).catch(() => process.exit(1)) diff --git a/src/test/browser/fixtures/prepare.sh b/src/test/browser/fixtures/prepare.sh index 958d89d1ca..56380d61f4 100644 --- a/src/test/browser/fixtures/prepare.sh +++ b/src/test/browser/fixtures/prepare.sh @@ -16,21 +16,19 @@ case $BUNDLER in 
browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js ;; esbuild) + node src/test/browser/fixtures/esbuild.browser.config.mjs + node src/test/browser/fixtures/esbuild.node.config.mjs ;; rollup) rollup -c test/browser/fixtures/rollup.browser.config.mjs rollup -c test/browser/fixtures/rollup.node.config.mjs ;; - swc) - ;; - vite) - ;; webpack) webpack -c test/browser/fixtures/webpack.browser.config.mjs webpack -c test/browser/fixtures/webpack.node.config.mjs ;; *) - echo "Please set the environment variable BUNDLER to browserify, esbuild, rollup, swc, vite or webpack." + echo "Please set the environment variable BUNDLER to browserify, esbuild, rollup or webpack." exit 1 ;; esac \ No newline at end of file diff --git a/src/test/browser/runner-browser.mjs b/src/test/browser/runner-browser.mjs index 5707b6ea9f..447224e488 100644 --- a/src/test/browser/runner-browser.mjs +++ b/src/test/browser/runner-browser.mjs @@ -6,7 +6,7 @@ import reporter from 'tap-mocha-reporter' import Parser from 'tap-parser' const validBrowsers = ['chrome', 'firefox', 'safari', 'edge'] -const validBundlers = ['browserify', 'webpack', 'rollup'] +const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack'] function parseEnviroment() { const headless = process.env.HEADLESS !== 'false' @@ -23,7 +23,7 @@ function parseEnviroment() { } if (!validBrowsers.includes(browser) || !validBundlers.includes(bundler)) { - console.error('Usage: node runner-browser.mjs [chrome|firefox|safari|edge] [browserify|webpack|rollup]') + console.error(`Usage: node runner-browser.mjs [${validBrowsers.join('|')}] [${validBundlers.join('|')}]`) console.error('\nYou can also use the BROWSER and BUNDLER environment variables') process.exit(1) } diff --git a/src/test/browser/runner-node.mjs b/src/test/browser/runner-node.mjs index d4a9967414..3298d61f8c 100644 --- a/src/test/browser/runner-node.mjs +++ b/src/test/browser/runner-node.mjs @@ -4,14 +4,14 @@ import { fileURLToPath } from 'node:url' import reporter from 'tap-mocha-reporter' import Parser from 'tap-parser' -const validBundlers = ['browserify', 'webpack', 'rollup'] +const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack'] function parseEnviroment() { const reporter = process.env.SKIP_REPORTER !== 'true' const bundler = process.argv[2] || process.env.BUNDLER if (!validBundlers.includes(bundler)) { - console.error('Usage: node runner-node.mjs [browserify|webpack|rollup]') + console.error(`Usage: node runner-node.mjs [${validBundlers.join('|')}]`) console.error('\nYou can also use the BUNDLER environment variable') process.exit(1) } diff --git a/test/browser/fixtures/esbuild-browsers-shims.mjs b/test/browser/fixtures/esbuild-browsers-shims.mjs new file mode 100644 index 0000000000..9186f40744 --- /dev/null +++ b/test/browser/fixtures/esbuild-browsers-shims.mjs @@ -0,0 +1,9 @@ +import * as bufferModule from 'buffer-es6' +import * as processModule from 'process-es6' + +export const process = processModule +export const Buffer = bufferModule.Buffer + +export function setImmediate(fn, ...args) { + setTimeout(() => fn(...args), 1) +} diff --git a/test/browser/fixtures/esbuild.browser.config.mjs b/test/browser/fixtures/esbuild.browser.config.mjs new file mode 100644 index 0000000000..032f190f07 --- /dev/null +++ b/test/browser/fixtures/esbuild.browser.config.mjs @@ -0,0 +1,23 @@ +import { build } from 'esbuild' +import alias from 'esbuild-plugin-alias' +import { createRequire } from 'node:module' + +const require = createRequire(import.meta.url) + +build({ 
+ entryPoints: ['test/browser/test-browser.js'], + outfile: 'tmp/esbuild/suite.browser.js', + bundle: true, + platform: 'browser', + plugins: [ + alias({ + crypto: require.resolve('crypto-browserify'), + path: require.resolve('path-browserify'), + stream: require.resolve('stream-browserify') + }) + ], + define: { + global: 'globalThis' + }, + inject: ['test/browser/fixtures/esbuild-browsers-shims.mjs'] +}).catch(() => process.exit(1)) diff --git a/test/browser/fixtures/esbuild.node.config.mjs b/test/browser/fixtures/esbuild.node.config.mjs new file mode 100644 index 0000000000..21f70ad284 --- /dev/null +++ b/test/browser/fixtures/esbuild.node.config.mjs @@ -0,0 +1,8 @@ +import { build } from 'esbuild' + +build({ + entryPoints: ['test/browser/test-browser.js'], + outfile: 'tmp/esbuild/suite.node.js', + bundle: true, + platform: 'node' +}).catch(() => process.exit(1)) diff --git a/test/browser/fixtures/prepare.sh b/test/browser/fixtures/prepare.sh index 958d89d1ca..56380d61f4 100644 --- a/test/browser/fixtures/prepare.sh +++ b/test/browser/fixtures/prepare.sh @@ -16,21 +16,19 @@ case $BUNDLER in browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js ;; esbuild) + node src/test/browser/fixtures/esbuild.browser.config.mjs + node src/test/browser/fixtures/esbuild.node.config.mjs ;; rollup) rollup -c test/browser/fixtures/rollup.browser.config.mjs rollup -c test/browser/fixtures/rollup.node.config.mjs ;; - swc) - ;; - vite) - ;; webpack) webpack -c test/browser/fixtures/webpack.browser.config.mjs webpack -c test/browser/fixtures/webpack.node.config.mjs ;; *) - echo "Please set the environment variable BUNDLER to browserify, esbuild, rollup, swc, vite or webpack." + echo "Please set the environment variable BUNDLER to browserify, esbuild, rollup or webpack." 
exit 1 ;; esac \ No newline at end of file diff --git a/test/browser/runner-browser.mjs b/test/browser/runner-browser.mjs index 5707b6ea9f..447224e488 100644 --- a/test/browser/runner-browser.mjs +++ b/test/browser/runner-browser.mjs @@ -6,7 +6,7 @@ import reporter from 'tap-mocha-reporter' import Parser from 'tap-parser' const validBrowsers = ['chrome', 'firefox', 'safari', 'edge'] -const validBundlers = ['browserify', 'webpack', 'rollup'] +const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack'] function parseEnviroment() { const headless = process.env.HEADLESS !== 'false' @@ -23,7 +23,7 @@ function parseEnviroment() { } if (!validBrowsers.includes(browser) || !validBundlers.includes(bundler)) { - console.error('Usage: node runner-browser.mjs [chrome|firefox|safari|edge] [browserify|webpack|rollup]') + console.error(`Usage: node runner-browser.mjs [${validBrowsers.join('|')}] [${validBundlers.join('|')}]`) console.error('\nYou can also use the BROWSER and BUNDLER environment variables') process.exit(1) } diff --git a/test/browser/runner-node.mjs b/test/browser/runner-node.mjs index d4a9967414..3298d61f8c 100644 --- a/test/browser/runner-node.mjs +++ b/test/browser/runner-node.mjs @@ -4,14 +4,14 @@ import { fileURLToPath } from 'node:url' import reporter from 'tap-mocha-reporter' import Parser from 'tap-parser' -const validBundlers = ['browserify', 'webpack', 'rollup'] +const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack'] function parseEnviroment() { const reporter = process.env.SKIP_REPORTER !== 'true' const bundler = process.argv[2] || process.env.BUNDLER if (!validBundlers.includes(bundler)) { - console.error('Usage: node runner-node.mjs [browserify|webpack|rollup]') + console.error(`Usage: node runner-node.mjs [${validBundlers.join('|')}]`) console.error('\nYou can also use the BUNDLER environment variable') process.exit(1) } From 23dbc5b8247ce5523d0d061864bc7dcac6edb449 Mon Sep 17 00:00:00 2001 From: Paolo Insogna Date: Mon, 16 May 2022 16:30:56 +0200 Subject: [PATCH 16/19] test: Fixed CI. --- .github/workflows/browsers.yml | 2 +- .github/workflows/bundlers.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/browsers.yml b/.github/workflows/browsers.yml index 125b695bee..634b09cf45 100644 --- a/.github/workflows/browsers.yml +++ b/.github/workflows/browsers.yml @@ -34,6 +34,6 @@ jobs: - name: Install Browser run: ./node_modules/.bin/playwright install ${{ fromJSON('{"chrome":"chromium","edge":"msedge","firefox":"firefox","safari":"webkit"}')[matrix.browser] }} - name: Bundle code - run: npm run test:prepare + run: npm run test:prepare ${{ matrix.bundler }} - name: Run Tests on Browsers run: npm run test:browsers ${{ matrix.browser }} ${{ matrix.bundler }} diff --git a/.github/workflows/bundlers.yml b/.github/workflows/bundlers.yml index 838adc1984..505b2a4f5f 100644 --- a/.github/workflows/bundlers.yml +++ b/.github/workflows/bundlers.yml @@ -27,6 +27,6 @@ jobs: - name: Install dependencies run: npm install - name: Bundle code - run: npm run test:prepare + run: npm run test:prepare ${{ matrix.bundler }} - name: Run Tests on Browsers run: npm run test:bundlers ${{ matrix.bundler }} From 728906cdbb92c1429380040bdf66b9bfd5364896 Mon Sep 17 00:00:00 2001 From: Paolo Insogna Date: Mon, 16 May 2022 16:39:42 +0200 Subject: [PATCH 17/19] test: Do not use node: prefix. 
--- src/test/browser/fixtures/esbuild.browser.config.mjs | 2 +- src/test/browser/fixtures/rollup.browser.config.mjs | 2 +- src/test/browser/fixtures/webpack.browser.config.mjs | 6 +++--- src/test/browser/fixtures/webpack.node.config.mjs | 4 ++-- test/browser/fixtures/esbuild.browser.config.mjs | 2 +- test/browser/fixtures/rollup.browser.config.mjs | 2 +- test/browser/fixtures/webpack.browser.config.mjs | 6 +++--- test/browser/fixtures/webpack.node.config.mjs | 4 ++-- 8 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/test/browser/fixtures/esbuild.browser.config.mjs b/src/test/browser/fixtures/esbuild.browser.config.mjs index 032f190f07..6dd371dd9a 100644 --- a/src/test/browser/fixtures/esbuild.browser.config.mjs +++ b/src/test/browser/fixtures/esbuild.browser.config.mjs @@ -1,6 +1,6 @@ import { build } from 'esbuild' import alias from 'esbuild-plugin-alias' -import { createRequire } from 'node:module' +import { createRequire } from 'module' const require = createRequire(import.meta.url) diff --git a/src/test/browser/fixtures/rollup.browser.config.mjs b/src/test/browser/fixtures/rollup.browser.config.mjs index 7259c2584c..43d0e9f4ab 100644 --- a/src/test/browser/fixtures/rollup.browser.config.mjs +++ b/src/test/browser/fixtures/rollup.browser.config.mjs @@ -1,7 +1,7 @@ import commonjs from '@rollup/plugin-commonjs' import inject from '@rollup/plugin-inject' import nodeResolve from '@rollup/plugin-node-resolve' -import { resolve } from 'node:path' +import { resolve } from 'path' import nodePolyfill from 'rollup-plugin-polyfill-node' export default { diff --git a/src/test/browser/fixtures/webpack.browser.config.mjs b/src/test/browser/fixtures/webpack.browser.config.mjs index bfce45fc5e..cd40faa72c 100644 --- a/src/test/browser/fixtures/webpack.browser.config.mjs +++ b/src/test/browser/fixtures/webpack.browser.config.mjs @@ -1,6 +1,6 @@ -import { createRequire } from 'node:module' -import { resolve } from 'node:path' -import { fileURLToPath } from 'node:url' +import { createRequire } from 'module' +import { resolve } from 'path' +import { fileURLToPath } from 'url' import webpack from 'webpack' const require = createRequire(import.meta.url) diff --git a/src/test/browser/fixtures/webpack.node.config.mjs b/src/test/browser/fixtures/webpack.node.config.mjs index e3095152d1..3b20bdef47 100644 --- a/src/test/browser/fixtures/webpack.node.config.mjs +++ b/src/test/browser/fixtures/webpack.node.config.mjs @@ -1,5 +1,5 @@ -import { resolve } from 'node:path' -import { fileURLToPath } from 'node:url' +import { resolve } from 'path' +import { fileURLToPath } from 'url' const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../') diff --git a/test/browser/fixtures/esbuild.browser.config.mjs b/test/browser/fixtures/esbuild.browser.config.mjs index 032f190f07..6dd371dd9a 100644 --- a/test/browser/fixtures/esbuild.browser.config.mjs +++ b/test/browser/fixtures/esbuild.browser.config.mjs @@ -1,6 +1,6 @@ import { build } from 'esbuild' import alias from 'esbuild-plugin-alias' -import { createRequire } from 'node:module' +import { createRequire } from 'module' const require = createRequire(import.meta.url) diff --git a/test/browser/fixtures/rollup.browser.config.mjs b/test/browser/fixtures/rollup.browser.config.mjs index 7259c2584c..43d0e9f4ab 100644 --- a/test/browser/fixtures/rollup.browser.config.mjs +++ b/test/browser/fixtures/rollup.browser.config.mjs @@ -1,7 +1,7 @@ import commonjs from '@rollup/plugin-commonjs' import inject from '@rollup/plugin-inject' import 
nodeResolve from '@rollup/plugin-node-resolve' -import { resolve } from 'node:path' +import { resolve } from 'path' import nodePolyfill from 'rollup-plugin-polyfill-node' export default { diff --git a/test/browser/fixtures/webpack.browser.config.mjs b/test/browser/fixtures/webpack.browser.config.mjs index bfce45fc5e..cd40faa72c 100644 --- a/test/browser/fixtures/webpack.browser.config.mjs +++ b/test/browser/fixtures/webpack.browser.config.mjs @@ -1,6 +1,6 @@ -import { createRequire } from 'node:module' -import { resolve } from 'node:path' -import { fileURLToPath } from 'node:url' +import { createRequire } from 'module' +import { resolve } from 'path' +import { fileURLToPath } from 'url' import webpack from 'webpack' const require = createRequire(import.meta.url) diff --git a/test/browser/fixtures/webpack.node.config.mjs b/test/browser/fixtures/webpack.node.config.mjs index e3095152d1..3b20bdef47 100644 --- a/test/browser/fixtures/webpack.node.config.mjs +++ b/test/browser/fixtures/webpack.node.config.mjs @@ -1,5 +1,5 @@ -import { resolve } from 'node:path' -import { fileURLToPath } from 'node:url' +import { resolve } from 'path' +import { fileURLToPath } from 'url' const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../') From cfccf9d30d3d22b72f198b342e6a1d4c5f7c387e Mon Sep 17 00:00:00 2001 From: Paolo Insogna Date: Tue, 17 May 2022 07:53:17 +0200 Subject: [PATCH 18/19] test: Fix CI on Windows. --- .github/workflows/browsers.yml | 2 +- .github/workflows/bundlers.yml | 2 +- build/build.mjs | 14 ++-- package.json | 2 +- src/test/browser/runner-browser.mjs | 2 +- src/test/browser/runner-node.mjs | 2 +- src/test/browser/runner-prepare.mjs | 107 ++++++++++++++++++++++++++++ test/browser/runner-browser.mjs | 2 +- test/browser/runner-node.mjs | 2 +- test/browser/runner-prepare.mjs | 107 ++++++++++++++++++++++++++++ 10 files changed, 230 insertions(+), 12 deletions(-) create mode 100644 src/test/browser/runner-prepare.mjs create mode 100644 test/browser/runner-prepare.mjs diff --git a/.github/workflows/browsers.yml b/.github/workflows/browsers.yml index 634b09cf45..6d52336fb2 100644 --- a/.github/workflows/browsers.yml +++ b/.github/workflows/browsers.yml @@ -11,7 +11,7 @@ jobs: matrix: os: ['ubuntu-latest', 'windows-latest', 'macos-latest'] browser: ['chrome', 'firefox', 'safari', 'edge'] - bundler: ['browserify', 'rollup', 'webpack'] + bundler: ['browserify', 'esbuild', 'rollup', 'webpack'] exclude: - os: ubuntu-latest browser: safari diff --git a/.github/workflows/bundlers.yml b/.github/workflows/bundlers.yml index 505b2a4f5f..c52d580232 100644 --- a/.github/workflows/bundlers.yml +++ b/.github/workflows/bundlers.yml @@ -11,7 +11,7 @@ jobs: matrix: os: [ubuntu-latest, windows-latest, macos-latest] node-version: [12.x, 14.x, 16.x, 18.x] - bundler: ['browserify', 'rollup', 'webpack'] + bundler: ['browserify', 'esbuild', 'rollup', 'webpack'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/build/build.mjs b/build/build.mjs index 8dcc4990e4..66d0a9e1c7 100644 --- a/build/build.mjs +++ b/build/build.mjs @@ -20,10 +20,14 @@ function highlightFile(file, color) { return `\x1b[${color}m${file.replace(process.cwd() + '/', '')}\x1b[0m` } +function info(message) { + console.log(`\x1b[34m[INFO]\x1b[0m ${message}`) +} + async function extract(nodeVersion, tarFile) { const sourcesMatcher = sources.map((s) => new RegExp(s)) - console.log(`Extracting Node.js ${nodeVersion} tar file ...`) + info(`Extracting Node.js ${nodeVersion} tar file ...`) const contents = [] const 
tarPrefix = `node-v${nodeVersion}/` const parser = new Parse() @@ -124,7 +128,7 @@ async function processFiles(contents) { } // Write the file - console.log(`Creating file ${highlightFile(path, 32)} (${modifications.join(', ')}) ...`) + info(`Creating file ${highlightFile(path, 32)} (${modifications.join(', ')}) ...`) await writeFile(path, content, 'utf-8') } } @@ -132,11 +136,11 @@ async function processFiles(contents) { async function downloadNode(nodeVersion) { // Download node const downloadUrl = `https://nodejs.org/v${nodeVersion}/node-v${nodeVersion}.tar.gz` - console.log(`Downloading ${downloadUrl} ...`) + info(`Downloading ${downloadUrl} ...`) const { statusCode, body } = await request(downloadUrl, { pipelining: 0 }) if (statusCode !== 200) { - console.log(`Downloading failed with HTTP code ${statusCode}.`) + info(`Downloading failed with HTTP code ${statusCode}.`) process.exit(1) } @@ -209,7 +213,7 @@ async function main() { paths.delete('.') for (const path of paths.values()) { - console.log(`Creating directory ${highlightFile(path, 32)} ...`) + info(`Creating directory ${highlightFile(path, 32)} ...`) await mkdir(path, { recursive: true, force: true }) } diff --git a/package.json b/package.json index 008dcadc02..041c331bc4 100644 --- a/package.json +++ b/package.json @@ -36,7 +36,7 @@ "build": "node build/build.mjs", "postbuild": "prettier -w lib test", "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", - "test:prepare": "/bin/bash test/browser/fixtures/prepare.sh", + "test:prepare": "node test/browser/runner-prepare.mjs", "test:browsers": "node test/browser/runner-browser.mjs", "test:bundlers": "node test/browser/runner-node.mjs", "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js", diff --git a/src/test/browser/runner-browser.mjs b/src/test/browser/runner-browser.mjs index 447224e488..e8bb84482c 100644 --- a/src/test/browser/runner-browser.mjs +++ b/src/test/browser/runner-browser.mjs @@ -24,7 +24,7 @@ function parseEnviroment() { if (!validBrowsers.includes(browser) || !validBundlers.includes(bundler)) { console.error(`Usage: node runner-browser.mjs [${validBrowsers.join('|')}] [${validBundlers.join('|')}]`) - console.error('\nYou can also use the BROWSER and BUNDLER environment variables') + console.error('You can also use the BROWSER and BUNDLER environment variables.') process.exit(1) } diff --git a/src/test/browser/runner-node.mjs b/src/test/browser/runner-node.mjs index 3298d61f8c..840d19e2dc 100644 --- a/src/test/browser/runner-node.mjs +++ b/src/test/browser/runner-node.mjs @@ -12,7 +12,7 @@ function parseEnviroment() { if (!validBundlers.includes(bundler)) { console.error(`Usage: node runner-node.mjs [${validBundlers.join('|')}]`) - console.error('\nYou can also use the BUNDLER environment variable') + console.error('You can also use the BUNDLER environment variable.') process.exit(1) } diff --git a/src/test/browser/runner-prepare.mjs b/src/test/browser/runner-prepare.mjs new file mode 100644 index 0000000000..76e38f8504 --- /dev/null +++ b/src/test/browser/runner-prepare.mjs @@ -0,0 +1,107 @@ +import { exec } from 'child_process' +import { promises } from 'fs' +import { resolve } from 'path' +import { fileURLToPath } from 'url' +import util from '../../lib/ours/util.js' +const { copyFile, mkdir, rmdir } = promises + +function highlightFile(file) { + return `\x1b[33m${file.replace(process.cwd() + '/', '')}\x1b[0m` +} + +function info(message) { + console.log(`\x1b[34m[INFO]\x1b[0m ${message}`) +} 
+
+function error(message) {
+  console.log(`\x1b[31m[ERROR]\x1b[0m ${message}`)
+}
+
+async function run(command) {
+  info(`Executing \x1b[33m${command}\x1b[0m ...`)
+  const { promise, reject, resolve } = util.createDeferredPromise()
+
+  let hasOutput = false
+  function logOutput(chunk) {
+    if (!hasOutput) {
+      hasOutput = true
+      console.log('')
+    }
+
+    console.log(chunk.toString('utf-8').trim().replace(/^/gm, ' '))
+  }
+
+  try {
+    const process = exec(command, { stdio: 'pipe' }, (error) => {
+      if (error) {
+        return reject(error)
+      }
+
+      resolve(error)
+    })
+
+    process.stdout.on('data', logOutput)
+    process.stderr.on('data', logOutput)
+    await promise
+
+    if (hasOutput) {
+      console.log('')
+    }
+  } catch (e) {
+    if (hasOutput) {
+      console.log('')
+    }
+
+    error(`Command failed with status code ${e.code}.`)
+    process.exit(1)
+  }
+}
+
+async function main() {
+  const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
+  const bundler = process.argv[2] || process.env.BUNDLER
+
+  if (!validBundlers.includes(bundler)) {
+    error(`Usage: node runner-prepare.mjs [${validBundlers.join('|')}]`)
+    error('You can also use the BUNDLER environment variable.')
+    process.exit(1)
+  }
+
+  const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), `../../tmp/${bundler}`)
+  const sourceIndex = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../test/browser/fixtures/index.html')
+  const targetIndex = resolve(rootDir, 'index.html')
+
+  info(`Emptying directory ${highlightFile(rootDir)} ...`)
+  try {
+    await rmdir(rootDir, { recursive: true })
+  } catch (e) {
+    // No-op
+  }
+  await mkdir(rootDir, { recursive: true })
+
+  info(`Copying file ${highlightFile(sourceIndex)} to ${highlightFile(targetIndex)} ...`)
+  await copyFile(sourceIndex, targetIndex)
+
+  switch (bundler) {
+    case 'browserify':
+      await run('browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js')
+      await run('browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js')
+      break
+    case 'esbuild':
+      await run('node src/test/browser/fixtures/esbuild.browser.config.mjs')
+      await run('node src/test/browser/fixtures/esbuild.node.config.mjs')
+      break
+    case 'rollup':
+      await run('rollup -c test/browser/fixtures/rollup.browser.config.mjs')
+      await run('rollup -c test/browser/fixtures/rollup.node.config.mjs')
+      break
+    case 'webpack':
+      await run('webpack -c test/browser/fixtures/webpack.browser.config.mjs')
+      await run('webpack -c test/browser/fixtures/webpack.node.config.mjs')
+  }
+}
+
+main().catch((e) => {
+  error(e)
+  process.exit(1)
+})
diff --git a/test/browser/runner-browser.mjs b/test/browser/runner-browser.mjs
index 447224e488..e8bb84482c 100644
--- a/test/browser/runner-browser.mjs
+++ b/test/browser/runner-browser.mjs
@@ -24,7 +24,7 @@ function parseEnviroment() {
 
   if (!validBrowsers.includes(browser) || !validBundlers.includes(bundler)) {
     console.error(`Usage: node runner-browser.mjs [${validBrowsers.join('|')}] [${validBundlers.join('|')}]`)
-    console.error('\nYou can also use the BROWSER and BUNDLER environment variables')
+    console.error('You can also use the BROWSER and BUNDLER environment variables.')
     process.exit(1)
   }
 
diff --git a/test/browser/runner-node.mjs b/test/browser/runner-node.mjs
index 3298d61f8c..840d19e2dc 100644
--- a/test/browser/runner-node.mjs
+++ b/test/browser/runner-node.mjs
@@ -12,7 +12,7 @@ function parseEnviroment() {
 
   if (!validBundlers.includes(bundler)) {
     console.error(`Usage: node runner-node.mjs [${validBundlers.join('|')}]`)
-    console.error('\nYou can also use the BUNDLER environment variable')
+    console.error('You can also use the BUNDLER environment variable.')
     process.exit(1)
   }
 
diff --git a/test/browser/runner-prepare.mjs b/test/browser/runner-prepare.mjs
new file mode 100644
index 0000000000..76e38f8504
--- /dev/null
+++ b/test/browser/runner-prepare.mjs
@@ -0,0 +1,107 @@
+import { exec } from 'child_process'
+import { promises } from 'fs'
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+import util from '../../lib/ours/util.js'
+const { copyFile, mkdir, rmdir } = promises
+
+function highlightFile(file) {
+  return `\x1b[33m${file.replace(process.cwd() + '/', '')}\x1b[0m`
+}
+
+function info(message) {
+  console.log(`\x1b[34m[INFO]\x1b[0m ${message}`)
+}
+
+function error(message) {
+  console.log(`\x1b[31m[ERROR]\x1b[0m ${message}`)
+}
+
+async function run(command) {
+  info(`Executing \x1b[33m${command}\x1b[0m ...`)
+  const { promise, reject, resolve } = util.createDeferredPromise()
+
+  let hasOutput = false
+  function logOutput(chunk) {
+    if (!hasOutput) {
+      hasOutput = true
+      console.log('')
+    }
+
+    console.log(chunk.toString('utf-8').trim().replace(/^/gm, ' '))
+  }
+
+  try {
+    const process = exec(command, { stdio: 'pipe' }, (error) => {
+      if (error) {
+        return reject(error)
+      }
+
+      resolve(error)
+    })
+
+    process.stdout.on('data', logOutput)
+    process.stderr.on('data', logOutput)
+    await promise
+
+    if (hasOutput) {
+      console.log('')
+    }
+  } catch (e) {
+    if (hasOutput) {
+      console.log('')
+    }
+
+    error(`Command failed with status code ${e.code}.`)
+    process.exit(1)
+  }
+}
+
+async function main() {
+  const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
+  const bundler = process.argv[2] || process.env.BUNDLER
+
+  if (!validBundlers.includes(bundler)) {
+    error(`Usage: node runner-prepare.mjs [${validBundlers.join('|')}]`)
+    error('You can also use the BUNDLER environment variable.')
+    process.exit(1)
+  }
+
+  const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), `../../tmp/${bundler}`)
+  const sourceIndex = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../test/browser/fixtures/index.html')
+  const targetIndex = resolve(rootDir, 'index.html')
+
+  info(`Emptying directory ${highlightFile(rootDir)} ...`)
+  try {
+    await rmdir(rootDir, { recursive: true })
+  } catch (e) {
+    // No-op
+  }
+  await mkdir(rootDir, { recursive: true })
+
+  info(`Copying file ${highlightFile(sourceIndex)} to ${highlightFile(targetIndex)} ...`)
+  await copyFile(sourceIndex, targetIndex)
+
+  switch (bundler) {
+    case 'browserify':
+      await run('browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js')
+      await run('browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js')
+      break
+    case 'esbuild':
+      await run('node src/test/browser/fixtures/esbuild.browser.config.mjs')
+      await run('node src/test/browser/fixtures/esbuild.node.config.mjs')
+      break
+    case 'rollup':
+      await run('rollup -c test/browser/fixtures/rollup.browser.config.mjs')
+      await run('rollup -c test/browser/fixtures/rollup.node.config.mjs')
+      break
+    case 'webpack':
+      await run('webpack -c test/browser/fixtures/webpack.browser.config.mjs')
+      await run('webpack -c test/browser/fixtures/webpack.node.config.mjs')
+  }
+}
+
+main().catch((e) => {
+  error(e)
+  process.exit(1)
+})

From 4c410918a0aa86a7bed232eb3f88c748eaec7e6b Mon Sep 17 00:00:00 2001
From: Paolo Insogna
Date: Tue, 17 May 2022 08:31:06 +0200
Subject: [PATCH 19/19] test: Fix ESBuild.

--- .github/workflows/browsers.yml | 2 +- .github/workflows/bundlers.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/browsers.yml b/.github/workflows/browsers.yml index 6d52336fb2..ac2b9e6343 100644 --- a/.github/workflows/browsers.yml +++ b/.github/workflows/browsers.yml @@ -28,7 +28,7 @@ jobs: uses: actions/cache@v3 with: path: node_modules - key: node-modules-${{ hashFiles('package.json') }} + key: node-modules-${{ matrix.os }}-${{ hashFiles('package.json') }} - name: Install dependencies run: npm install - name: Install Browser diff --git a/.github/workflows/bundlers.yml b/.github/workflows/bundlers.yml index c52d580232..49f437082b 100644 --- a/.github/workflows/bundlers.yml +++ b/.github/workflows/bundlers.yml @@ -23,7 +23,7 @@ jobs: uses: actions/cache@v3 with: path: node_modules - key: node-modules-${{ hashFiles('package.json') }} + key: node-modules-${{ matrix.os }}-${{ hashFiles('package.json') }} - name: Install dependencies run: npm install - name: Bundle code