@@ -377,6 +377,154 @@ added: v10.0.0
377377
378378* Type: {number} The numeric file descriptor managed by the {FileHandle} object.
379379
380+ #### `filehandle.pull([...transforms][, options])`
381+
382+ <!-- YAML
383+ added: REPLACEME
384+ -->
385+
386+ > Stability: 1 - Experimental
387+
388+ * `...transforms` {Function|Object} Optional transforms to apply via
389+   [`stream/iter pull()`][].
390+ * `options` {Object}
391+   * `signal` {AbortSignal}
392+   * `autoClose` {boolean} Close the file handle when the stream ends.
393+     **Default:** `false`.
394+   * `start` {number} Byte offset to begin reading from. When specified,
395+     reads use explicit positioning (`pread` semantics). **Default:** current
396+     file position.
397+   * `limit` {number} Maximum number of bytes to read before ending the
398+     iterator. Reads stop when `limit` bytes have been delivered or EOF is
399+     reached, whichever comes first. **Default:** read until EOF.
400+   * `chunkSize` {number} Size in bytes of the buffer allocated for each
401+     read operation. **Default:** `131072` (128 KB).
402+ * Returns: {AsyncIterable\<Uint8Array\[]>}
404+ Return the file contents as an async iterable using the
405+ [`node:stream/iter`][] pull model. Reads are performed in `chunkSize`-byte
406+ chunks (default 128 KB). If transforms are provided, they are applied
407+ via [`stream/iter pull()`][].
408+
409+ The file handle is locked while the iterable is being consumed and unlocked
410+ when iteration completes, an error occurs, or the consumer breaks.
411+
412+ This function is only available when the `--experimental-stream-iter` flag is
413+ enabled.
415+ ```mjs
416+ import { open } from 'node:fs/promises';
417+ import { text } from 'node:stream/iter';
418+ import { compressGzip } from 'node:zlib/iter';
419+
420+ const fh = await open('input.txt', 'r');
421+
422+ // Read as text
423+ console.log(await text(fh.pull({ autoClose: true })));
424+
425+ // Read 1 KB starting at byte 100
426+ const fh2 = await open('input.txt', 'r');
427+ console.log(await text(fh2.pull({ start: 100, limit: 1024, autoClose: true })));
428+
429+ // Read with compression
430+ const fh3 = await open('input.txt', 'r');
431+ const compressed = fh3.pull(compressGzip(), { autoClose: true });
432+ ```
433+
434+ ```cjs
435+ const { open } = require('node:fs/promises');
436+ const { text } = require('node:stream/iter');
437+ const { compressGzip } = require('node:zlib/iter');
438+
439+ async function run() {
440+   const fh = await open('input.txt', 'r');
441+
442+   // Read as text
443+   console.log(await text(fh.pull({ autoClose: true })));
444+
445+   // Read 1 KB starting at byte 100
446+   const fh2 = await open('input.txt', 'r');
447+   console.log(await text(fh2.pull({ start: 100, limit: 1024, autoClose: true })));
448+
449+   // Read with compression
450+   const fh3 = await open('input.txt', 'r');
451+   const compressed = fh3.pull(compressGzip(), { autoClose: true });
452+ }
453+
454+ run().catch(console.error);
455+ ```
456+
457+ #### `filehandle.pullSync([...transforms][, options])`
458+
459+ <!-- YAML
460+ added: REPLACEME
461+ -->
462+
463+ > Stability: 1 - Experimental
464+
465+ * `...transforms` {Function|Object} Optional transforms to apply via
466+   [`stream/iter pullSync()`][].
467+ * `options` {Object}
468+   * `autoClose` {boolean} Close the file handle when the stream ends.
469+     **Default:** `false`.
470+   * `start` {number} Byte offset to begin reading from. When specified,
471+     reads use explicit positioning. **Default:** current file position.
472+   * `limit` {number} Maximum number of bytes to read before ending the
473+     iterator. **Default:** read until EOF.
474+   * `chunkSize` {number} Size in bytes of the buffer allocated for each
475+     read operation. **Default:** `131072` (128 KB).
476+ * Returns: {Iterable\<Uint8Array\[]>}
477+
478+ Synchronous counterpart of [`filehandle.pull()`][]. Returns a sync iterable
479+ that reads the file using synchronous I/O on the main thread. Reads are
480+ performed in `chunkSize`-byte chunks (default 128 KB).
481+
482+ The file handle is locked while the iterable is being consumed. Unlike the
483+ async `pull()`, this method does not support `AbortSignal` since all
484+ operations are synchronous.
485+
486+ This function is only available when the `--experimental-stream-iter` flag is
487+ enabled.
488+
489+ ```mjs
490+ import { open } from 'node:fs/promises';
491+ import { textSync, pipeToSync } from 'node:stream/iter';
492+ import { compressGzipSync, decompressGzipSync } from 'node:zlib/iter';
493+
494+ const fh = await open('input.txt', 'r');
495+
496+ // Read as text (sync)
497+ console.log(textSync(fh.pullSync({ autoClose: true })));
498+
499+ // Sync compress pipeline: file -> gzip -> file
500+ const src = await open('input.txt', 'r');
501+ const dst = await open('output.gz', 'w');
502+ pipeToSync(src.pullSync(compressGzipSync(), { autoClose: true }), dst.writer({ autoClose: true }));
503+ ```
504+
505+ ```cjs
506+ const { open } = require('node:fs/promises');
507+ const { textSync, pipeToSync } = require('node:stream/iter');
508+ const { compressGzipSync, decompressGzipSync } = require('node:zlib/iter');
509+
510+ async function run() {
511+   const fh = await open('input.txt', 'r');
512+
513+   // Read as text (sync)
514+   console.log(textSync(fh.pullSync({ autoClose: true })));
515+
516+   // Sync compress pipeline: file -> gzip -> file
517+   const src = await open('input.txt', 'r');
518+   const dst = await open('output.gz', 'w');
519+   pipeToSync(
520+     src.pullSync(compressGzipSync(), { autoClose: true }),
521+     dst.writer({ autoClose: true }),
522+   );
523+ }
524+
525+ run().catch(console.error);
526+ ```
527+
380528#### ` filehandle .read (buffer, offset, length, position)`
381529
382530<!-- YAML
@@ -859,6 +1007,121 @@ On Linux, positional writes don't work when the file is opened in append mode.
8591007The kernel ignores the position argument and always appends the data to
8601008the end of the file.
8611009
1010+ #### `filehandle.writer([options])`
1011+
1012+ <!-- YAML
1013+ added: REPLACEME
1014+ -->
1015+
1016+ > Stability: 1 - Experimental
1017+
1018+ * `options` {Object}
1019+   * `autoClose` {boolean} Close the file handle when the writer ends or
1020+     fails. **Default:** `false`.
1021+   * `start` {number} Byte offset to start writing at. When specified,
1022+     writes use explicit positioning. **Default:** current file position.
1023+   * `limit` {number} Maximum number of bytes the writer will accept.
1024+     Async writes (`write()`, `writev()`) that would exceed the limit reject
1025+     with `ERR_OUT_OF_RANGE`. Sync writes (`writeSync()`, `writevSync()`)
1026+     return `false`. **Default:** no limit.
1027+   * `chunkSize` {number} Maximum chunk size in bytes for synchronous write
1028+     operations. Writes larger than this threshold fall back to async I/O.
1029+     Set this to match the reader's `chunkSize` for optimal `pipeTo()`
1030+     performance. **Default:** `131072` (128 KB).
1031+ * Returns: {Object}
1032+   * `write(chunk[, options])` {Function} Returns {Promise\<void>}.
1033+     Accepts `Uint8Array`, `Buffer`, or string (UTF-8 encoded).
1034+     * `chunk` {Buffer|TypedArray|DataView|string}
1035+     * `options` {Object}
1036+       * `signal` {AbortSignal} If the signal is already aborted, the write
1037+         rejects with `AbortError` without performing I/O.
1038+   * `writev(chunks[, options])` {Function} Returns {Promise\<void>}. Uses
1039+     scatter/gather I/O via a single `writev()` syscall. Accepts mixed
1040+     `Uint8Array`/string arrays.
1041+     * `chunks` {Array\<Buffer|TypedArray|DataView|string>}
1042+     * `options` {Object}
1043+       * `signal` {AbortSignal} If the signal is already aborted, the write
1044+         rejects with `AbortError` without performing I/O.
1045+   * `writeSync(chunk)` {Function} Returns {boolean}. Attempts a synchronous
1046+     write. Returns `true` if the write succeeded, `false` if the caller
1047+     should fall back to async `write()`. Returns `false` when: the writer
1048+     is closed/errored, an async operation is in flight, the chunk exceeds
1049+     `chunkSize`, or the write would exceed `limit`.
1050+     * `chunk` {Buffer|TypedArray|DataView|string}
1051+   * `writevSync(chunks)` {Function} Returns {boolean}. Synchronous batch
1052+     write. Same fallback semantics as `writeSync()`.
1053+     * `chunks` {Array\<Buffer|TypedArray|DataView|string>}
1054+   * `end([options])` {Function} Returns {Promise\<number>} total bytes
1055+     written. Idempotent: returns `totalBytesWritten` if already closed,
1056+     returns the pending promise if already closing. Rejects if the writer
1057+     is in an errored state.
1058+     * `options` {Object}
1059+       * `signal` {AbortSignal} If the signal is already aborted, `end()`
1060+         rejects with `AbortError` and the writer remains open.
1061+   * `endSync()` {Function} Returns {number} total bytes written on
1062+     success, `-1` if the writer is errored or an async operation is in
1063+     flight. Idempotent when already closed.
1064+   * `fail(reason)` {Function} Puts the writer into a terminal error state.
1065+     Synchronous. If the writer is already closed or errored, this is a
1066+     no-op. If `autoClose` is true, closes the file handle synchronously.
1068+ Return a [`node:stream/iter`][] writer backed by this file handle.
1069+
1070+ The writer supports both `Symbol.asyncDispose` and `Symbol.dispose`:
1071+
1072+ * `await using w = fh.writer()` — if the writer is still open (no `end()`
1073+   called), `asyncDispose` calls `fail()`. If `end()` is pending, it waits
1074+   for it to complete.
1075+ * `using w = fh.writer()` — calls `fail()` unconditionally.
1076+
1077+ The `writeSync()` and `writevSync()` methods enable the try-sync fast path
1078+ used by [`stream/iter pipeTo()`][]. When the reader's chunk size matches the
1079+ writer's `chunkSize`, all writes in a `pipeTo()` pipeline complete
1080+ synchronously with zero promise overhead.
1081+
1082+ This function is only available when the `--experimental-stream-iter` flag is
1083+ enabled.
1084+
1085+ ```mjs
1086+ import { open } from 'node:fs/promises';
1087+ import { from, pipeTo } from 'node:stream/iter';
1088+ import { compressGzip } from 'node:zlib/iter';
1089+
1090+ // Async pipeline
1091+ const fh = await open('output.gz', 'w');
1092+ await pipeTo(from('Hello!'), compressGzip(), fh.writer({ autoClose: true }));
1093+
1094+ // Sync pipeline with limit
1095+ const src = await open('input.txt', 'r');
1096+ const dst = await open('output.txt', 'w');
1097+ const w = dst.writer({ limit: 1024 * 1024 }); // Max 1 MB
1098+ await pipeTo(src.pull({ autoClose: true }), w);
1099+ await w.end();
1100+ await dst.close();
1101+ ```
1102+
1103+ ```cjs
1104+ const { open } = require('node:fs/promises');
1105+ const { from, pipeTo } = require('node:stream/iter');
1106+ const { compressGzip } = require('node:zlib/iter');
1107+
1108+ async function run() {
1109+   // Async pipeline
1110+   const fh = await open('output.gz', 'w');
1111+   await pipeTo(from('Hello!'), compressGzip(), fh.writer({ autoClose: true }));
1112+
1113+   // Sync pipeline with limit
1114+   const src = await open('input.txt', 'r');
1115+   const dst = await open('output.txt', 'w');
1116+   const w = dst.writer({ limit: 1024 * 1024 }); // Max 1 MB
1117+   await pipeTo(src.pull({ autoClose: true }), w);
1118+   await w.end();
1119+   await dst.close();
1120+ }
1121+
1122+ run().catch(console.error);
1123+ ```
1124+
8621125#### ` filehandle[Symbol .asyncDispose ]()`
8631126
8641127<!-- YAML
@@ -8737,6 +9000,7 @@ the file contents.
87379000[` event ports` ]: https://illumos.org/man/port_create
87389001[` filehandle .createReadStream ()` ]: #filehandlecreatereadstreamoptions
87399002[` filehandle .createWriteStream ()` ]: #filehandlecreatewritestreamoptions
9003+ [`filehandle.pull()`]: #filehandlepulltransforms-options
87409004[` filehandle .writeFile ()` ]: #filehandlewritefiledata-options
87419005[` fs .access ()` ]: #fsaccesspath-mode-callback
87429006[` fs .accessSync ()` ]: #fsaccesssyncpath-mode
@@ -8787,6 +9051,10 @@ the file contents.
87879051[` inotify (7 )` ]: https://man7.org/linux/man-pages/man7/inotify.7.html
87889052[` kqueue (2 )` ]: https://www.freebsd.org/cgi/man.cgi?query=kqueue&sektion=2
87899053[` minimatch` ]: https://github.com/isaacs/minimatch
9054+ [`node:stream/iter`]: stream_iter.md
9055+ [`stream/iter pipeTo()`]: stream_iter.md#pipetosource-transforms-writer
9056+ [`stream/iter pull()`]: stream_iter.md#pullsource-transforms-options
9057+ [`stream/iter pullSync()`]: stream_iter.md#pullsyncsource-transforms
87909058[` util .promisify ()` ]: util.md#utilpromisifyoriginal
87919059[bigints]: https://tc39.github.io/proposal-bigint
87929060[caveats]: #caveats
0 commit comments