Skip to content

Commit

Permalink
stream: add bytesRead property for readable
Browse files Browse the repository at this point in the history
Adding a bytesRead property to readable streams is
useful in some use cases.

When users want to know how many bytes have been read
from a readable stream, they need to calculate it in
userland. If an encoding is specified, getting the
value is very slow.

PR-URL: nodejs#4372
Reviewed-By: Trevor Norris <trev.norris@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
  • Loading branch information
JacksonTian authored and jasnell committed Jan 15, 2016
1 parent ebd9add commit bfb2cd0
Show file tree
Hide file tree
Showing 4 changed files with 127 additions and 5 deletions.
5 changes: 5 additions & 0 deletions doc/api/stream.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -250,6 +250,11 @@ readable: null
end
```


#### readable.bytesRead

The number of bytes read so far. If `objectMode` is `true`, the value is always `0`.

#### readable.isPaused()

* Return: `Boolean`
Expand Down
3 changes: 3 additions & 0 deletions lib/_stream_readable.js
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,8 @@ function Readable(options) {

this._readableState = new ReadableState(options, this);

this.bytesRead = 0;

// legacy
this.readable = true;

Expand Down Expand Up @@ -135,6 +137,7 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var e = new Error('stream.unshift() after end event');
stream.emit('error', e);
} else {
stream.bytesRead += state.objectMode ? 0 : chunk.length;
if (state.decoder && !addToFront && !encoding)
chunk = state.decoder.write(chunk);

Expand Down
5 changes: 0 additions & 5 deletions lib/net.js
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,6 @@ exports._normalizeConnectArgs = normalizeConnectArgs;
// called when creating new Socket, or when re-using a closed Socket
function initSocketHandle(self) {
self.destroyed = false;
self.bytesRead = 0;
self._bytesDispatched = 0;
self._sockname = null;

Expand Down Expand Up @@ -515,10 +514,6 @@ function onread(nread, buffer) {
// will prevent this from being called again until _read() gets
// called again.

// if it's not enough data, we'll just call handle.readStart()
// again right away.
self.bytesRead += nread;

// Optimization: emit the original buffer with end points
var ret = self.push(buffer);

Expand Down
119 changes: 119 additions & 0 deletions test/parallel/test-stream2-readable-bytesread.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
'use strict';

require('../common');
const assert = require('assert');
const Readable = require('stream').Readable;
const Duplex = require('stream').Duplex;
const Transform = require('stream').Transform;

// Case 1: plain binary-mode Readable.
// bytesRead must equal the total byte length of all 'data' chunks.
(function() {
  const readable = new Readable({
    read: function(n) {
      var i = this._index++;
      if (i > this._max)
        this.push(null);          // signal EOF after _max chunks
      else
        // Buffer.from() instead of the deprecated `new Buffer()` (DEP0005).
        this.push(Buffer.from('a'));
    }
  });

  // Emit 1000 one-byte chunks, then end.
  readable._max = 1000;
  readable._index = 1;

  var total = 0;
  readable.on('data', function(chunk) {
    total += chunk.length;
  });

  readable.on('end', function() {
    // Both sides are plain numbers — use strict comparison, no coercion.
    assert.strictEqual(total, readable.bytesRead);
  });
})();

// Case 2: Readable with setEncoding('utf8').
// bytesRead must still count raw bytes, even though 'data' emits strings.
(function() {
  const readable = new Readable({
    read: function(n) {
      var i = this._index++;
      if (i > this._max)
        this.push(null);          // signal EOF after _max chunks
      else
        // Buffer.from() instead of the deprecated `new Buffer()` (DEP0005).
        this.push(Buffer.from('a'));
    }
  });

  // Emit 1000 one-byte chunks, then end.
  readable._max = 1000;
  readable._index = 1;

  var total = 0;
  readable.setEncoding('utf8');
  readable.on('data', function(chunk) {
    // chunk is a string here; measure its byte length, not its char count.
    total += Buffer.byteLength(chunk);
  });

  readable.on('end', function() {
    // Both sides are plain numbers — use strict comparison, no coercion.
    assert.strictEqual(total, readable.bytesRead);
  });
})();

// Case 3: Duplex stream — bytesRead tracks only the readable side.
(function() {
  const duplex = new Duplex({
    read: function(n) {
      var i = this._index++;
      if (i > this._max)
        this.push(null);          // signal EOF after _max chunks
      else
        // Buffer.from() instead of the deprecated `new Buffer()` (DEP0005).
        this.push(Buffer.from('a'));
    },
    write: function(chunk, encoding, next) {
      next();
    }
  });

  // Emit 1000 one-byte chunks, then end.
  duplex._max = 1000;
  duplex._index = 1;

  var total = 0;
  duplex.setEncoding('utf8');
  duplex.on('data', function(chunk) {
    // chunk is a string here; measure its byte length, not its char count.
    total += Buffer.byteLength(chunk);
  });

  duplex.on('end', function() {
    // Both sides are plain numbers — use strict comparison, no coercion.
    assert.strictEqual(total, duplex.bytesRead);
  });
})();

// Case 4: Readable piped into a Transform whose readable side is in
// objectMode. The objectMode stream must report bytesRead === 0, while
// the upstream binary Readable still counts raw bytes.
(function() {
  const readable = new Readable({
    read: function(n) {
      var i = this._index++;
      if (i > this._max)
        this.push(null);          // signal EOF after _max chunks
      else
        // Buffer.from() instead of the deprecated `new Buffer()` (DEP0005).
        this.push(Buffer.from('{"key":"value"}'));
    }
  });
  readable._max = 1000;
  readable._index = 1;

  const transform = new Transform({
    readableObjectMode: true,
    transform: function(chunk, encoding, next) {
      // Parse each JSON chunk into an object for the objectMode side.
      next(null, JSON.parse(chunk));
    },
    flush: function(done) {
      done();
    }
  });

  var total = 0;
  readable.on('data', function(chunk) {
    total += chunk.length;
  });

  transform.on('end', function() {
    // objectMode readable side: bytesRead stays 0 regardless of pushes.
    assert.strictEqual(0, transform.bytesRead);
    // The binary upstream still counts every byte it emitted.
    assert.strictEqual(total, readable.bytesRead);
  });
  readable.pipe(transform);
})();

0 comments on commit bfb2cd0

Please sign in to comment.