stream: add bytesRead property for readable #4372

Closed · wants to merge 1 commit
5 changes: 5 additions & 0 deletions doc/api/stream.markdown
@@ -252,6 +252,11 @@ readable: null
end
```


#### readable.bytesRead

The number of bytes read so far. If `objectMode` is `true`, the value is always `0`.

#### readable.isPaused()

* Return: `Boolean`
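A brief usage sketch of the `readable.bytesRead` property documented above, assuming this patch is applied; the stream and data are illustrative:

```js
const Readable = require('stream').Readable;

const readable = new Readable({ read: function(n) {} });

readable.push(new Buffer('hello'));  // 5 bytes
readable.push(new Buffer('world'));  // 5 more bytes
readable.push(null);

readable.on('data', function(chunk) {});
readable.on('end', function() {
  // bytesRead has accumulated the length of every buffer pushed
  // into the stream, so this logs 10.
  console.log(readable.bytesRead);
});
```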
3 changes: 3 additions & 0 deletions lib/_stream_readable.js
Expand Up @@ -83,6 +83,8 @@ function Readable(options) {

this._readableState = new ReadableState(options, this);

this.bytesRead = 0;

// legacy
this.readable = true;

@@ -135,6 +137,7 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var e = new Error('stream.unshift() after end event');
stream.emit('error', e);
} else {
stream.bytesRead += state.objectMode ? 0 : chunk.length;
Member
Are you really sure this tracks bytes and not characters?

Contributor Author
There is an edge case: when the encoding passed to `this.push(string)` is the same as the readable's encoding, the string is just passed through as-is, so what gets counted is the string's character length rather than its byte length.

Contributor
Sounds like a bit of a problem then? :(

Contributor
Do we need to revert this?

Member
I think so.

Contributor
PR coming

if (state.decoder && !addToFront && !encoding)
chunk = state.decoder.write(chunk);

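A minimal sketch of the pass-through edge case discussed in the thread above, assuming this patch is applied. When the encoding given to `push()` matches the stream's own encoding, the string is not converted to a `Buffer` first, so `chunk.length` adds a character count rather than a byte count; the stream and input below are illustrative:

```js
const Readable = require('stream').Readable;

const readable = new Readable({
  encoding: 'utf8',        // the stream decodes and emits utf8 strings
  read: function(n) {}
});

// '\u20ac' (the euro sign) is one character but three bytes in UTF-8.
// Because the push encoding matches the stream's encoding, the string
// is passed through untouched and chunk.length (1) is added to bytesRead.
readable.push('\u20ac', 'utf8');
readable.push(null);

readable.on('data', function(chunk) {});
readable.on('end', function() {
  console.log(readable.bytesRead);           // 1
  console.log(Buffer.byteLength('\u20ac'));  // 3, the actual byte count
});
```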
5 changes: 0 additions & 5 deletions lib/net.js
@@ -91,7 +91,6 @@ exports._normalizeConnectArgs = normalizeConnectArgs;
// called when creating new Socket, or when re-using a closed Socket
function initSocketHandle(self) {
self.destroyed = false;
self.bytesRead = 0;
self._bytesDispatched = 0;
self._sockname = null;

@@ -515,10 +514,6 @@ function onread(nread, buffer) {
// will prevent this from being called again until _read() gets
// called again.

// if it's not enough data, we'll just call handle.readStart()
// again right away.
self.bytesRead += nread;

// Optimization: emit the original buffer with end points
var ret = self.push(buffer);

119 changes: 119 additions & 0 deletions test/parallel/test-stream2-readable-bytesread.js
@@ -0,0 +1,119 @@
'use strict';

require('../common');
const assert = require('assert');
const Readable = require('stream').Readable;
const Duplex = require('stream').Duplex;
const Transform = require('stream').Transform;

(function() {
const readable = new Readable({
read: function(n) {
var i = this._index++;
if (i > this._max)
this.push(null);
else
this.push(new Buffer('a'));
}
});

readable._max = 1000;
readable._index = 1;

var total = 0;
readable.on('data', function(chunk) {
total += chunk.length;
});

readable.on('end', function() {
assert.equal(total, readable.bytesRead);
});
})();

(function() {
const readable = new Readable({
read: function(n) {
var i = this._index++;
if (i > this._max)
this.push(null);
else
this.push(new Buffer('a'));
}
});

readable._max = 1000;
readable._index = 1;

var total = 0;
readable.setEncoding('utf8');
readable.on('data', function(chunk) {
total += Buffer.byteLength(chunk);
});

readable.on('end', function() {
assert.equal(total, readable.bytesRead);
});
})();

(function() {
const duplex = new Duplex({
read: function(n) {
var i = this._index++;
if (i > this._max)
this.push(null);
else
this.push(new Buffer('a'));
},
write: function(chunk, encoding, next) {
next();
}
});

duplex._max = 1000;
duplex._index = 1;

var total = 0;
duplex.setEncoding('utf8');
duplex.on('data', function(chunk) {
total += Buffer.byteLength(chunk);
});

duplex.on('end', function() {
assert.equal(total, duplex.bytesRead);
});
})();

(function() {
const readable = new Readable({
read: function(n) {
var i = this._index++;
if (i > this._max)
this.push(null);
else
this.push(new Buffer('{"key":"value"}'));
}
});
readable._max = 1000;
readable._index = 1;

const transform = new Transform({
readableObjectMode : true,
transform: function(chunk, encoding, next) {
next(null, JSON.parse(chunk));
},
flush: function(done) {
done();
}
});

var total = 0;
readable.on('data', function(chunk) {
total += chunk.length;
});

transform.on('end', function() {
assert.equal(0, transform.bytesRead);
assert.equal(total, readable.bytesRead);
});
readable.pipe(transform);
})();