Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Streaming: Don't load the entire file into memory #17

Merged
merged 5 commits into from May 17, 2015
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
16 changes: 16 additions & 0 deletions .zuul.yml
@@ -0,0 +1,16 @@
# zuul test-runner configuration: execute the tape-based test suite
# (see test/*.js, wired up via the package.json "test" script) in each
# of the browser/version combinations listed below.
ui: tape
# Browsers to run against; "latest" tracks the newest available version.
browsers:
- name: chrome
version: latest
- name: firefox
version: latest
- name: safari
version: latest
- name: ie
version: latest
- name: iphone
version: latest
- name: ipad
version: latest
- name: android
version: latest
9 changes: 6 additions & 3 deletions package.json
Expand Up @@ -4,7 +4,8 @@
"description": "W3C File Reader API streaming interfaces",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"test": "zuul -- test/*.js",
"test-local": "zuul --local -- test/*.js",
"gendocs": "gendocs > README.md"
},
"repository": {
Expand All @@ -25,11 +26,13 @@
"devDependencies": {
"crel": "^2.1.8",
"drag-and-drop-files": "0.0.1",
"feature": "^1.0.0"
"feature": "^1.0.0",
"tape": "^4.0.0",
"zuul": "^3.0.0"
},
"dependencies": {
"extend.js": "0.0.2",
"inherits": "^2.0.1",
"typedarray-to-buffer": "^3.0.0"
}
}
}
88 changes: 45 additions & 43 deletions read.js
Expand Up @@ -15,12 +15,13 @@ function FileReadStream(file, opts) {

// save the read offset
this._offset = 0;
this._eof = false;
this._ready = false;
this._file = file;
this._size = file.size;
this._chunkSize = opts.chunkSize || Math.max(this._size / 1000, 200 * 1024);

// create the reader
this.reader = new FileReader();
this.reader.onprogress = this._handleProgress.bind(this);
this.reader.onload = this._handleLoad.bind(this);

// generate the header blocks that we will send as part of the initial payload
this._generateHeaderBlocks(file, opts, function(err, blocks) {
Expand All @@ -29,8 +30,15 @@ function FileReadStream(file, opts) {
return readStream.emit('error', err);
}

readStream._headerBlocks = blocks || [];
readStream.reader.readAsArrayBuffer(file);
// push the header blocks out to the stream
if (Array.isArray(blocks)) {
blocks.forEach(function (block) {
readStream.push(block);
});
}

readStream._ready = true;
readStream.emit('_ready');
});
}

Expand All @@ -41,50 +49,44 @@ FileReadStream.prototype._generateHeaderBlocks = function(file, opts, callback)
callback(null, []);
};

FileReadStream.prototype._read = function(bytes) {
var stream = this;
FileReadStream.prototype._read = function() {
if (!this._ready) {
this.once('_ready', this._read.bind(this));
return;
}
var readStream = this;
var reader = this.reader;

function checkBytes() {
var startOffset = stream._offset;
var endOffset = stream._offset + bytes;
var availableBytes = reader.result && reader.result.byteLength;
var done = reader.readyState === 2 && endOffset > availableBytes;
var chunk;

// console.log('checking bytes available, need: ' + endOffset + ', got: ' + availableBytes);
if (availableBytes && (done || availableBytes > endOffset)) {
// get the data chunk
chunk = toBuffer(new Uint8Array(
reader.result,
startOffset,
Math.min(bytes, reader.result.byteLength - startOffset)
));

// update the stream offset
stream._offset = startOffset + chunk.length;

// send the chunk
// console.log('sending chunk, ended: ', chunk.length === 0);
stream._eof = chunk.length === 0;
return stream.push(chunk.length > 0 ? chunk : null);
}
var startOffset = this._offset;
var endOffset = this._offset + this._chunkSize;
if (endOffset > this._size) endOffset = this._size;

stream.once('readable', checkBytes);
if (startOffset === this._size) {
this.destroy();
this.push(null);
return;
}

// push the header blocks out to the stream
if (this._headerBlocks.length > 0) {
return this.push(this._headerBlocks.shift());
}
reader.onload = function() {
// update the stream offset
readStream._offset = endOffset;

checkBytes();
};
// get the data chunk
readStream.push(toBuffer(reader.result));
}
reader.onerror = function() {
readStream.emit('error', reader.error);
}

FileReadStream.prototype._handleLoad = function(evt) {
this.emit('readable');
reader.readAsArrayBuffer(this._file.slice(startOffset, endOffset));
};

FileReadStream.prototype._handleProgress = function(evt) {
this.emit('readable');
};
// Tear the stream down: detach the FileReader callbacks, abort any
// in-flight read, and drop the file/reader references so the browser
// can release the underlying resources.
FileReadStream.prototype.destroy = function() {
  this._file = null;

  var reader = this.reader;
  if (reader) {
    reader.onload = null;
    reader.onerror = null;
    // abort() can throw when no read is in progress; this is
    // best-effort cleanup, so swallow that deliberately.
    try {
      reader.abort();
    } catch (e) {
      // nothing to recover here
    }
  }

  this.reader = null;
}
36 changes: 36 additions & 0 deletions test/read.js
@@ -0,0 +1,36 @@
var FileReadStream = require('../').read;
var test = require('tape');

// Exercise the read stream against blobs of increasing size.
[3, 30, 300].forEach(function (megabytes) {
  test('read stream (' + megabytes + 'MB blob)', function (t) {
    testReadStream(t, megabytes * 1000 * 1000);
  });
});

/**
 * Stream a blob of `size` bytes through FileReadStream and assert the
 * reassembled output matches the original data byte-for-byte.
 *
 * @param {Object} t - tape test handle (plan/error/deepEqual)
 * @param {number} size - blob size in bytes
 */
function testReadStream(t, size) {
  // Exactly one assertion fires: deepEqual on success, t.error on failure.
  t.plan(1);

  // Buffer#fill with a string repeats the pattern until the buffer is full.
  var data = new Buffer(size).fill('abc');
  var blob = new Blob([ data.buffer ]);

  var stream = new FileReadStream(blob);
  stream.on('error', function (err) {
    console.error(err);
    // Pass the Error object itself — tape's t.error asserts it is falsy
    // and reports the full error (not just its message) when it is not.
    t.error(err);
  });

  var chunks = [];
  stream.on('data', function(chunk) {
    chunks.push(chunk);
  });

  stream.on('end', function() {
    var combined = Buffer.concat(chunks);
    t.deepEqual(combined, data);
  });
}