feat: use modern Streams API (#531)
* start with streams

* fix JSON decoder

* save

* Update multipart_parser.js

* use streams API in the multipart example

* use streaming api

end() does not return errors anymore; use the 'error' event instead
write() does not return the parsed length anymore (see the sketch after the changed-files summary below)

* use the new MultipartParser, which is a stream

* revert, add todo

* emit error via recommended stream way

* octet parser is a passthrough

* QuerystringParser is a stream

* Update incoming_form.js

* Dummy parser is a stream

* formatting

* listen for errors earlier

* decouple, don't self check internals

* use existing var

* display name of failing test

* chore: tweaks

Signed-off-by: Charlike Mike Reagent <opensource@tunnckocore.com>

* chore: merge with latest master

Signed-off-by: Charlike Mike Reagent <opensource@tunnckocore.com>

* chore: switch lib/ to src/ in tests

Signed-off-by: Charlike Mike Reagent <opensource@tunnckocore.com>

* chore: comment out failing test, fix it soon

Signed-off-by: Charlike Mike Reagent <opensource@tunnckocore.com>

* chore: make tests pass; ignore workarounds fixture

Signed-off-by: Charlike Mike Reagent <opensource@tunnckocore.com>

* chore: proper ignore; use tap reporter;

Signed-off-by: Charlike Mike Reagent <opensource@tunnckocore.com>

* chore: some formatting happens...

Signed-off-by: Charlike Mike Reagent <opensource@tunnckocore.com>

* chore: add to changelog

Signed-off-by: Charlike Mike Reagent <opensource@tunnckocore.com>

Co-authored-by: Charlike Mike Reagent <ceo@tunnckocore.com>
2 people authored and tunnckoCore committed Jan 28, 2020
1 parent a26cdac commit 19c252a
Showing 23 changed files with 516 additions and 463 deletions.
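The core of the change described above: the parsers are now Node.js streams, so failures surface as 'error' events rather than as return values from end(), and write() no longer reports how many bytes were parsed. Below is a minimal sketch of that calling convention, using an assumed toy Transform rather than formidable's actual parser code.

```js
const { Transform } = require('stream');

// Toy parser used only to illustrate the stream calling convention this
// commit adopts (an assumption for illustration, not code from this repo).
class SketchParser extends Transform {
  _transform(chunk, encoding, callback) {
    if (chunk.length === 0) {
      // Failures are reported through the stream machinery...
      callback(new Error('empty chunk'));
      return;
    }
    this.push(chunk);
    callback();
  }
}

const parser = new SketchParser();
// ...so callers subscribe to 'error' instead of checking return values.
parser.on('error', (err) => console.error('parser error:', err));
parser.on('data', (chunk) => console.log(`parsed ${chunk.length} bytes`));
parser.write(Buffer.from('hello'));
parser.end();
```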
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -11,6 +11,7 @@
* Improve examples and tests ([#523](https://github.com/node-formidable/node-formidable/pull/523))
* First step of Code quality improvements ([#525](https://github.com/node-formidable/node-formidable/pull/525))
* chore(funding): remove patreon & add npm funding field ([#532](https://github.com/node-formidable/node-formidable/pull/532))
* Modern Streams API ([#531](https://github.com/node-formidable/node-formidable/pull/531))

### v1.2.1 (2018-03-20)

2 changes: 1 addition & 1 deletion example/json.js
@@ -18,7 +18,7 @@ server = http.createServer(function(req, res) {
form
.on('error', function(err) {
res.writeHead(500, {'content-type': 'text/plain'});
res.end('error:\n\n'+util.inspect(err));
res.end('error:\n\n' + util.inspect(err));
console.error(err);
})
.on('field', function(field, value) {
34 changes: 13 additions & 21 deletions example/multipartParser.js
@@ -1,8 +1,6 @@
const { MultipartParser } = require('../lib/multipart_parser.js');


const multipartParser = new MultipartParser();

// hand crafted multipart
const boundary = '--abcxyz';
const next = '\r\n';
@@ -11,25 +9,19 @@ const buffer = Buffer.from(
`${boundary}${next}${formData}name="text"${next}${next}text ...${next}${next}${boundary}${next}${formData}name="z"${next}${next}text inside z${next}${next}${boundary}${next}${formData}name="file1"; filename="a.txt"${next}Content-Type: text/plain${next}${next}Content of a.txt.${next}${next}${boundary}${next}${formData}name="file2"; filename="a.html"${next}Content-Type: text/html${next}${next}<!DOCTYPE html><title>Content of a.html.</title>${next}${next}${boundary}--`
);

const logAnalyzed = (buffer, start, end) => {
const multipartParser = new MultipartParser();
multipartParser.on('data', ({name, buffer, start, end}) => {
console.log(`${name}:`);
if (buffer && start && end) {
console.log(String(buffer.slice(start, end)))
console.log(String(buffer.slice(start, end)));
}
};

// multipartParser.onPartBegin
// multipartParser.onPartEnd

// multipartParser.on('partData', logAnalyzed) // non supported syntax
multipartParser.onPartData = logAnalyzed;
multipartParser.onHeaderField = logAnalyzed;
multipartParser.onHeaderValue = logAnalyzed;
multipartParser.initWithBoundary(boundary.substring(2));


const bytesParsed = multipartParser.write(buffer);
const error = multipartParser.end();

if (error) {
console.log();
});
multipartParser.on('error', (error) => {
console.error(error);
}
});

multipartParser.initWithBoundary(boundary.substring(2)); // todo make better error message when it is forgotten
// const shouldWait = !multipartParser.write(buffer);
multipartParser.end();
// multipartParser.destroy();
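Because the hunk above interleaves the removed callback-style code with the added stream-style code, here is the added example consolidated into one readable piece. The body fixture is abridged and the `formData` constant is an assumed reconstruction (its definition lies outside the lines shown).

```js
const { MultipartParser } = require('../lib/multipart_parser.js');

// Abridged hand-crafted multipart body; `formData` is an assumed
// reconstruction of the constant defined outside this hunk.
const boundary = '--abcxyz';
const next = '\r\n';
const formData = 'Content-Disposition: form-data; ';
const buffer = Buffer.from(
  `${boundary}${next}${formData}name="text"${next}${next}text ...${next}${next}${boundary}--`
);

const multipartParser = new MultipartParser();

// Every parser event arrives on 'data' as { name, buffer, start, end }.
multipartParser.on('data', ({ name, buffer, start, end }) => {
  console.log(`${name}:`);
  if (buffer && start && end) {
    console.log(String(buffer.slice(start, end)));
  }
});

// Errors are emitted on the stream instead of being returned by end().
multipartParser.on('error', (error) => {
  console.error(error);
});

multipartParser.initWithBoundary(boundary.substring(2));
multipartParser.write(buffer);
multipartParser.end();
```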
2 changes: 1 addition & 1 deletion src/default_options.js
@@ -7,5 +7,5 @@ const defaultOptions = {
hash: false,
multiples: false,
};

exports.defaultOptions = defaultOptions;
18 changes: 18 additions & 0 deletions src/dummy_parser.js
@@ -0,0 +1,18 @@
const { Transform } = require('stream');


class DummyParser extends Transform {
constructor(incomingForm) {
super();
this.incomingForm = incomingForm;
}

_flush(callback) {
this.incomingForm.ended = true;
this.incomingForm._maybeEnd();
callback();
}
}


exports.DummyParser = DummyParser;
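DummyParser replaces the inline `dummyParser` factory removed further down in this diff. It is installed when a request has no body, and it leans on standard Transform behaviour: calling end() runs _flush, which marks the form as ended. A minimal usage sketch, with a stubbed form object standing in for a real IncomingForm:

```js
const { DummyParser } = require('./src/dummy_parser');

// Stub exposing only the two members DummyParser touches
// (an assumption for illustration, not a real IncomingForm).
const fakeForm = {
  ended: false,
  _maybeEnd() {
    if (this.ended) console.log('form finished');
  },
};

const parser = new DummyParser(fakeForm);
parser.end(); // no data written; _flush still runs and completes the form
```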
2 changes: 1 addition & 1 deletion src/file.js
@@ -14,7 +14,7 @@ function File(properties) {
this.lastModifiedDate = null;

this._writeStream = null;

for (var key in properties) {
this[key] = properties[key];
}
229 changes: 113 additions & 116 deletions src/incoming_form.js
@@ -4,6 +4,7 @@ var util = require('util'),
path = require('path'),
File = require('./file'),
defaultOptions = require('./default_options').defaultOptions,
DummyParser = require('./dummy_parser').DummyParser,
MultipartParser = require('./multipart_parser').MultipartParser,
QuerystringParser = require('./querystring_parser').QuerystringParser,
OctetParser = require('./octet_parser').OctetParser,
@@ -140,10 +141,7 @@ IncomingForm.prototype.parse = function(req, cb) {
return;
}

var err = this._parser.end();
if (err) {
this._error(err);
}
this._parser.end();
});

return this;
@@ -153,6 +151,9 @@ IncomingForm.prototype.writeHeaders = function(headers) {
this.headers = headers;
this._parseContentLength();
this._parseContentType();
this._parser.once('error', (error) => {
this._error(error);
});
};

IncomingForm.prototype.write = function(buffer) {
@@ -167,12 +168,9 @@ IncomingForm.prototype.write = function(buffer) {
this.bytesReceived += buffer.length;
this.emit('progress', this.bytesReceived, this.bytesExpected);

var bytesParsed = this._parser.write(buffer);
if (bytesParsed !== buffer.length) {
this._error(new Error(`parser error,${bytesParsed} of ${buffer.length} bytes parsed`));
}
this._parser.write(buffer);

return bytesParsed;
return this.bytesReceived;
};

IncomingForm.prototype.pause = function() {
@@ -249,19 +247,10 @@ IncomingForm.prototype.handlePart = function(part) {
});
};

function dummyParser(incomingForm) {
return {
end: function () {
incomingForm.ended = true;
incomingForm._maybeEnd();
return null;
}
};
}

IncomingForm.prototype._parseContentType = function() {
if (this.bytesExpected === 0) {
this._parser = dummyParser(this);
this._parser = new DummyParser(this);
return;
}

@@ -341,98 +330,103 @@ IncomingForm.prototype._initMultipart = function(boundary) {

parser.initWithBoundary(boundary);

parser.onPartBegin = function() {
part = new Stream();
part.readable = true;
part.headers = {};
part.name = null;
part.filename = null;
part.mime = null;

part.transferEncoding = 'binary';
part.transferBuffer = '';

headerField = '';
headerValue = '';
};

parser.onHeaderField = (b, start, end) => {
headerField += b.toString(this.encoding, start, end);
};

parser.onHeaderValue = (b, start, end) => {
headerValue += b.toString(this.encoding, start, end);
};

parser.onHeaderEnd = () => {
headerField = headerField.toLowerCase();
part.headers[headerField] = headerValue;

// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
var m = headerValue.match(/\bname=("([^"]*)"|([^\(\)<>@,;:\\"\/\[\]\?=\{\}\s\t/]+))/i);
if (headerField == 'content-disposition') {
if (m) {
part.name = m[2] || m[3] || '';
}
parser.on('data', ({name, buffer, start, end}) => {
if (name === 'partBegin') {
part = new Stream();
part.readable = true;
part.headers = {};
part.name = null;
part.filename = null;
part.mime = null;

part.transferEncoding = 'binary';
part.transferBuffer = '';

headerField = '';
headerValue = '';
} else if (name === 'headerField') {
headerField += buffer.toString(this.encoding, start, end);
} else if (name === 'headerValue') {
headerValue += buffer.toString(this.encoding, start, end);
} else if (name === 'headerEnd') {
headerField = headerField.toLowerCase();
part.headers[headerField] = headerValue;

// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
var m = headerValue.match(/\bname=("([^"]*)"|([^\(\)<>@,;:\\"\/\[\]\?=\{\}\s\t/]+))/i);
if (headerField == 'content-disposition') {
if (m) {
part.name = m[2] || m[3] || '';
}

part.filename = this._fileName(headerValue);
} else if (headerField == 'content-type') {
part.mime = headerValue;
} else if (headerField == 'content-transfer-encoding') {
part.transferEncoding = headerValue.toLowerCase();
}
part.filename = this._fileName(headerValue);
} else if (headerField == 'content-type') {
part.mime = headerValue;
} else if (headerField == 'content-transfer-encoding') {
part.transferEncoding = headerValue.toLowerCase();
}

headerField = '';
headerValue = '';
};
headerField = '';
headerValue = '';
} else if (name === 'headersEnd') {

switch(part.transferEncoding){
case 'binary':
case '7bit':
case '8bit': {
const dataPropagation = ({name, buffer, start, end}) => {
if (name === 'partData') {
part.emit('data', buffer.slice(start, end));
}
};
const dataStopPropagation = ({name}) => {
if (name === 'partEnd') {
part.emit('end');
parser.off('data', dataPropagation);
parser.off('data', dataStopPropagation);
}
};
parser.on('data', dataPropagation);
parser.on('data', dataStopPropagation);
break;
} case 'base64': {
const dataPropagation = ({name, buffer, start, end}) => {
if (name === 'partData') {
part.transferBuffer += buffer.slice(start, end).toString('ascii');

/*
four bytes (chars) in base64 convert to three bytes in binary
encoding, so we should always work with a number of bytes that
can be divided by 4; the result will be a number of bytes that
can be divided by 3.
*/
var offset = parseInt(part.transferBuffer.length / 4, 10) * 4;
part.emit('data', Buffer.from(part.transferBuffer.substring(0, offset), 'base64'));
part.transferBuffer = part.transferBuffer.substring(offset);
}
};
const dataStopPropagation = ({name}) => {
if (name === 'partEnd') {
part.emit('data', Buffer.from(part.transferBuffer, 'base64'));
part.emit('end');
parser.off('data', dataPropagation);
parser.off('data', dataStopPropagation);
}
};
parser.on('data', dataPropagation);
parser.on('data', dataStopPropagation);
break;

} default:
return this._error(new Error('unknown transfer-encoding'));
}

parser.onHeadersEnd = () => {
switch(part.transferEncoding){
case 'binary':
case '7bit':
case '8bit':
parser.onPartData = function(b, start, end) {
part.emit('data', b.slice(start, end));
};

parser.onPartEnd = function() {
part.emit('end');
};
break;

case 'base64':
parser.onPartData = function(b, start, end) {
part.transferBuffer += b.slice(start, end).toString('ascii');

/*
four bytes (chars) in base64 convert to three bytes in binary
encoding, so we should always work with a number of bytes that
can be divided by 4; the result will be a number of bytes that
can be divided by 3.
*/
var offset = parseInt(part.transferBuffer.length / 4, 10) * 4;
part.emit('data', Buffer.from(part.transferBuffer.substring(0, offset), 'base64'));
part.transferBuffer = part.transferBuffer.substring(offset);
};

parser.onPartEnd = function() {
part.emit('data', Buffer.from(part.transferBuffer, 'base64'));
part.emit('end');
};
break;

default:
return this._error(new Error('unknown transfer-encoding'));
this.onPart(part);
} else if (name === 'end') {
this.ended = true;
this._maybeEnd();
}

this.onPart(part);
};


parser.onEnd = () => {
this.ended = true;
this._maybeEnd();
};
});

this._parser = parser;
};
@@ -456,9 +450,9 @@ IncomingForm.prototype._initUrlencoded = function() {

var parser = new QuerystringParser(this.maxFields);

parser.onField = (key, val) => {
this.emit('field', key, val);
};
parser.on('data', ({key, value}) => {
this.emit('field', key, value);
});

parser.onEnd = () => {
this.ended = true;
@@ -525,16 +519,19 @@ IncomingForm.prototype._initOctetStream = function() {
IncomingForm.prototype._initJSONencoded = function() {
this.type = 'json';

var parser = new JSONParser(this);
var parser = new JSONParser();

parser.onField = (key, val) => {
this.emit('field', key, val);
};
parser.on('data', ({ key, value }) => {
this.emit('field', key, value);
});
// parser.on('data', (key) => {
// this.emit('field', key);
// });

parser.onEnd = () => {
parser.once('end', () => {
this.ended = true;
this._maybeEnd();
};
});

this._parser = parser;
};
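The pattern repeated throughout the incoming_form.js hunks above: the per-callback API (onPartBegin, onHeaderField, onHeaderValue, onPartData, onEnd, ...) is replaced by a single 'data' listener that branches on the `name` field of each emitted object. A stripped-down sketch of that dispatch, using a hypothetical plain emitter in place of the real MultipartParser stream:

```js
const { EventEmitter } = require('events');

// Hypothetical stand-in for the parser: it emits the same
// { name, buffer, start, end } objects the diff above listens for.
const parser = new EventEmitter();

const handlers = {
  partBegin() { console.log('-- new part'); },
  headerField({ buffer, start, end }) {
    console.log('field:', buffer.toString('utf8', start, end));
  },
  headerValue({ buffer, start, end }) {
    console.log('value:', buffer.toString('utf8', start, end));
  },
  end() { console.log('-- done'); },
};

parser.on('data', (event) => {
  const handler = handlers[event.name];
  if (handler) handler(event);
});

// Simulate what the parser would push for one header line.
const buf = Buffer.from('content-type: text/plain');
parser.emit('data', { name: 'partBegin' });
parser.emit('data', { name: 'headerField', buffer: buf, start: 0, end: 12 });
parser.emit('data', { name: 'headerValue', buffer: buf, start: 14, end: buf.length });
parser.emit('data', { name: 'end' });
```

Funnelling everything through one object-mode 'data' event keeps the parser an ordinary stream while still letting IncomingForm tell header, part-data, and end events apart.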
