Skip to content
This repository has been archived by the owner on Feb 8, 2023. It is now read-only.

feat: init stream to buffer #1

Merged
merged 7 commits into from
Feb 28, 2018
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions .autod.conf.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
'use strict';

// Configuration for autod (`npm run autod`), which refreshes the
// dependency versions listed in package.json.
module.exports = {
  // Write the resolved versions back into package.json.
  write: true,
  // Semver range prefix applied to updated versions (e.g. "^1.2.3").
  prefix: '^',
  // Directories whose requires count as devDependencies (test-only code).
  test: [
    'test',
    'benchmark',
  ],
  // Always treated as devDependencies, even when not required anywhere.
  devdep: [
    'egg-bin',
    'egg-ci',
    'autod',
    'eslint',
    'eslint-config-egg',
  ],
};
3 changes: 3 additions & 0 deletions .eslintrc
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"extends": "eslint-config-egg"
}
7 changes: 7 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Runtime logs
logs/
npm-debug.log
# Installed dependencies
node_modules/
# Test coverage output
coverage/
# Editor and OS artifacts
.idea/
.DS_Store
*.swp
11 changes: 11 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Travis CI: lint + coverage on Node.js 8 and 9.
sudo: false
language: node_js
node_js:
  - '8'
  - '9'
install:
  # npminstall is the fast installer used across the egg/node-modules ecosystem.
  - npm i npminstall && npminstall
script:
  - npm run ci
after_script:
  # Upload the coverage report produced by `npm run ci`.
  - npminstall codecov && codecov
19 changes: 19 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,22 @@ stream-to-buf
====

Collect a readable stream's data into a buffer, with optional max-size limiting.

## Install

```bash
npm i stream-to-buf
```

## Usage

```js
const streamToBuffer = require('stream-to-buf');

const stream = fs.createReadStream(path.join(__dirname, 'fixtures/file'));
streamToBuffer(stream, { maxSize: '10kb' }).then(onBuffer, onError);
```

### Options

- `maxSize`: If the total size of the stream's emitted data exceeds `maxSize`, the returned promise rejects with an error (the stream is still fully consumed before it rejects).
15 changes: 15 additions & 0 deletions appveyor.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# AppVeyor CI: run the test suite on Windows, Node.js 8 and 9.
environment:
  matrix:
    - nodejs_version: '8'
    - nodejs_version: '9'

install:
  - ps: Install-Product node $env:nodejs_version
  # Invoke npminstall through its Windows executable path.
  - npm i npminstall && node_modules\.bin\npminstall

test_script:
  # Print versions first so failure logs are easier to diagnose.
  - node --version
  - npm --version
  - npm run test

# Pure Node.js package: no MSBuild step.
build: off
57 changes: 57 additions & 0 deletions index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
'use strict';

const bytes = require('humanize-bytes');

module.exports = (stream, options = {}) => {
return new Promise((resolve, reject) => {
if (!stream.readable) return resolve();
const maxSize = options.maxSize && bytes(options.maxSize);

let bufs = [];
let size = 0;
let error;

stream.on('data', onData);
stream.on('end', onEnd);
stream.on('error', onEnd);
stream.on('close', onClose);

function onData(buf) {
if (error) return;

size += buf.length;
if (maxSize && size > maxSize) {
error = new Error(`entity size exceed ${options.maxSize}`);
error.code = 'ENTITY_TOO_LARGE';
// usualy use in parse http request so we set status = 413
error.status = 413;
return;
}
bufs.push(buf);
}

function onEnd(err) {
if (err instanceof Error) error = err;
done();
}

function onClose() {
done();
}

function done() {

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

这个要 once?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

promise 只会 resolve 一次,这个多次调用也无所谓的

cleanup();
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

放到前面了

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

额,这个还不能放前面,bufs 被清空了。问题不大,反正都是同步的。

if (error) reject(error);
else resolve(Buffer.concat(bufs));
}

function cleanup() {
bufs = [];
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

bufs = null

error = null;
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

bufs 和 error 都不需要设置吧

stream.removeListener('data', onData);
stream.removeListener('end', onEnd);
stream.removeListener('error', onEnd);
stream.removeListener('close', onClose);
}
});
};
40 changes: 40 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
{
"name": "stream-to-buf",
"description": "convert stream to buffer",
"version": "1.0.0",
"homepage": "https://github.com/node-modules/stream-to-buf",
"repository": {
"type": "git",
"url": "git://github.com/node-modules/stream-to-buf.git"
},
"dependencies": {
"humanize-bytes": "^1.0.1"
},
"devDependencies": {
"autod": "^3.0.1",
"egg-bin": "^4.4.0",
"egg-ci": "^1.8.0",
"eslint": "^4.18.1",
"eslint-config-egg": "^7.0.0"
},
"main": "index.js",
"files": [
"index.js"
],
"scripts": {
"lint": "eslint test *.js",
"test": "npm run lint -- --fix && npm run test-local",
"test-local": "egg-bin test",
"cov": "egg-bin cov",
"ci": "npm run lint && npm run cov",
"autod": "autod"
},
"author": "dead_horse",
"engines": {
"node": ">= 8.0.0"
},
"ci": {
"version": "8, 9"
},
"license": "MIT"
}
Binary file added test/fixtures/bigfile
Binary file not shown.
Binary file added test/fixtures/smallfile
Binary file not shown.
51 changes: 51 additions & 0 deletions test/index.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
'use strict';

const streamToBuffer = require('../');
const assert = require('assert');
const path = require('path');
const fs = require('fs');

describe('stream-to-buf', () => {
  // Open a read stream for a fixture relative to this test file.
  const open = file => fs.createReadStream(path.join(__dirname, file));

  it('should collect buffer success', async () => {
    let buffer = await streamToBuffer(open('fixtures/smallfile'));
    assert(buffer.length === 1024);

    buffer = await streamToBuffer(open('fixtures/bigfile'));
    assert(buffer.length === 102400);
  });

  it('should return undefined if not readable', async () => {
    assert(!(await streamToBuffer('foo')));
  });

  it('should collect buffer with maxSize', async () => {
    // Under the limit: resolves normally.
    const buffer = await streamToBuffer(open('fixtures/smallfile'), { maxSize: '2kb' });
    assert(buffer.length === 1024);

    // Over the limit: rejects with a 413 ENTITY_TOO_LARGE error.
    let thrown;
    try {
      await streamToBuffer(open('fixtures/bigfile'), { maxSize: '2kb' });
    } catch (err) {
      thrown = err;
    }
    assert(thrown);
    assert(thrown.message === 'entity size exceed 2kb');
    assert(thrown.code === 'ENTITY_TOO_LARGE');
    assert(thrown.status === 413);
  });

  it('should throw end by error', async () => {
    const stream = open('fixtures/bigfile');
    setImmediate(() => stream.emit('error', new Error('mock error')));

    let thrown;
    try {
      await streamToBuffer(stream);
    } catch (err) {
      thrown = err;
    }
    assert(thrown && thrown.message === 'mock error');
  });
});