This repository has been archived by the owner on Feb 8, 2023. It is now read-only.

feat: init stream to buffer (#1)
dead-horse committed Feb 28, 2018
1 parent fdd5499 commit 3a99f5f
Showing 11 changed files with 225 additions and 0 deletions.
17 changes: 17 additions & 0 deletions .autod.conf.js
@@ -0,0 +1,17 @@
'use strict';

module.exports = {
  write: true,
  prefix: '^',
  test: [
    'test',
    'benchmark',
  ],
  devdep: [
    'egg-bin',
    'egg-ci',
    'autod',
    'eslint',
    'eslint-config-egg',
  ],
};
3 changes: 3 additions & 0 deletions .eslintrc
@@ -0,0 +1,3 @@
{
  "extends": "eslint-config-egg"
}
7 changes: 7 additions & 0 deletions .gitignore
@@ -0,0 +1,7 @@
logs/
npm-debug.log
node_modules/
coverage/
.idea/
.DS_Store
*.swp
11 changes: 11 additions & 0 deletions .travis.yml
@@ -0,0 +1,11 @@
sudo: false
language: node_js
node_js:
  - '8'
  - '9'
install:
  - npm i npminstall && npminstall
script:
  - npm run ci
after_script:
  - npminstall codecov && codecov
19 changes: 19 additions & 0 deletions README.md
@@ -2,3 +2,22 @@ stream-to-buf
====

Collect a readable stream's data into a buffer. Supports a max size limit.

## Install

```bash
npm i stream-to-buf
```

## Usage

```js
const streamToBuffer = require('stream-to-buf');
const fs = require('fs');
const path = require('path');

const stream = fs.createReadStream(path.join(__dirname, 'fixtures/file'));
streamToBuffer(stream, { maxSize: '10kb' }).then(onBuffer, onError);
```

### Options

- `maxSize`: if the data emitted by the stream exceeds `maxSize`, the returned promise rejects with an error (the stream is still fully consumed before it rejects).
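
A minimal sketch of handling that rejection, reusing the `stream` variable from the usage example above (the error's `code` and `status` come from this module):

```js
streamToBuffer(stream, { maxSize: '10kb' })
  .then(buf => console.log('read %d bytes', buf.length))
  .catch(err => {
    if (err.code === 'ENTITY_TOO_LARGE') {
      // err.status is 413, convenient when replying to an HTTP request
      console.error(err.message);
    }
  });
```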
15 changes: 15 additions & 0 deletions appveyor.yml
@@ -0,0 +1,15 @@
environment:
  matrix:
    - nodejs_version: '8'
    - nodejs_version: '9'

install:
  - ps: Install-Product node $env:nodejs_version
  - npm i npminstall && node_modules\.bin\npminstall

test_script:
  - node --version
  - npm --version
  - npm run test

build: off
55 changes: 55 additions & 0 deletions index.js
@@ -0,0 +1,55 @@
'use strict';

const bytes = require('humanize-bytes');

module.exports = (stream, options = {}) => {
  return new Promise((resolve, reject) => {
    // Non-readable input: resolve with undefined instead of rejecting.
    if (!stream.readable) return resolve();
    const maxSize = options.maxSize && bytes(options.maxSize);

    const bufs = [];
    let size = 0;
    let error;

    stream.on('data', onData);
    stream.on('end', onEnd);
    stream.on('error', onEnd);
    stream.on('close', onClose);

    function onData(buf) {
      // Once over the limit, keep draining the stream but stop buffering.
      if (error) return;

      size += buf.length;
      if (maxSize && size > maxSize) {
        error = new Error(`entity size exceed ${options.maxSize}`);
        error.code = 'ENTITY_TOO_LARGE';
        // usually used when parsing HTTP requests, so set status = 413
        error.status = 413;
        return;
      }
      bufs.push(buf);
    }

    function onEnd(err) {
      if (err instanceof Error) error = err;
      done();
    }

    function onClose() {
      done();
    }

    function done() {
      cleanup();
      if (error) reject(error);
      else resolve(Buffer.concat(bufs));
    }

    function cleanup() {
      // Remove all listeners so done() cannot fire twice
      // when both 'end' and 'close' are emitted.
      stream.removeListener('data', onData);
      stream.removeListener('end', onEnd);
      stream.removeListener('error', onEnd);
      stream.removeListener('close', onClose);
    }
  });
};
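
Because of the `stream.readable` guard above, the promise resolves `undefined` for non-readable input rather than rejecting. A small illustrative sketch of a caller that always wants a Buffer back (`readAll` is a hypothetical helper, not part of this module):

```js
const streamToBuffer = require('stream-to-buf');

// Hypothetical wrapper: fall back to an empty Buffer for non-readable input.
async function readAll(stream) {
  const buf = await streamToBuffer(stream);
  return buf || Buffer.alloc(0);
}
```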
40 changes: 40 additions & 0 deletions package.json
@@ -0,0 +1,40 @@
{
  "name": "stream-to-buf",
  "description": "convert stream to buffer",
  "version": "1.0.0",
  "homepage": "https://github.com/node-modules/stream-to-buf",
  "repository": {
    "type": "git",
    "url": "git://github.com/node-modules/stream-to-buf.git"
  },
  "dependencies": {
    "humanize-bytes": "^1.0.1"
  },
  "devDependencies": {
    "autod": "^3.0.1",
    "egg-bin": "^4.4.0",
    "egg-ci": "^1.8.0",
    "eslint": "^4.18.1",
    "eslint-config-egg": "^7.0.0"
  },
  "main": "index.js",
  "files": [
    "index.js"
  ],
  "scripts": {
    "lint": "eslint test *.js",
    "test": "npm run lint -- --fix && npm run test-local",
    "test-local": "egg-bin test",
    "cov": "egg-bin cov",
    "ci": "npm run lint && npm run cov",
    "autod": "autod"
  },
  "author": "dead_horse",
  "engines": {
    "node": ">= 8.0.0"
  },
  "ci": {
    "version": "8, 9"
  },
  "license": "MIT"
}
Binary file added test/fixtures/bigfile
Binary file not shown.
Binary file added test/fixtures/smallfile
Binary file not shown.
58 changes: 58 additions & 0 deletions test/index.test.js
@@ -0,0 +1,58 @@
'use strict';

const streamToBuffer = require('../');
const assert = require('assert');
const path = require('path');
const fs = require('fs');

describe('stream-to-buf', () => {
  it('should collect buffer successfully', async () => {
    const small = fs.createReadStream(path.join(__dirname, 'fixtures/smallfile'));
    let buffer = await streamToBuffer(small);
    assert(buffer.length === 1024);

    const big = fs.createReadStream(path.join(__dirname, 'fixtures/bigfile'));
    buffer = await streamToBuffer(big);
    assert(buffer.length === 102400);
  });

  it('should collect buffer when end and close both emitted', async () => {
    const small = fs.createReadStream(path.join(__dirname, 'fixtures/smallfile'));
    small.once('end', () => small.emit('close'));
    const buffer = await streamToBuffer(small);
    assert(buffer.length === 1024);
  });

  it('should return undefined if not readable', async () => {
    const buffer = await streamToBuffer('foo');
    assert(!buffer);
  });

  it('should collect buffer with maxSize', async () => {
    const small = fs.createReadStream(path.join(__dirname, 'fixtures/smallfile'));
    const big = fs.createReadStream(path.join(__dirname, 'fixtures/bigfile'));
    const buffer = await streamToBuffer(small, { maxSize: '2kb' });
    assert(buffer.length === 1024);
    try {
      await streamToBuffer(big, { maxSize: '2kb' });
      throw new Error('should not execute');
    } catch (err) {
      assert(err.message === 'entity size exceed 2kb');
      assert(err.code === 'ENTITY_TOO_LARGE');
      assert(err.status === 413);
    }
  });

  it('should reject when stream emits an error', async () => {
    const stream = fs.createReadStream(path.join(__dirname, 'fixtures/bigfile'));
    setImmediate(() => {
      stream.emit('error', new Error('mock error'));
    });
    try {
      await streamToBuffer(stream);
      throw new Error('should not execute');
    } catch (err) {
      assert(err.message === 'mock error');
    }
  });
});
