Skip to content

Commit

Permalink
initial commit
Browse files Browse the repository at this point in the history
  • Loading branch information
achingbrain committed May 11, 2019
0 parents commit 33bdb02
Show file tree
Hide file tree
Showing 6 changed files with 209 additions and 0 deletions.
5 changes: 5 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
node_modules
*.log
.DS_Store
.nyc_output
coverage
8 changes: 8 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
sudo: required
language: node_js

node_js:
- '8'
- '10'

script: npm run coveralls
41 changes: 41 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
# readable-stream-buffer-stream

[![Build status](https://travis-ci.org/achingbrain/readable-stream-buffer-stream.svg?branch=master)](https://travis-ci.org/achingbrain/readable-stream-buffer-stream?branch=master) [![Coverage Status](https://coveralls.io/repos/github/achingbrain/readable-stream-buffer-stream/badge.svg?branch=master)](https://coveralls.io/github/achingbrain/readable-stream-buffer-stream?branch=master) [![Dependencies Status](https://david-dm.org/achingbrain/readable-stream-buffer-stream/status.svg)](https://david-dm.org/achingbrain/readable-stream-buffer-stream)

> A readable stream that emits buffers containing bytes up to a certain length

## Install

```sh
$ npm install --save readable-stream-buffer-stream
```

## Usage

```javascript
const totalLength = 1000 // ... or any big number

// all options are optional, defaults are shown
const options = {
chunkSize: 4096, // how many bytes will be in each buffer
generator: (size, callback) => {
// call the passed callback with a Buffer object `size` bytes long.
//
// if omitted, `callback(null, crypto.randomBytes(size))` will be used
}
}

let buffers = []

const stream = bufferStream(totalLength, options)
stream.on('data', (buf) => {
buffers.push(buf)
})
stream.on('end', () => {
// the 'end' event passes no data - by this point `buffers` is an array of
// Buffers the combined length of which === totalLength
})
```
49 changes: 49 additions & 0 deletions index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
'use strict'

const crypto = require('crypto')
const Readable = require('stream').Readable

// Defaults applied when the caller omits an option: 4k chunks of
// cryptographically random bytes.
const defaultOptions = {
  chunkSize: 4096, // how many bytes each emitted buffer will contain
  generator: (size, callback) => {
    // Default byte source: `size` random bytes, node-style callback
    callback(null, crypto.randomBytes(size))
  }
}

/**
 * Creates a Readable stream that emits buffers until `limit` bytes in
 * total have been pushed, then ends.
 *
 * @param {number} limit - total number of bytes the stream will emit
 * @param {Object} [options]
 * @param {number} [options.chunkSize=4096] - maximum bytes per emitted buffer
 * @param {Function} [options.generator] - `(size, callback)` invoked to
 *   produce each chunk; must call `callback(err, buffer)` node-style
 * @returns {Readable} a stream that emits exactly `limit` bytes in total
 */
function bufferStream (limit, options = {}) {
  options = Object.assign({}, defaultOptions, options)
  let emitted = 0

  class BufferStream extends Readable {
    _read () {
      const remaining = limit - emitted

      // Nothing left to emit (covers limit <= 0, and the case where the
      // previous chunk consumed the budget exactly). The original ended
      // only when `emitted + chunkSize > limit`, so an exact multiple of
      // chunkSize triggered one extra generator call with size 0 and a
      // pointless zero-length push - end immediately instead.
      if (remaining <= 0) {
        this.push(null)
        return
      }

      const nextChunkSize = Math.min(options.chunkSize, remaining)

      options.generator(nextChunkSize, (err, bytes) => {
        if (err) {
          this.emit('error', err)
          return
        }

        // Never emit more than requested, even if the generator
        // over-produces
        bytes = bytes.slice(0, nextChunkSize)

        emitted += nextChunkSize

        this.push(bytes)

        if (emitted >= limit) {
          // we've finished, end the stream
          this.push(null)
        }
      })
    }
  }

  return new BufferStream()
}

module.exports = bufferStream
27 changes: 27 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
{
"name": "readable-stream-buffer-stream",
"version": "0.0.1",
"description": "A readable stream that emits buffers containing bytes up to a certain length",
"main": "index.js",
"scripts": {
"test": "nyc --check-coverage --lines 100 --reporter html --reporter lcov ava",
"lint": "standard",
"coveralls": "npm test && cat ./coverage/lcov.info | coveralls"
},
"author": "Alex Potsides <alex@achingbrain.net>",
"license": "ISC",
"repository": {
"type": "git",
"url": "git+https://github.com/achingbrain/readable-stream-buffer-stream.git"
},
"bugs": {
"url": "https://github.com/achingbrain/readable-stream-buffer-stream/issues"
},
"homepage": "https://github.com/achingbrain/readable-stream-buffer-stream#readme",
"devDependencies": {
"ava": "^1.4.1",
"coveralls": "^3.0.2",
"nyc": "^14.0.0",
"standard": "^12.0.1"
}
}
79 changes: 79 additions & 0 deletions test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
import bufferStream from './'
import test from 'ava'

test.cb('Should emit bytes', (t) => {
  const expected = 100
  const received = []

  const stream = bufferStream(expected)
  stream.on('data', (chunk) => received.push(chunk))
  stream.on('end', () => {
    // A 100 byte limit with the default 4k chunk size fits in one buffer
    t.is(received.length, 1)
    t.is(received[0].length, expected)
    t.end()
  })
})

test.cb('Should emit a number of buffers', (t) => {
  const expected = 100
  const chunkSize = 10
  const buffers = []

  const stream = bufferStream(expected, {
    chunkSize
  })
  stream.on('data', (buf) => {
    buffers.push(buf)
  })
  stream.on('end', () => {
    // 100 bytes emitted 10 at a time -> 10 buffers of `chunkSize` bytes.
    // Assert against `chunkSize` directly: the original compared against
    // `expected / chunkSize`, which only equals the chunk length by
    // coincidence of these particular numbers.
    t.is(buffers.length, 10)
    t.is(buffers[0].length, chunkSize)

    // The emitted buffers must add up to exactly the requested total
    const total = buffers.reduce((acc, cur) => acc + cur.length, 0)

    t.is(expected, total)
    t.end()
  })
})

test.cb('Should allow generation of buffers', (t) => {
  const expected = 100
  let generated = Buffer.alloc(0)
  const collected = []

  const stream = bufferStream(expected, {
    // Record every buffer handed to the stream so we can compare later
    generator: (size, callback) => {
      const chunk = Buffer.alloc(size, 1)
      generated = Buffer.concat([generated, chunk])

      callback(null, chunk)
    }
  })
  stream.on('data', (buf) => collected.push(buf))
  stream.on('end', () => {
    // Everything the generator produced should come back out of the stream
    t.deepEqual(generated, collected[0])
    t.end()
  })
})

// Title typo fixed: 'proagate' -> 'propagate'
test.cb('Should propagate byte generation errors', (t) => {
  const expected = 100
  const generationError = new Error('Urk!')

  const stream = bufferStream(expected, {
    // Always fail - the stream should surface this error to its consumer
    generator: (size, callback) => {
      callback(generationError)
    }
  })
  stream.on('data', () => {
    throw new Error('No error was thrown')
  })
  stream.on('error', (err) => {
    // The exact error object must be passed through unchanged
    t.is(err, generationError)
    t.end()
  })
})

0 comments on commit 33bdb02

Please sign in to comment.