Skip to content
This repository has been archived by the owner on Jul 3, 2019. It is now read-only.

Commit

Permalink
test(bench): added basic get + read benchmarks
Browse files Browse the repository at this point in the history
  • Loading branch information
zkat committed Mar 5, 2017
1 parent 8584ae2 commit db18a85
Show file tree
Hide file tree
Showing 2 changed files with 175 additions and 0 deletions.
78 changes: 78 additions & 0 deletions test/benchmarks/content.read.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
'use strict'

const CacheContent = require('../util/cache-content')
const crypto = require('crypto')
const Tacks = require('tacks')

const read = require('../../lib/content/read')

// Deterministic small test payload: 256 eight-byte chunks, chunk i filled
// with the byte value i. `Buffer.alloc` is preferred; the legacy fallback
// now also fills with `i` so the content is identical across Node versions
// (the old `new Buffer(8)` left its bytes uninitialized and unfilled).
const buf = []
for (let i = 0; i < 256; i++) {
  buf.push(Buffer.alloc ? Buffer.alloc(8, i) : new Buffer(8).fill(i))
}

const CONTENT = Buffer.concat(buf, buf.length * 8)
const DIGEST = crypto.createHash('sha512').update(CONTENT).digest('hex')

// Larger payload: 100 copies of CONTENT. totalLength is derived from the
// actual inputs — the previous hard-coded `CONTENT.length * 1000` was 10x
// too large, so Buffer.concat padded BIGCONTENT with bytes that were never
// appended (uninitialized memory on older Node versions).
const arr = []
for (let i = 0; i < 100; i++) {
  arr.push(CONTENT)
}
const BIGCONTENT = Buffer.concat(arr, CONTENT.length * arr.length)
const BIGDIGEST = crypto.createHash('sha512').update(BIGCONTENT).digest('hex')

module.exports = (suite, CACHE) => {
suite.add('content.read()', {
defer: true,
setup () {
const fixture = new Tacks(CacheContent({
[DIGEST]: CONTENT
}))
fixture.create(CACHE)
},
fn (deferred) {
read(
CACHE, DIGEST
).then(
() => deferred.resolve(),
err => deferred.reject(err)
)
}
})

suite.add('content.read.stream() small data', {
defer: true,
setup () {
const fixture = new Tacks(CacheContent({
[DIGEST]: CONTENT
}))
fixture.create(CACHE)
},
fn (deferred) {
const stream = read.stream(CACHE, DIGEST)
stream.on('data', () => {})
stream.on('error', err => deferred.reject(err))
stream.on('end', () => {
deferred.resolve()
})
}
})

suite.add('content.read.stream() big data', {
defer: true,
setup () {
const fixture = new Tacks(CacheContent({
[BIGDIGEST]: BIGCONTENT
}))
fixture.create(CACHE)
},
fn (deferred) {
const stream = read.stream(CACHE, BIGDIGEST)
stream.on('data', () => {})
stream.on('error', err => deferred.reject(err))
stream.on('end', () => {
deferred.resolve()
})
}
})
}
97 changes: 97 additions & 0 deletions test/benchmarks/get.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
'use strict'

const CacheContent = require('../util/cache-content')
const crypto = require('crypto')
const memo = require('../../lib/memoization')
const Tacks = require('tacks')

const get = require('../../get')

// Deterministic small test payload: 256 eight-byte chunks, chunk i filled
// with the byte value i. `Buffer.alloc` is preferred; the legacy fallback
// now also fills with `i` so the content is identical across Node versions
// (the old `new Buffer(8)` left its bytes uninitialized and unfilled).
const buf = []
for (let i = 0; i < 256; i++) {
  buf.push(Buffer.alloc ? Buffer.alloc(8, i) : new Buffer(8).fill(i))
}

const CONTENT = Buffer.concat(buf, buf.length * 8)
const DIGEST = crypto.createHash('sha512').update(CONTENT).digest('hex')

// Larger payload: 100 copies of CONTENT. totalLength is derived from the
// actual inputs — the previous hard-coded `CONTENT.length * 1000` was 10x
// too large, so Buffer.concat padded BIGCONTENT with bytes that were never
// appended (uninitialized memory on older Node versions).
const arr = []
for (let i = 0; i < 100; i++) {
  arr.push(CONTENT)
}
const BIGCONTENT = Buffer.concat(arr, CONTENT.length * arr.length)
const BIGDIGEST = crypto.createHash('sha512').update(BIGCONTENT).digest('hex')

module.exports = (suite, CACHE) => {
suite.add('get.byDigest()', {
defer: true,
setup () {
const fixture = new Tacks(CacheContent({
[DIGEST]: CONTENT
}))
fixture.create(CACHE)
},
fn (deferred) {
get.byDigest(
CACHE, DIGEST
).then(
() => deferred.resolve(),
err => deferred.reject(err)
)
}
})

suite.add('get.byDigest() memoized', {
defer: true,
setup () {
memo.put.byDigest(CACHE, DIGEST, 'sha512', CONTENT)
},
fn (deferred) {
get.byDigest(
CACHE, DIGEST
).then(
() => deferred.resolve(),
err => deferred.reject(err)
)
},
tearDown () {
memo.clearMemoized()
}
})

suite.add('get.stream.byDigest() small data', {
defer: true,
setup () {
const fixture = new Tacks(CacheContent({
[DIGEST]: CONTENT
}))
fixture.create(CACHE)
},
fn (deferred) {
const stream = get.stream.byDigest(CACHE, DIGEST, { memoize: false })
stream.on('data', () => {})
stream.on('error', err => deferred.reject(err))
stream.on('end', () => {
deferred.resolve()
})
}
})

suite.add('get.stream() big data', {
defer: true,
setup () {
const fixture = new Tacks(CacheContent({
[BIGDIGEST]: BIGCONTENT
}))
fixture.create(CACHE)
},
fn (deferred) {
const stream = get.stream.byDigest(CACHE, BIGDIGEST)
stream.on('data', () => {})
stream.on('error', err => deferred.reject(err))
stream.on('end', () => {
deferred.resolve()
})
}
})
}

0 comments on commit db18a85

Please sign in to comment.