test(api): filled out a bunch of test cases
zkat committed Mar 2, 2017
1 parent dfaed8f commit 626e293
Showing 7 changed files with 539 additions and 11 deletions.
46 changes: 45 additions & 1 deletion test/content.put-stream.js
@@ -77,6 +77,34 @@ test('errors if stream ends with no data', function (t) {
})
})

test('errors if input size does not match expected', function (t) {
t.plan(10)
let dig1 = null
pipe(fromString('abc'), putStream(CACHE, {
size: 5
}).on('digest', function (d) {
dig1 = d
}), function (err) {
t.ok(err, 'got an error when data smaller than expected')
t.equal(dig1, null, 'no digest returned')
t.equal(err.code, 'EBADSIZE', 'returns useful error code')
t.equal(err.expected, 5, 'error includes expected size')
t.equal(err.found, 3, 'error includes found size')
})
let dig2 = null
pipe(fromString('abcdefghi'), putStream(CACHE, {
size: 5
}).on('digest', function (d) {
dig2 = d
}), function (err) {
t.ok(err, 'got an error when data bigger than expected')
t.equal(dig2, null, 'no digest returned')
t.equal(err.code, 'EBADSIZE', 'returns useful error code')
t.equal(err.expected, 5, 'error includes expected size')
t.equal(err.found, 9, 'error includes found size')
})
})

test('does not overwrite content if already on disk', function (t) {
const CONTENT = 'foobarbaz'
const DIGEST = crypto.createHash('sha1').update(CONTENT).digest('hex')
@@ -181,7 +209,23 @@ test('cleans up tmp on successful completion', function (t) {
})
})

-test('cleans up tmp on error')
+test('cleans up tmp on error', function (t) {
const CONTENT = 'foobarbaz'
pipe(fromString(CONTENT), putStream(CACHE, { size: 1 }), function (err) {
t.ok(err, 'got an error')
t.equal(err.code, 'EBADSIZE', 'got expected code')
const tmp = path.join(CACHE, 'tmp')
fs.readdir(tmp, function (err, files) {
if (!err || err.code === 'ENOENT') {
files = files || []
t.deepEqual(files, [], 'nothing in the tmp dir!')
t.end()
} else {
throw err
}
})
})
})

test('checks the size of stream data if opts.size provided', function (t) {
const CONTENT = 'foobarbaz'
4 changes: 2 additions & 2 deletions test/content.read.js
@@ -35,7 +35,7 @@ test('readStream: returns a stream with cache content data', function (t) {

test('readStream: allows hashAlgorithm configuration', function (t) {
const CONTENT = 'foobarbaz'
-const HASH = 'sha1'
+const HASH = 'sha512'
const DIGEST = crypto.createHash(HASH).update(CONTENT).digest('hex')
const dir = {}
dir[DIGEST] = File(CONTENT)
@@ -48,7 +48,7 @@ test('readStream: allows hashAlgorithm configuration', function (t) {
let buf = ''
stream.on('data', function (data) { buf += data })
stream.on('end', function () {
-t.ok(true, 'stream completed successfully, off a sha1')
+t.ok(true, 'stream completed successfully, off a sha512')
t.equal(CONTENT, buf, 'cache contents read correctly')
t.end()
})
315 changes: 315 additions & 0 deletions test/get.js
@@ -0,0 +1,315 @@
'use strict'

const Promise = require('bluebird')

const crypto = require('crypto')
const finished = Promise.promisify(require('mississippi').finished)
const index = require('../lib/entry-index')
const memo = require('../lib/memoization')
const path = require('path')
const rimraf = Promise.promisify(require('rimraf'))
const Tacks = require('tacks')
const test = require('tap').test
const testDir = require('./util/test-dir')(__filename)

const Dir = Tacks.Dir
const File = Tacks.File

const CACHE = path.join(testDir, 'cache')
const CONTENT = bufferise('foobarbaz')
const KEY = 'my-test-key'
const ALGO = 'sha512'
const DIGEST = crypto.createHash(ALGO).update(CONTENT).digest('hex')
const METADATA = { foo: 'bar' }

const get = require('../get')

// Buffer.from exists on newer Node versions; fall back to the deprecated
// Buffer constructor on older ones.
function bufferise (string) {
return Buffer.from
? Buffer.from(string, 'utf8')
: new Buffer(string, 'utf8')
}

// Simple wrapper util cause this gets WORDY
function streamGet (byDigest) {
const args = [].slice.call(arguments, 1)
let data = []
let dataLen = 0
let hashAlgorithm
let digest
let metadata
const stream = (
byDigest ? get.stream.byDigest : get.stream
).apply(null, args)
stream.on('data', d => {
data.push(d)
dataLen += d.length
}).on('hashAlgorithm', h => {
hashAlgorithm = h
}).on('digest', d => {
digest = d
}).on('metadata', m => {
metadata = m
})
return finished(stream).then(() => ({
data: Buffer.concat(data, dataLen), hashAlgorithm, digest, metadata
}))
}

test('basic bulk get', t => {
const fixture = new Tacks(Dir({
'content': Dir({
[DIGEST]: File(CONTENT)
})
}))
fixture.create(CACHE)
return index.insert(CACHE, KEY, DIGEST, {
metadata: METADATA,
hashAlgorithm: ALGO
}).then(() => {
return get(CACHE, KEY)
}).then(res => {
t.deepEqual(res, {
metadata: METADATA,
data: CONTENT,
hashAlgorithm: ALGO,
digest: DIGEST
}, 'bulk key get returned proper data')
}).then(() => {
return get.byDigest(CACHE, DIGEST, {hashAlgorithm: ALGO})
}).then(res => {
t.deepEqual(res, CONTENT, 'byDigest returned proper data')
})
})

test('basic stream get', t => {
const fixture = new Tacks(Dir({
'content': Dir({
[DIGEST]: File(CONTENT)
})
}))
fixture.create(CACHE)
return index.insert(CACHE, KEY, DIGEST, {
metadata: METADATA,
hashAlgorithm: ALGO
}).then(() => {
return Promise.join(
streamGet(false, CACHE, KEY),
streamGet(true, CACHE, DIGEST, { hashAlgorithm: ALGO }),
(byKey, byDigest) => {
t.deepEqual(byKey, {
data: CONTENT,
hashAlgorithm: ALGO,
digest: DIGEST,
metadata: METADATA
}, 'got all expected data and fields from key fetch')
t.deepEqual(
byDigest.data,
CONTENT,
'got correct data from digest fetch'
)
}
)
})
})

test('ENOENT if not found', t => {
return get(CACHE, KEY).then(() => {
throw new Error('lookup should fail')
}).catch(err => {
t.ok(err, 'got an error')
t.equal(err.code, 'ENOENT', 'error code is ENOENT')
return get.info(CACHE, KEY)
}).catch(err => {
t.ok(err, 'got an error')
t.equal(err.code, 'ENOENT', 'error code is ENOENT')
})
})

test('get.info index entry lookup', t => {
return index.insert(CACHE, KEY, DIGEST, {
metadata: METADATA,
hashAlgorithm: ALGO
}).then(ENTRY => {
return get.info(CACHE, KEY).then(entry => {
t.deepEqual(entry, ENTRY, 'get.info() returned the right entry')
})
})
})

test('memoizes data on bulk read', t => {
memo.clearMemoized()
const fixture = new Tacks(Dir({
'content': Dir({
[DIGEST]: File(CONTENT)
})
}))
fixture.create(CACHE)
return index.insert(CACHE, KEY, DIGEST, {
metadata: METADATA,
hashAlgorithm: ALGO
}).then(ENTRY => {
return get(CACHE, KEY).then(() => {
t.deepEqual(memo.get(CACHE, KEY), null, 'no memoization!')
return get(CACHE, KEY, { memoize: true })
}).then(res => {
t.deepEqual(res, {
metadata: METADATA,
data: CONTENT,
hashAlgorithm: ALGO,
digest: DIGEST
}, 'usual data returned')
t.deepEqual(memo.get(CACHE, KEY), {
entry: ENTRY,
data: CONTENT
}, 'data inserted into memoization cache')
return rimraf(CACHE)
}).then(() => {
return get(CACHE, KEY)
}).then(res => {
t.deepEqual(res, {
metadata: METADATA,
data: CONTENT,
hashAlgorithm: ALGO,
digest: DIGEST
}, 'memoized data fetched by default')
return get(CACHE, KEY, { memoize: false }).then(() => {
throw new Error('expected get to fail')
}).catch(err => {
t.ok(err, 'got an error from unmemoized get')
t.equal(err.code, 'ENOENT', 'cached content not found')
t.deepEqual(memo.get(CACHE, KEY), {
entry: ENTRY,
data: CONTENT
}, 'data still in memoization cache')
})
})
})
})

test('memoizes data on stream read', t => {
memo.clearMemoized()
const fixture = new Tacks(Dir({
'content': Dir({
[DIGEST]: File(CONTENT)
})
}))
fixture.create(CACHE)
return index.insert(CACHE, KEY, DIGEST, {
metadata: METADATA,
hashAlgorithm: ALGO
}).then(ENTRY => {
return Promise.join(
streamGet(false, CACHE, KEY),
streamGet(true, CACHE, DIGEST, { hashAlgorithm: ALGO }),
() => {
t.deepEqual(memo.get(CACHE, KEY), null, 'no memoization by key!')
t.deepEqual(
memo.get.byDigest(CACHE, DIGEST, ALGO),
null,
'no memoization by digest!'
)
}
).then(() => {
memo.clearMemoized()
return streamGet(true, CACHE, DIGEST, {
memoize: true,
hashAlgorithm: ALGO
})
}).then(byDigest => {
t.deepEqual(byDigest.data, CONTENT, 'usual data returned from stream')
t.deepEqual(memo.get(CACHE, KEY), null, 'digest fetch = no key entry')
t.deepEqual(
memo.get.byDigest(CACHE, DIGEST, ALGO),
CONTENT,
'content memoized'
)
t.deepEqual(
memo.get.byDigest(CACHE, DIGEST, 'sha1'),
null,
'content memoization filtered by hashAlgo'
)
t.deepEqual(
memo.get.byDigest('whatev', DIGEST, ALGO),
null,
'content memoization filtered by cache'
)
}).then(() => {
memo.clearMemoized()
return streamGet(false, CACHE, KEY, { memoize: true })
}).then(byKey => {
t.deepEqual(byKey, {
metadata: METADATA,
data: CONTENT,
hashAlgorithm: ALGO,
digest: DIGEST
}, 'usual data returned from key fetch')
t.deepEqual(memo.get(CACHE, KEY), {
entry: ENTRY,
data: CONTENT
}, 'data inserted into memoization cache')
t.deepEqual(
memo.get.byDigest(CACHE, DIGEST, ALGO),
CONTENT,
'content memoized by digest, too'
)
t.deepEqual(
memo.get('whatev', KEY),
null,
'entry memoization filtered by cache'
)
}).then(() => {
return rimraf(CACHE)
}).then(() => {
return Promise.join(
streamGet(false, CACHE, KEY),
streamGet(true, CACHE, DIGEST, { hashAlgorithm: ALGO }),
(byKey, byDigest) => {
t.deepEqual(byKey, {
metadata: METADATA,
data: CONTENT,
hashAlgorithm: ALGO,
digest: DIGEST
}, 'key fetch fulfilled by memoization cache')
t.deepEqual(
byDigest.data,
CONTENT,
'digest fetch fulfilled by memoization cache'
)
}
)
}).then(() => {
return Promise.join(
streamGet(false, CACHE, KEY, {
memoize: false
}).catch(err => err),
streamGet(true, CACHE, DIGEST, {
hashAlgorithm: ALGO,
memoize: false
}).catch(err => err),
(keyErr, digestErr) => {
t.equal(keyErr.code, 'ENOENT', 'key get memoization bypassed')
t.equal(digestErr.code, 'ENOENT', 'digest get memoization bypassed')
}
)
})
})
})

test('get.info uses memoized data', t => {
memo.clearMemoized()
const ENTRY = {
key: KEY,
digest: DIGEST,
hashAlgorithm: ALGO,
time: +(new Date()),
metadata: null
}
memo.put(CACHE, ENTRY, CONTENT)
return get.info(CACHE, KEY).then(info => {
t.deepEqual(info, ENTRY, 'got the entry from memoization cache')
})
})

test('identical hashes with different algorithms do not conflict')
test('throw error if something is really wrong with bucket')
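One way the first stub might eventually be filled in, modeled on the index tests above. This is a sketch only: the key names and the second algorithm are illustrative assumptions, and the commit itself does not pin down the exact no-conflict semantics.

test('identical hashes with different algorithms do not conflict', t => {
  // Sketch: assumes the index can hold the same digest string under
  // different hashAlgorithm values without one entry clobbering the other.
  return Promise.join(
    index.insert(CACHE, 'key-one', DIGEST, { hashAlgorithm: ALGO }),
    index.insert(CACHE, 'key-two', DIGEST, { hashAlgorithm: 'whirlpool' })
  ).then(() => {
    return Promise.join(
      get.info(CACHE, 'key-one'),
      get.info(CACHE, 'key-two'),
      (one, two) => {
        t.equal(one.hashAlgorithm, ALGO, 'first entry kept its algorithm')
        t.equal(two.hashAlgorithm, 'whirlpool', 'second entry kept its algorithm')
        t.equal(one.digest, two.digest, 'digest strings are identical')
      }
    )
  })
})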