diff --git a/README.md b/README.md
index 2e712df..6e34271 100644
--- a/README.md
+++ b/README.md
@@ -35,6 +35,8 @@ can just as easily be used on its own
* [`clearMemoized`](#clear-memoized)
* [`tmp.mkdir`](#tmp-mkdir)
* [`tmp.withTmp`](#with-tmp)
+ * Integrity
+ * [Subresource Integrity](#integrity)
* [`verify`](#verify)
* [`verify.lastRun`](#verify-last-run)
@@ -49,7 +51,7 @@ const cachePath = '/tmp/my-toy-cache'
const key = 'my-unique-key-1234'
// Cache it! Use `cachePath` as the root of the content cache
-cacache.put(cachePath, key, '10293801983029384').then(digest => {
+cacache.put(cachePath, key, '10293801983029384').then(integrity => {
console.log(`Saved content to ${cachePath}.`)
})
@@ -66,7 +68,7 @@ cacache.get.stream(
})
// The same thing, but skip the key index.
-cacache.get.byDigest(cachePath, tarballSha512).then(data => {
+cacache.get.byDigest(cachePath, integrityHash).then(data => {
fs.writeFile(destination, data, err => {
console.log('tarball data fetched based on its sha512sum and written out!')
})
@@ -107,8 +109,7 @@ cacache.ls(cachePath).then(console.log)
{
'my-thing': {
key: 'my-thing',
- digest: 'deadbeef',
- hashAlgorithm: 'sha512',
+    integrity: 'sha512-BaSe64/EnCoDED+HAsh==',
path: '.testcache/content/deadbeef', // joined with `cachePath`
time: 12345698490,
metadata: {
@@ -119,8 +120,7 @@ cacache.ls(cachePath).then(console.log)
},
'other-thing': {
key: 'other-thing',
- digest: 'bada55',
- hashAlgorithm: 'whirlpool',
+ integrity: 'sha1-ANothER+hasH=',
path: '.testcache/content/bada55',
time: 11992309289
}
@@ -141,8 +141,7 @@ cacache.ls.stream(cachePath).on('data', console.log)
// Output
{
key: 'my-thing',
- digest: 'deadbeef',
- hashAlgorithm: 'sha512',
+ integrity: 'sha512-BaSe64HaSh',
path: '.testcache/content/deadbeef', // joined with `cachePath`
time: 12345698490,
metadata: {
@@ -154,8 +153,7 @@ cacache.ls.stream(cachePath).on('data', console.log)
{
key: 'other-thing',
- digest: 'bada55',
- hashAlgorithm: 'whirlpool',
+ integrity: 'whirlpool-WoWSoMuchSupport',
path: '.testcache/content/bada55',
time: 11992309289
}
@@ -165,20 +163,23 @@ cacache.ls.stream(cachePath).on('data', console.log)
}
```
-#### `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, digest, hashAlgorithm})`
+#### `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, integrity})`
Returns an object with the cached data, integrity, and metadata identified by
`key`. The `data` property of this object will be a `Buffer` instance that
presumably holds some data that means something to you. I'm sure you know what
-to do with it! cacache just won't care. `hashAlgorithm` is the algorithm used
-to calculate the `digest` of the content. This algorithm must be used if you
-fetch later with `get.byDigest`.
+to do with it! cacache just won't care.
+
+`integrity` is a [Subresource Integrity](#integrity) string. That is, a
+string that can be used to verify `data`, which looks like
+`<hash-algorithm>-<base64-hash>`.
If there is no content identified by `key`, or if the locally-stored data does
not pass the validity checksum, the promise will be rejected.
A sub-function, `get.byDigest`, may be used for identical behavior, except lookup
-will happen by content digest, bypassing the index entirely. This version of the
+will happen by integrity hash, bypassing the index entirely. This version of the
function *only* returns `data` itself, without any wrapper.
##### Note
@@ -197,15 +198,12 @@ cache.get(cachePath, 'my-thing').then(console.log)
metadata: {
thingName: 'my'
},
- digest: 'deadbeef',
- hashAlgorithm: 'sha512'
+ integrity: 'sha512-BaSe64HaSh',
data: Buffer#<deadbeef>
}
// Look up by digest
-cache.get.byDigest(cachePath, 'deadbeef', {
- hashAlgorithm: 'sha512'
-}).then(console.log)
+cache.get.byDigest(cachePath, 'sha512-BaSe64HaSh').then(console.log)
// Output:
Buffer#<deadbeef>
```
@@ -217,12 +215,12 @@ Returns a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_s
If there is no content identified by `key`, or if the locally-stored data does
not pass the validity checksum, an error will be emitted.
-`metadata` and `digest` events will be emitted before the stream closes, if
+`metadata` and `integrity` events will be emitted before the stream closes, if
you need to collect that extra data about the cached entry.
A sub-function, `get.stream.byDigest`, may be used for identical behavior,
-except lookup will happen by content digest, bypassing the index entirely. This
-version does not emit the `metadata` and `digest` events at all.
+except lookup will happen by integrity hash, bypassing the index entirely. This
+version does not emit the `metadata` and `integrity` events at all.
##### Example
@@ -232,21 +230,18 @@ cache.get.stream(
cachePath, 'my-thing'
).on('metadata', metadata => {
console.log('metadata:', metadata)
-}).on('hashAlgorithm', algo => {
- console.log('hashAlgorithm:', algo)
-}).on('digest', digest => {
- console.log('digest:', digest)
+}).on('integrity', integrity => {
+ console.log('integrity:', integrity)
}).pipe(
fs.createWriteStream('./x.tgz')
)
// Outputs:
metadata: { ... }
-hashAlgorithm: 'sha512'
-digest: deadbeef
+integrity: 'sha512-SoMeDIGest+64=='
// Look up by digest
cache.get.stream.byDigest(
- cachePath, 'deadbeef', { hashAlgorithm: 'sha512' }
+ cachePath, 'sha512-SoMeDIGest+64=='
).pipe(
fs.createWriteStream('./x.tgz')
)
@@ -260,8 +255,7 @@ one exists.
##### Fields
* `key` - Key the entry was looked up under. Matches the `key` argument.
-* `digest` - Content digest the entry refers to.
-* `hashAlgorithm` - Hashing algorithm used to generate `digest`.
+* `integrity` - [Subresource Integrity hash](#integrity) for the content this entry refers to.
* `path` - Filesystem path relative to `cache` argument where content is stored.
* `time` - Timestamp the entry was first added on.
* `metadata` - User-assigned metadata associated with the entry/content.
@@ -274,7 +268,7 @@ cacache.get.info(cachePath, 'my-thing').then(console.log)
// Output
{
key: 'my-thing',
- digest: 'deadbeef',
+    integrity: 'sha256-MUSTVERIFY+ALL/THINGS==',
path: '.testcache/content/deadbeef',
time: 12345698490,
metadata: {
@@ -298,8 +292,8 @@ fetch(
'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
).then(data => {
return cacache.put(cachePath, 'registry.npmjs.org|cacache@1.0.0', data)
-}).then(digest => {
- console.log('digest is', digest)
+}).then(integrity => {
+ console.log('integrity hash is', integrity)
})
```
@@ -307,7 +301,7 @@ fetch(
Returns a [Writable
Stream](https://nodejs.org/api/stream.html#stream_writable_streams) that inserts
-data written to it into the cache. Emits a `digest` event with the digest of
+data written to it into the cache. Emits an `integrity` event with the integrity of the
written contents when it succeeds.
##### Example
@@ -318,7 +312,7 @@ request.get(
).pipe(
cacache.put.stream(
cachePath, 'registry.npmjs.org|cacache@1.0.0'
- ).on('digest', d => console.log('digest is ${d}'))
+ ).on('integrity', d => console.log(`integrity digest is ${d}`))
)
```
@@ -336,22 +330,23 @@ If provided, the data stream will be verified to check that enough data was
passed through. If there's more or less data than expected, insertion will fail
with an `EBADSIZE` error.
-##### `digest`
+##### `integrity`
If present, the pre-calculated digest for the inserted content. If this option
is provided and does not match the post-insertion digest, insertion will fail
with an `EBADCHECKSUM` error.
-To control the hashing algorithm, use `opts.hashAlgorithm`.
+`hashAlgorithm` has no effect if this option is present.
##### `hashAlgorithm`
Default: 'sha512'
-Hashing algorithm to use when calculating the digest for inserted data. Can use
-any algorithm listed in `crypto.getHashes()` or `'omakase'`/`'お任せします'` to
-pick a random hash algorithm on each insertion. You may also use any anagram of
-`'modnar'` to use this feature.
+Hashing algorithm to use when calculating the [subresource integrity
+digest](#integrity)
+for inserted data. Can use any algorithm listed in `crypto.getHashes()` or
+`'omakase'`/`'お任せします'` to pick a random hash algorithm on each insertion. You
+may also use any anagram of `'modnar'` to use this feature.
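+For illustration, here's a sketch of how these two options interact (reusing
+names from the examples above; the integrity value is a placeholder):
+
+```javascript
+// Fails with EBADCHECKSUM if `data` does not match the given integrity.
+cacache.put(cachePath, key, data, {
+  integrity: 'sha512-BaSe64HaSh'
+})
+
+// Hashes `data` with sha1 instead of the sha512 default. This option is
+// ignored if `opts.integrity` is also present.
+cacache.put(cachePath, key, data, {
+  hashAlgorithm: 'sha1'
+})
+```
+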
##### `uid`/`gid`
@@ -395,6 +390,10 @@ Alias: `cacache.rm`
Removes the index entry for `key`. Content will still be accessible if
requested directly by content address ([`get.stream.byDigest`](#get-stream)).
+To remove the content itself (which might still be used by other entries), use
+[`rm.content`](#rm-content). Or, to safely vacuum any unused content, use
+[`verify`](#verify).
+
##### Example
```javascript
@@ -403,16 +402,16 @@ cacache.rm.entry(cachePath, 'my-thing').then(() => {
})
```
-#### `> cacache.rm.content(cache, digest) -> Promise`
+#### `> cacache.rm.content(cache, integrity) -> Promise`
-Removes the content identified by `digest`. Any index entries referring to it
+Removes the content identified by `integrity`. Any index entries referring to it
will not be usable again until the content is re-added to the cache with an
identical digest.
##### Example
```javascript
-cacache.rm.content(cachePath, 'deadbeef').then(() => {
+cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
console.log('data for my-thing is gone!')
})
```
@@ -462,6 +461,46 @@ cacache.tmp.withTmp(cache, dir => {
})
```
+#### <a name="integrity"></a> Subresource Integrity Digests
+
+For content verification and addressing, cacache uses strings following the
+[Subresource
+Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity)
+spec. That is, any time cacache expects an `integrity` argument or option, it
+should be in the format `<hashAlgorithm>-<base64-digest>`.
+
+One deviation from the current spec is that cacache supports any hash
+algorithm available in the underlying Node.js process. You can use
+`crypto.getHashes()` to see which ones you can use.
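+As a quick sanity check, you can list what your Node.js build accepts (the
+output below is illustrative and varies by build):
+
+```javascript
+const crypto = require('crypto')
+console.log(crypto.getHashes()) // e.g. ['sha1', 'sha256', 'sha512', 'whirlpool', ...]
+```
+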
+
+##### Generating Digests Yourself
+
+If you have an existing content shasum, it is generally formatted as a
+hexadecimal string (that is, a sha1 would look like:
+`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`). In order to be compatible with
+cacache, you'll need to convert this to an equivalent subresource integrity
+string. For this example, the corresponding hash would be:
+`sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`.
+
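+The conversion is just hex-to-base64 over the raw digest bytes. A minimal
+sketch:
+
+```javascript
+const hex = '5f5513f8822fdbe5145af33b64d8d970dcf95c6e'
+const integrity = 'sha1-' + Buffer.from(hex, 'hex').toString('base64')
+// => 'sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4='
+```
+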
+If you want to generate an integrity string yourself for existing data, you can
+use something like this:
+
+```javascript
+const crypto = require('crypto')
+const hashAlgorithm = 'sha512'
+const data = 'foobarbaz'
+
+const integrity = (
+ hashAlgorithm +
+ '-' +
+ crypto.createHash(hashAlgorithm).update(data).digest('base64')
+)
+```
+
+You can also use [`ssri`](https://npm.im/ssri) to have a richer set of functionality
+around SRI strings, including generation, parsing, and translating from existing
+hex-formatted strings.
+
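+For example, a sketch of common `ssri` operations (the same calls cacache
+itself now uses):
+
+```javascript
+const ssri = require('ssri')
+
+// Generate an integrity value directly from data.
+const integrity = ssri.fromData('foobarbaz', {algorithms: ['sha512']})
+
+// Translate an existing hex-formatted sha1 digest.
+const fromHex = ssri.fromHex('5f5513f8822fdbe5145af33b64d8d970dcf95c6e', 'sha1')
+fromHex.toString() // 'sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4='
+```
+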
#### `> cacache.verify(cache, opts) -> Promise`
Checks out and fixes up your cache:
@@ -469,7 +508,7 @@ Checks out and fixes up your cache:
* Cleans up corrupted or invalid index entries.
* Custom entry filtering options.
* Garbage collects any content entries not referenced by the index.
-* Checks digests for all content entries and removes invalid content.
+* Checks integrity for all content entries and removes invalid content.
* Fixes cache ownership.
* Removes the `tmp` directory in the cache and all its contents.
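+A basic maintenance run might look like this (a sketch; the resolved value
+carries the collected verification stats):
+
+```javascript
+cacache.verify(cachePath).then(stats => {
+  console.log('cache verified and fixed up:', stats)
+})
+```
+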
diff --git a/get.js b/get.js
index 829b397..9d4c396 100644
--- a/get.js
+++ b/get.js
@@ -17,18 +17,16 @@ module.exports.byDigest = function getByDigest (cache, digest, opts) {
}
function getData (byDigest, cache, key, opts) {
opts = opts || {}
- opts.hashAlgorithm = opts.hashAlgorithm || 'sha512'
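+  // Consult the in-memory memoization cache before touching the index or disk.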
const memoized = (
byDigest
- ? memo.get.byDigest(cache, key, opts.hashAlgorithm)
+ ? memo.get.byDigest(cache, key)
: memo.get(cache, key)
)
if (memoized && opts.memoize !== false) {
return BB.resolve(byDigest ? memoized : {
metadata: memoized.entry.metadata,
data: memoized.data,
- digest: memoized.entry.digest,
- hashAlgorithm: memoized.entry.hashAlgorithm
+ integrity: memoized.entry.integrity
})
}
return (
@@ -37,17 +35,16 @@ function getData (byDigest, cache, key, opts) {
if (!entry && !byDigest) {
throw new index.NotFoundError(cache, key)
}
- return read(cache, byDigest ? key : entry.digest, {
- hashAlgorithm: byDigest ? opts.hashAlgorithm : entry.hashAlgorithm,
+ return read(cache, byDigest ? key : entry.integrity, {
+ integrity: opts.integrity,
size: opts.size
}).then(data => byDigest ? data : {
metadata: entry.metadata,
data: data,
- digest: entry.digest,
- hashAlgorithm: entry.hashAlgorithm
+ integrity: entry.integrity
}).then(res => {
if (opts.memoize && byDigest) {
- memo.put.byDigest(cache, key, opts.hashAlgorithm, res)
+ memo.put.byDigest(cache, key, res)
} else if (opts.memoize) {
memo.put(cache, entry, res.data)
}
@@ -64,8 +61,7 @@ function getStream (cache, key, opts) {
if (memoized && opts.memoize !== false) {
stream.on('newListener', function (ev, cb) {
ev === 'metadata' && cb(memoized.entry.metadata)
- ev === 'digest' && cb(memoized.entry.digest)
- ev === 'hashAlgorithm' && cb(memoized.entry.hashAlgorithm)
+ ev === 'integrity' && cb(memoized.entry.integrity)
})
stream.write(memoized.data, () => stream.end())
return stream
@@ -91,18 +87,14 @@ function getStream (cache, key, opts) {
} else {
memoStream = through()
}
- // TODO - don't overwrite someone else's `opts`.
- opts.hashAlgorithm = entry.hashAlgorithm
stream.emit('metadata', entry.metadata)
- stream.emit('hashAlgorithm', entry.hashAlgorithm)
- stream.emit('digest', entry.digest)
+ stream.emit('integrity', entry.integrity)
stream.on('newListener', function (ev, cb) {
ev === 'metadata' && cb(entry.metadata)
- ev === 'digest' && cb(entry.digest)
- ev === 'hashAlgorithm' && cb(entry.hashAlgorithm)
+ ev === 'integrity' && cb(entry.integrity)
})
pipe(
- read.readStream(cache, entry.digest, opts),
+ read.readStream(cache, entry.integrity, opts),
memoStream,
stream
)
@@ -111,16 +103,15 @@ function getStream (cache, key, opts) {
}
module.exports.stream.byDigest = getStreamDigest
-function getStreamDigest (cache, digest, opts) {
+function getStreamDigest (cache, integrity, opts) {
opts = opts || {}
- opts.hashAlgorithm = opts.hashAlgorithm || 'sha512'
- const memoized = memo.get.byDigest(cache, digest, opts.hashAlgorithm)
+ const memoized = memo.get.byDigest(cache, integrity)
if (memoized && opts.memoize !== false) {
const stream = through()
stream.write(memoized, () => stream.end())
return stream
} else {
- let stream = read.readStream(cache, digest, opts)
+ let stream = read.readStream(cache, integrity, opts)
if (opts.memoize) {
let memoData = []
let memoLength = 0
@@ -131,8 +122,7 @@ function getStreamDigest (cache, digest, opts) {
}, cb => {
memoData && memo.put.byDigest(
cache,
- digest,
- opts.hashAlgorithm,
+ integrity,
Buffer.concat(memoData, memoLength)
)
cb()
diff --git a/lib/content/path.js b/lib/content/path.js
index 0d2ea44..fa6491b 100644
--- a/lib/content/path.js
+++ b/lib/content/path.js
@@ -1,21 +1,23 @@
'use strict'
-var contentVer = require('../../package.json')['cache-version'].content
-var hashToSegments = require('../util/hash-to-segments')
-var path = require('path')
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
// Current format of content file path:
//
-// ~/.my-cache/content-v1/sha512/ba/bada55deadbeefc0ffee
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
//
module.exports = contentPath
-function contentPath (cache, address, hashAlgorithm) {
- address = address && address.toLowerCase()
- hashAlgorithm = hashAlgorithm ? hashAlgorithm.toLowerCase() : 'sha512'
+function contentPath (cache, integrity) {
+ const sri = ssri.parse(integrity, {single: true})
+ // contentPath is the *strongest* algo given
return path.join.apply(path, [
contentDir(cache),
- hashAlgorithm
- ].concat(hashToSegments(address)))
+ sri.algorithm
+ ].concat(hashToSegments(sri.hexDigest())))
}
module.exports._contentDir = contentDir
diff --git a/lib/content/read.js b/lib/content/read.js
index a8a0252..23bc013 100644
--- a/lib/content/read.js
+++ b/lib/content/read.js
@@ -2,57 +2,86 @@
const BB = require('bluebird')
-const checksumStream = require('checksum-stream')
const contentPath = require('./path')
-const crypto = require('crypto')
const fs = require('graceful-fs')
-const pipeline = require('mississippi').pipeline
+const PassThrough = require('stream').PassThrough
+const pipe = BB.promisify(require('mississippi').pipe)
+const ssri = require('ssri')
BB.promisifyAll(fs)
module.exports = read
-function read (cache, address, opts) {
+function read (cache, integrity, opts) {
opts = opts || {}
- const algo = opts.hashAlgorithm || 'sha512'
- const cpath = contentPath(cache, address, algo)
- return fs.readFileAsync(cpath, null).then(data => {
- const digest = crypto.createHash(algo).update(data).digest('hex')
- if (typeof opts.size === 'number' && opts.size !== data.length) {
- throw sizeError(opts.size, data.length)
- } else if (digest !== address) {
- throw checksumError(address, digest)
- } else {
- return data
- }
+ return pickContentSri(cache, integrity).then(sri => {
+ const cpath = contentPath(cache, sri)
+ return fs.readFileAsync(cpath, null).then(data => {
+ if (typeof opts.size === 'number' && opts.size !== data.length) {
+ throw sizeError(opts.size, data.length)
+ } else if (ssri.checkData(data, sri)) {
+ return data
+ } else {
+        throw checksumError(sri, cpath)
+ }
+ })
})
}
module.exports.stream = readStream
module.exports.readStream = readStream
-function readStream (cache, address, opts) {
+function readStream (cache, integrity, opts) {
opts = opts || {}
- const cpath = contentPath(cache, address, opts.hashAlgorithm || 'sha512')
- return pipeline(
- fs.createReadStream(cpath), checksumStream({
- digest: address,
- algorithm: opts.hashAlgorithm || 'sha512',
- size: opts.size
- })
- )
+ const stream = new PassThrough()
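+  // Return the stream synchronously; the file read is wired up asynchronously
+  // once a locally-available SRI variant has been picked.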
+ pickContentSri(
+ cache, integrity
+ ).then(sri => {
+ return pipe(
+ fs.createReadStream(contentPath(cache, sri)),
+ ssri.integrityStream({
+ integrity: sri,
+ size: opts.size
+ }),
+ stream
+ )
+ }).catch(err => {
+ stream.emit('error', err)
+ })
+ return stream
}
module.exports.hasContent = hasContent
-function hasContent (cache, address, algorithm) {
- if (!address) { return BB.resolve(false) }
- return fs.lstatAsync(
- contentPath(cache, address, algorithm || 'sha512')
- ).then(() => true)
+function hasContent (cache, integrity) {
+ if (!integrity) { return BB.resolve(false) }
+ return pickContentSri(cache, integrity, true)
.catch({code: 'ENOENT'}, () => false)
.catch({code: 'EPERM'}, err => {
if (process.platform !== 'win32') {
throw err
+ } else {
+ return false
}
- })
+ }).then(sri => sri || false)
+}
+
+module.exports._pickContentSri = pickContentSri
+function pickContentSri (cache, integrity, checkFs) {
+ const sri = ssri.parse(integrity)
+ // If `integrity` has multiple entries, pick the first digest
+ // with available local data.
+ const algo = sri.pickAlgorithm()
+ const digests = sri[algo]
+ if (digests.length <= 1) {
+ const cpath = contentPath(cache, digests[0])
+ if (checkFs) {
+ return fs.lstatAsync(cpath).then(() => digests[0])
+ } else {
+ return BB.resolve(digests[0])
+ }
+ } else {
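+    // Multiple digests: check each one and settle on the first that has
+    // content available on disk.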
+    return BB.any(digests.map(meta => {
+ return pickContentSri(cache, meta, true)
+ }))
+ }
}
function sizeError (expected, found) {
@@ -63,10 +92,10 @@ function sizeError (expected, found) {
return err
}
-function checksumError (expected, found) {
- var err = new Error('checksum failed')
+function checksumError (sri, path) {
+ var err = new Error(`Checksum failed for ${sri} (${path})`)
err.code = 'EBADCHECKSUM'
- err.expected = expected
- err.found = found
+ err.sri = sri
+ err.path = path
return err
}
diff --git a/lib/content/rm.js b/lib/content/rm.js
index 218b427..4893b9a 100644
--- a/lib/content/rm.js
+++ b/lib/content/rm.js
@@ -1,13 +1,16 @@
'use strict'
-var BB = require('bluebird')
+const BB = require('bluebird')
-var contentPath = require('./path')
-var rimraf = BB.promisify(require('rimraf'))
+const contentPath = require('./path')
+const hasContent = require('./read').hasContent
+const rimraf = BB.promisify(require('rimraf'))
module.exports = rm
-function rm (cache, address, algorithm) {
- address = address.toLowerCase()
- algorithm = algorithm && algorithm.toLowerCase()
- return rimraf(contentPath(cache, address, algorithm || 'sha512'))
+function rm (cache, integrity) {
+ return hasContent(cache, integrity).then(sri => {
+ if (sri) {
+ return rimraf(contentPath(cache, sri))
+ }
+ })
}
diff --git a/lib/content/write.js b/lib/content/write.js
index a89194c..0cfbaa5 100644
--- a/lib/content/write.js
+++ b/lib/content/write.js
@@ -2,15 +2,14 @@
const BB = require('bluebird')
-const checksumStream = require('checksum-stream')
const contentPath = require('./path')
-const crypto = require('crypto')
const fixOwner = require('../util/fix-owner')
const fs = require('graceful-fs')
const moveFile = require('../util/move-file')
const path = require('path')
const pipe = require('mississippi').pipe
const rimraf = BB.promisify(require('rimraf'))
+const ssri = require('ssri')
const through = require('mississippi').through
const to = require('mississippi').to
const uniqueFilename = require('unique-filename')
@@ -20,22 +19,25 @@ const writeFileAsync = BB.promisify(fs.writeFile)
module.exports = write
function write (cache, data, opts) {
opts = opts || {}
- const digest = crypto.createHash(
- opts.hashAlgorithm || 'sha512'
- ).update(data).digest('hex')
+ if (opts.algorithms && opts.algorithms.length > 1) {
+ throw new Error(
+ 'opts.algorithms only supports a single algorithm for now'
+ )
+ }
if (typeof opts.size === 'number' && data.length !== opts.size) {
return BB.reject(sizeError(opts.size, data.length))
}
- if (opts.digest && digest !== opts.digest) {
- return BB.reject(checksumError(opts.digest, digest))
+ const sri = ssri.fromData(data, opts)
+ if (opts.integrity && !ssri.checkData(data, opts.integrity, opts)) {
+ return BB.reject(checksumError(opts.integrity, sri))
}
return BB.using(makeTmp(cache, opts), tmp => (
writeFileAsync(
tmp.target, data, {flag: 'wx'}
).then(() => (
- moveToDestination(tmp, cache, digest, opts)
+ moveToDestination(tmp, cache, sri, opts)
))
- )).then(() => digest)
+ )).then(() => sri)
}
module.exports.stream = writeStream
@@ -60,8 +62,8 @@ function writeStream (cache, opts) {
e.code = 'ENODATA'
return ret.emit('error', e)
}
- allDone.then(digest => {
- digest && ret.emit('digest', digest)
+ allDone.then(sri => {
+ sri && ret.emit('integrity', sri)
cb()
}, e => {
ret.emit('error', e)
@@ -79,22 +81,22 @@ function handleContent (inputStream, cache, opts, errCheck) {
errCheck()
return pipeToTmp(
inputStream, cache, tmp.target, opts, errCheck
- ).then(digest => {
+ ).then(sri => {
return moveToDestination(
- tmp, cache, digest, opts, errCheck
- ).then(() => digest)
+ tmp, cache, sri, opts, errCheck
+ ).then(() => sri)
})
})
}
function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) {
- let digest
- const hashStream = checksumStream({
- digest: opts.digest,
- algorithm: opts.hashAlgorithm || 'sha512',
+ let sri
+ const hashStream = ssri.integrityStream({
+ integrity: opts.integrity,
+ algorithms: opts.algorithms,
size: opts.size
- }).on('digest', d => {
- digest = d
+ }).on('integrity', s => {
+ sri = s
})
let outStream = new BB((resolve, reject) => {
@@ -113,7 +115,7 @@ function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) {
if (err) {
rimraf(tmpTarget).then(() => reject(err), reject)
} else {
- resolve(digest)
+ resolve(sri)
}
})
})
@@ -130,9 +132,9 @@ function makeTmp (cache, opts) {
})).disposer(tmp => (!tmp.moved && rimraf(tmp.target)))
}
-function moveToDestination (tmp, cache, digest, opts, errCheck) {
+function moveToDestination (tmp, cache, sri, opts, errCheck) {
errCheck && errCheck()
- const destination = contentPath(cache, digest, opts.hashAlgorithm)
+ const destination = contentPath(cache, sri)
const destDir = path.dirname(destination)
return fixOwner.mkdirfix(
diff --git a/lib/entry-index.js b/lib/entry-index.js
index 087dbfd..6631327 100644
--- a/lib/entry-index.js
+++ b/lib/entry-index.js
@@ -1,13 +1,15 @@
'use strict'
+const BB = require('bluebird')
+
const contentPath = require('./content/path')
const crypto = require('crypto')
const fixOwner = require('./util/fix-owner')
const fs = require('graceful-fs')
-const path = require('path')
-const BB = require('bluebird')
-const ms = require('mississippi')
const hashToSegments = require('./util/hash-to-segments')
+const ms = require('mississippi')
+const path = require('path')
+const ssri = require('ssri')
const indexV = require('../package.json')['cache-version'].index
@@ -27,14 +29,13 @@ module.exports.NotFoundError = class NotFoundError extends Error {
}
module.exports.insert = insert
-function insert (cache, key, digest, opts) {
+function insert (cache, key, integrity, opts) {
opts = opts || {}
const bucket = bucketPath(cache, key)
const entry = {
- key: key,
- digest: digest,
- hashAlgorithm: opts.hashAlgorithm || 'sha512',
- time: +(new Date()),
+ key,
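+    // Normalize ssri objects or strings into a canonical SRI string for storage.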
+ integrity: integrity && ssri.stringify(integrity),
+ time: Date.now(),
metadata: opts.metadata
}
return fixOwner.mkdirfix(
@@ -85,8 +86,8 @@ function find (cache, key) {
}
module.exports.delete = del
-function del (cache, key) {
- return insert(cache, key, null)
+function del (cache, key, opts) {
+ return insert(cache, key, null, opts)
}
module.exports.lsStream = lsStream
@@ -200,12 +201,11 @@ function hash (str, digest) {
function formatEntry (cache, entry) {
// Treat null integrity values as deletions. They'll shadow any previous entries.
- if (!entry.digest) { return null }
+ if (!entry.integrity) { return null }
return {
key: entry.key,
- digest: entry.digest,
- hashAlgorithm: entry.hashAlgorithm,
- path: contentPath(cache, entry.digest, entry.hashAlgorithm),
+ integrity: entry.integrity,
+ path: contentPath(cache, entry.integrity),
time: entry.time,
metadata: entry.metadata
}
diff --git a/lib/memoization.js b/lib/memoization.js
index 0e94c2a..2d2837a 100644
--- a/lib/memoization.js
+++ b/lib/memoization.js
@@ -28,12 +28,12 @@ function clearMemoized () {
module.exports.put = put
function put (cache, entry, data) {
MEMOIZED[`key:${cache}:${entry.key}`] = { entry, data }
- putDigest(cache, entry.digest, entry.hashAlgorithm, data)
+ putDigest(cache, entry.integrity, data)
}
module.exports.put.byDigest = putDigest
-function putDigest (cache, digest, algo, data) {
- MEMOIZED[`digest:${cache}:${algo}:${digest}`] = data
+function putDigest (cache, integrity, data) {
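+  // Memoized content is keyed by cache path plus the full SRI string.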
+ MEMOIZED[`digest:${cache}:${integrity}`] = data
}
module.exports.get = get
@@ -42,6 +42,6 @@ function get (cache, key) {
}
module.exports.get.byDigest = getDigest
-function getDigest (cache, digest, algo) {
- return MEMOIZED[`digest:${cache}:${algo}:${digest}`]
+function getDigest (cache, integrity) {
+ return MEMOIZED[`digest:${cache}:${integrity}`]
}
diff --git a/lib/verify.js b/lib/verify.js
index f136afa..ad2c892 100644
--- a/lib/verify.js
+++ b/lib/verify.js
@@ -2,7 +2,6 @@
const BB = require('bluebird')
-const checksumStream = require('checksum-stream')
const contentPath = require('./content/path')
const finished = BB.promisify(require('mississippi').finished)
const fixOwner = require('./util/fix-owner')
@@ -12,6 +11,7 @@ const index = require('./entry-index')
const path = require('path')
const pipe = BB.promisify(require('mississippi').pipe)
const rimraf = BB.promisify(require('rimraf'))
+const ssri = require('ssri')
BB.promisifyAll(fs)
@@ -65,7 +65,7 @@ function fixPerms (cache, opts) {
//
// The algorithm is basically as follows:
// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each algo/digest combo as "live"
+// 2. Mark each integrity value as "live"
// 3. Read entire filesystem tree in `content-vX/` dir
// 4. If content is live, verify its checksum and delete it if it fails
// 5. If content is not marked as live, rimraf it.
@@ -76,7 +76,7 @@ function garbageCollect (cache, opts) {
const liveContent = new Set()
indexStream.on('data', entry => {
if (opts && opts.filter && !opts.filter(entry)) { return }
- liveContent.add(`${entry.hashAlgorithm}-${entry.digest}`)
+ liveContent.add(entry.integrity.toString())
})
return finished(indexStream).then(() => {
const contentDir = contentPath._contentDir(cache)
@@ -95,8 +95,9 @@ function garbageCollect (cache, opts) {
const split = f.split(/[/\\]/)
const digest = split.slice(split.length - 3).join('')
const algo = split[split.length - 4]
- if (liveContent.has(`${algo}-${digest}`)) {
- return verifyContent(f, digest, algo).then(info => {
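+      // Reassemble the hex digest from the sharded content path
+      // (content-vN/<algo>/xx/yy/zz...) and convert it back to an SRI.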
+ const integrity = ssri.fromHex(digest, algo)
+ if (liveContent.has(integrity.toString())) {
+ return verifyContent(f, integrity).then(info => {
if (!info.valid) {
stats.reclaimedCount++
stats.badContentCount++
@@ -122,16 +123,17 @@ function garbageCollect (cache, opts) {
})
}
-function verifyContent (filepath, digest, algorithm) {
+function verifyContent (filepath, sri) {
return fs.statAsync(filepath).then(stat => {
- const reader = fs.createReadStream(filepath)
- const checksummer = checksumStream({digest, algorithm})
const contentInfo = {
size: stat.size,
valid: true
}
- checksummer.on('data', () => {})
- return pipe(reader, checksummer).catch({code: 'EBADCHECKSUM'}, () => {
+ return ssri.checkStream(
+ fs.createReadStream(filepath),
+ sri
+ ).catch(err => {
+ if (err.code !== 'EBADCHECKSUM') { throw err }
return rimraf(filepath).then(() => {
contentInfo.valid = false
})
@@ -178,12 +180,11 @@ function rebuildBucket (cache, bucket, stats, opts) {
// This needs to be serialized because cacache explicitly
// lets very racy bucket conflicts clobber each other.
return BB.mapSeries(bucket, entry => {
- const content = contentPath(cache, entry.digest, entry.hashAlgorithm)
+ const content = contentPath(cache, entry.integrity)
return fs.statAsync(content).then(() => {
- return index.insert(cache, entry.key, entry.digest, {
+ return index.insert(cache, entry.key, entry.integrity, {
uid: opts.uid,
gid: opts.gid,
- hashAlgorithm: entry.hashAlgorithm,
metadata: entry.metadata
}).then(() => { stats.totalEntries++ })
}).catch({code: 'ENOENT'}, () => {
diff --git a/package.json b/package.json
index 98f02b6..2e7a511 100644
--- a/package.json
+++ b/package.json
@@ -3,7 +3,7 @@
"version": "6.3.0",
"cache-version": {
"content": "2",
- "index": "3"
+ "index": "4"
},
"description": "General content-addressable cache system that maintains a filesystem registry of file data.",
"main": "index.js",
@@ -46,7 +46,6 @@
],
"license": "CC0-1.0",
"dependencies": {
- "move-concurrently": "^1.0.0",
"bluebird": "^3.4.7",
"checksum-stream": "^1.0.2",
"chownr": "^1.0.1",
@@ -55,8 +54,10 @@
"lru-cache": "^4.0.2",
"mississippi": "^1.2.0",
"mkdirp": "^0.5.1",
+ "move-concurrently": "^1.0.0",
"promise-inflight": "^1.0.1",
"rimraf": "^2.6.1",
+ "ssri": "^3.0.0",
"unique-filename": "^1.1.0"
},
"devDependencies": {
diff --git a/put.js b/put.js
index f0ce9f7..88c5f3b 100644
--- a/put.js
+++ b/put.js
@@ -8,12 +8,12 @@ const to = require('mississippi').to
module.exports = putData
function putData (cache, key, data, opts) {
opts = opts || {}
- return write(cache, data, opts).then(digest => {
- return index.insert(cache, key, digest, opts).then(entry => {
+ return write(cache, data, opts).then(integrity => {
+ return index.insert(cache, key, integrity, opts).then(entry => {
if (opts.memoize) {
memo.put(cache, entry, data)
}
- return digest
+ return integrity
})
})
}
@@ -21,9 +21,9 @@ function putData (cache, key, data, opts) {
module.exports.stream = putStream
function putStream (cache, key, opts) {
opts = opts || {}
- let digest
- const contentStream = write.stream(cache, opts).on('digest', d => {
- digest = d
+ let integrity
+ const contentStream = write.stream(cache, opts).on('integrity', int => {
+ integrity = int
})
let memoData
let memoTotal = 0
@@ -38,11 +38,11 @@ function putStream (cache, key, opts) {
})
}, cb => {
contentStream.end(() => {
- index.insert(cache, key, digest, opts).then(entry => {
+ index.insert(cache, key, integrity, opts).then(entry => {
if (opts.memoize) {
memo.put(cache, entry, Buffer.concat(memoData, memoTotal))
}
- stream.emit('digest', digest)
+ stream.emit('integrity', integrity)
cb()
})
})
diff --git a/rm.js b/rm.js
index 58b48ca..e71a1d2 100644
--- a/rm.js
+++ b/rm.js
@@ -16,9 +16,9 @@ function entry (cache, key) {
}
module.exports.content = content
-function content (cache, address) {
+function content (cache, integrity) {
memo.clearMemoized()
- return rmContent(cache, address)
+ return rmContent(cache, integrity)
}
module.exports.all = all
diff --git a/test/benchmarks/content.read.js b/test/benchmarks/content.read.js
index 99247c4..fccfc11 100644
--- a/test/benchmarks/content.read.js
+++ b/test/benchmarks/content.read.js
@@ -1,8 +1,8 @@
'use strict'
const CacheContent = require('../util/cache-content')
-const crypto = require('crypto')
const Tacks = require('tacks')
+const ssri = require('ssri')
const read = require('../../lib/content/read')
@@ -12,27 +12,27 @@ for (let i = 0; i < Math.pow(2, 8); i++) {
}
const CONTENT = Buffer.concat(buf, buf.length * 8)
-const DIGEST = crypto.createHash('sha512').update(CONTENT).digest('hex')
+const INTEGRITY = ssri.fromData(CONTENT)
const arr = []
for (let i = 0; i < 100; i++) {
arr.push(CONTENT)
}
const BIGCONTENT = Buffer.concat(arr, CONTENT.length * 1000)
-const BIGDIGEST = crypto.createHash('sha512').update(BIGCONTENT).digest('hex')
+const BIGINTEGRITY = ssri.fromData(BIGCONTENT)
module.exports = (suite, CACHE) => {
suite.add('content.read()', {
defer: true,
setup () {
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
+ [INTEGRITY]: CONTENT
}))
fixture.create(CACHE)
},
fn (deferred) {
read(
- CACHE, DIGEST
+ CACHE, INTEGRITY
).then(
() => deferred.resolve(),
err => deferred.reject(err)
@@ -44,13 +44,13 @@ module.exports = (suite, CACHE) => {
defer: true,
setup () {
const fixture = new Tacks(CacheContent({
- [BIGDIGEST]: BIGCONTENT
+ [BIGINTEGRITY]: BIGCONTENT
}))
fixture.create(CACHE)
},
fn (deferred) {
read(
- CACHE, BIGDIGEST
+ CACHE, BIGINTEGRITY
).then(
() => deferred.resolve(),
err => deferred.reject(err)
@@ -62,12 +62,12 @@ module.exports = (suite, CACHE) => {
defer: true,
setup () {
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
+ [INTEGRITY]: CONTENT
}))
fixture.create(CACHE)
},
fn (deferred) {
- const stream = read.stream(CACHE, DIGEST)
+ const stream = read.stream(CACHE, INTEGRITY)
stream.on('data', () => {})
stream.on('error', err => deferred.reject(err))
stream.on('end', () => {
@@ -80,12 +80,12 @@ module.exports = (suite, CACHE) => {
defer: true,
setup () {
const fixture = new Tacks(CacheContent({
- [BIGDIGEST]: BIGCONTENT
+ [BIGINTEGRITY]: BIGCONTENT
}))
fixture.create(CACHE)
},
fn (deferred) {
- const stream = read.stream(CACHE, BIGDIGEST)
+ const stream = read.stream(CACHE, BIGINTEGRITY)
stream.on('data', () => {})
stream.on('error', err => deferred.reject(err))
stream.on('end', () => {
diff --git a/test/benchmarks/get.js b/test/benchmarks/get.js
index 709e092..2ae03cd 100644
--- a/test/benchmarks/get.js
+++ b/test/benchmarks/get.js
@@ -1,9 +1,9 @@
'use strict'
const CacheContent = require('../util/cache-content')
-const crypto = require('crypto')
const memo = require('../../lib/memoization')
const Tacks = require('tacks')
+const ssri = require('ssri')
const get = require('../../get')
@@ -13,27 +13,27 @@ for (let i = 0; i < Math.pow(2, 8); i++) {
}
const CONTENT = Buffer.concat(buf, buf.length * 8)
-const DIGEST = crypto.createHash('sha512').update(CONTENT).digest('hex')
+const INTEGRITY = ssri.fromData(CONTENT)
const arr = []
for (let i = 0; i < 100; i++) {
arr.push(CONTENT)
}
const BIGCONTENT = Buffer.concat(arr, CONTENT.length * 1000)
-const BIGDIGEST = crypto.createHash('sha512').update(BIGCONTENT).digest('hex')
+const BIGINTEGRITY = ssri.fromData(BIGCONTENT)
module.exports = (suite, CACHE) => {
suite.add('get.byDigest()', {
defer: true,
setup () {
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
+ [INTEGRITY]: CONTENT
}))
fixture.create(CACHE)
},
fn (deferred) {
get.byDigest(
- CACHE, DIGEST
+ CACHE, INTEGRITY
).then(
() => deferred.resolve(),
err => deferred.reject(err)
@@ -44,11 +44,11 @@ module.exports = (suite, CACHE) => {
suite.add('get.byDigest() memoized', {
defer: true,
setup () {
- memo.put.byDigest(CACHE, DIGEST, 'sha512', CONTENT)
+ memo.put.byDigest(CACHE, INTEGRITY, CONTENT)
},
fn (deferred) {
get.byDigest(
- CACHE, DIGEST
+ CACHE, INTEGRITY
).then(
() => deferred.resolve(),
err => deferred.reject(err)
@@ -63,12 +63,12 @@ module.exports = (suite, CACHE) => {
defer: true,
setup () {
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
+ [INTEGRITY]: CONTENT
}))
fixture.create(CACHE)
},
fn (deferred) {
- const stream = get.stream.byDigest(CACHE, DIGEST, { memoize: false })
+ const stream = get.stream.byDigest(CACHE, INTEGRITY, { memoize: false })
stream.on('data', () => {})
stream.on('error', err => deferred.reject(err))
stream.on('end', () => {
@@ -81,12 +81,12 @@ module.exports = (suite, CACHE) => {
defer: true,
setup () {
const fixture = new Tacks(CacheContent({
- [BIGDIGEST]: BIGCONTENT
+ [BIGINTEGRITY]: BIGCONTENT
}))
fixture.create(CACHE)
},
fn (deferred) {
- const stream = get.stream.byDigest(CACHE, BIGDIGEST)
+ const stream = get.stream.byDigest(CACHE, BIGINTEGRITY)
stream.on('data', () => {})
stream.on('error', err => deferred.reject(err))
stream.on('end', () => {
diff --git a/test/benchmarks/index.find.js b/test/benchmarks/index.find.js
index a334863..e237c8e 100644
--- a/test/benchmarks/index.find.js
+++ b/test/benchmarks/index.find.js
@@ -19,8 +19,7 @@ module.exports = (suite, CACHE) => {
onStart () {
const entry = {
key: 'whatever',
- digest: 'deadbeef',
- hashAlgorithm: 'whatnot',
+ integrity: 'sha512-deadbeef',
time: 12345,
metadata: 'omgsometa'
}
diff --git a/test/benchmarks/index.insert.js b/test/benchmarks/index.insert.js
index 36c0bdb..9f2a08f 100644
--- a/test/benchmarks/index.insert.js
+++ b/test/benchmarks/index.insert.js
@@ -1,7 +1,7 @@
'use strict'
const KEY = 'foo'
-const DIGEST = 'deadbeef'
+const INTEGRITY = 'sha512-deadbeef'
const ALGO = 'whatnot'
const index = require('../../lib/entry-index')
@@ -10,9 +10,8 @@ module.exports = (suite, CACHE) => {
suite.add('index.insert() different files', {
defer: true,
fn (deferred) {
- index.insert(CACHE, KEY + this.count, DIGEST, {
- metadata: 'foo',
- hashAlgorithm: ALGO
+ index.insert(CACHE, KEY + this.count, INTEGRITY, {
+ metadata: 'foo'
}).then(
() => deferred.resolve(),
err => deferred.reject(err)
@@ -22,7 +21,7 @@ module.exports = (suite, CACHE) => {
suite.add('index.insert() same file', {
defer: true,
fn (deferred) {
- index.insert(CACHE, KEY, DIGEST, {
+ index.insert(CACHE, KEY, INTEGRITY, {
metadata: 'foo',
hashAlgorithm: ALGO
}).then(
diff --git a/test/content.read.js b/test/content.read.js
index 34f9160..514f85c 100644
--- a/test/content.read.js
+++ b/test/content.read.js
@@ -3,9 +3,9 @@
const Buffer = require('safe-buffer').Buffer
const BB = require('bluebird')
-const crypto = require('crypto')
const finished = BB.promisify(require('mississippi').finished)
const path = require('path')
+const ssri = require('ssri')
const Tacks = require('tacks')
const test = require('tap').test
const testDir = require('./util/test-dir')(__filename)
@@ -17,30 +17,30 @@ const read = require('../lib/content/read')
test('read: returns a BB with cache content data', function (t) {
const CONTENT = Buffer.from('foobarbaz')
- const DIGEST = crypto.createHash('sha512').update(CONTENT).digest('hex')
+ const INTEGRITY = ssri.fromData(CONTENT)
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
+ [INTEGRITY]: CONTENT
}))
fixture.create(CACHE)
- return read(CACHE, DIGEST).then(data => {
+ return read(CACHE, INTEGRITY).then(data => {
t.deepEqual(data, CONTENT, 'cache contents read correctly')
})
})
test('read.stream: returns a stream with cache content data', function (t) {
const CONTENT = Buffer.from('foobarbaz')
- const DIGEST = crypto.createHash('sha512').update(CONTENT).digest('hex')
+ const INTEGRITY = ssri.fromData(CONTENT)
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
+ [INTEGRITY]: CONTENT
}))
fixture.create(CACHE)
- const stream = read.stream(CACHE, DIGEST)
+ const stream = read.stream(CACHE, INTEGRITY)
stream.on('error', function (e) { throw e })
let buf = ''
stream.on('data', function (data) { buf += data })
return BB.join(
finished(stream).then(() => Buffer.from(buf)),
- read(CACHE, DIGEST),
+ read(CACHE, INTEGRITY, {size: CONTENT.length}),
(fromStream, fromBulk) => {
t.deepEqual(fromStream, CONTENT, 'stream data checks out')
t.deepEqual(fromBulk, CONTENT, 'promise data checks out')
@@ -51,20 +51,18 @@ test('read.stream: returns a stream with cache content data', function (t) {
test('read: allows hashAlgorithm configuration', function (t) {
const CONTENT = Buffer.from('foobarbaz')
const HASH = 'whirlpool'
- const DIGEST = crypto.createHash(HASH).update(CONTENT).digest('hex')
+ const INTEGRITY = ssri.fromData(CONTENT, {algorithms: [HASH]})
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
- }, HASH))
+ [INTEGRITY]: CONTENT
+ }))
fixture.create(CACHE)
- const stream = read.stream(CACHE, DIGEST, { hashAlgorithm: HASH })
+ const stream = read.stream(CACHE, INTEGRITY)
stream.on('error', function (e) { throw e })
let buf = ''
stream.on('data', function (data) { buf += data })
return BB.join(
finished(stream).then(() => Buffer.from(buf)),
- read(CACHE, DIGEST, {
- hashAlgorithm: HASH
- }),
+ read(CACHE, INTEGRITY),
(fromStream, fromBulk) => {
t.deepEqual(fromStream, CONTENT, 'stream used algorithm')
t.deepEqual(fromBulk, CONTENT, 'promise used algorithm')
@@ -73,7 +71,7 @@ test('read: allows hashAlgorithm configuration', function (t) {
})
test('read: errors if content missing', function (t) {
- const stream = read.stream(CACHE, 'whatnot')
+ const stream = read.stream(CACHE, 'sha512-whatnot')
stream.on('data', function (data) {
throw new Error('unexpected data: ' + JSON.stringify(data))
})
@@ -82,7 +80,7 @@ test('read: errors if content missing', function (t) {
})
return BB.join(
finished(stream).catch({code: 'ENOENT'}, err => err),
- read(CACHE, 'whatnot').catch({code: 'ENOENT'}, err => err),
+ read(CACHE, 'sha512-whatnot').catch({code: 'ENOENT'}, err => err),
(streamErr, bulkErr) => {
t.equal(streamErr.code, 'ENOENT', 'stream got the right error')
t.equal(bulkErr.code, 'ENOENT', 'bulk got the right error')
@@ -92,18 +90,18 @@ test('read: errors if content missing', function (t) {
test('read: errors if content fails checksum', function (t) {
const CONTENT = Buffer.from('foobarbaz')
- const DIGEST = crypto.createHash('sha512').update(CONTENT).digest('hex')
+ const INTEGRITY = ssri.fromData(CONTENT)
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT.slice(3) // invalid contents!
+ [INTEGRITY]: CONTENT.slice(3) // invalid contents!
}))
fixture.create(CACHE)
- const stream = read.readStream(CACHE, DIGEST)
+ const stream = read.readStream(CACHE, INTEGRITY)
stream.on('end', function () {
throw new Error('end was called even though stream errored')
})
return BB.join(
finished(stream).catch({code: 'EBADCHECKSUM'}, err => err),
- read(CACHE, DIGEST).catch({code: 'EBADCHECKSUM'}, err => err),
+ read(CACHE, INTEGRITY).catch({code: 'EBADCHECKSUM'}, err => err),
(streamErr, bulkErr) => {
t.equal(streamErr.code, 'EBADCHECKSUM', 'stream got the right error')
t.equal(bulkErr.code, 'EBADCHECKSUM', 'bulk got the right error')
@@ -113,18 +111,18 @@ test('read: errors if content fails checksum', function (t) {
test('read: errors if content size does not match size option', function (t) {
const CONTENT = Buffer.from('foobarbaz')
- const DIGEST = crypto.createHash('sha512').update(CONTENT).digest('hex')
+ const INTEGRITY = ssri.fromData(CONTENT)
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT.slice(3) // bad size!
+    [INTEGRITY]: CONTENT.slice(3) // wrong size!
}))
fixture.create(CACHE)
- const stream = read.readStream(CACHE, DIGEST, { size: CONTENT.length })
+ const stream = read.readStream(CACHE, INTEGRITY, { size: CONTENT.length })
stream.on('end', function () {
throw new Error('end was called even though stream errored')
})
return BB.join(
finished(stream).catch({code: 'EBADSIZE'}, err => err),
- read(CACHE, DIGEST, {
+ read(CACHE, INTEGRITY, {
size: CONTENT.length
}).catch({code: 'EBADSIZE'}, err => err),
(streamErr, bulkErr) => {
@@ -136,15 +134,15 @@ test('read: errors if content size does not match size option', function (t) {
test('hasContent: returns true when a cache file exists', function (t) {
const fixture = new Tacks(CacheContent({
- 'deadbeef': ''
+ 'sha1-deadbeef': ''
}))
fixture.create(CACHE)
return BB.join(
- read.hasContent(CACHE, 'deadbeef').then(bool => {
+ read.hasContent(CACHE, 'sha1-deadbeef').then(bool => {
t.ok(bool, 'returned true for existing content')
}),
- read.hasContent(CACHE, 'not-there').then(bool => {
- t.notOk(bool, 'returned false for missing content')
+ read.hasContent(CACHE, 'sha1-not-there').then(bool => {
+ t.equal(bool, false, 'returned false for missing content')
})
)
})
diff --git a/test/content.rm.js b/test/content.rm.js
index c14f30e..264ed42 100644
--- a/test/content.rm.js
+++ b/test/content.rm.js
@@ -1,5 +1,6 @@
'use strict'
+const contentPath = require('../lib/content/path')
const fs = require('graceful-fs')
const path = require('path')
const BB = require('bluebird')
@@ -10,21 +11,16 @@ const testDir = require('./util/test-dir')(__filename)
BB.promisifyAll(fs)
const CACHE = path.join(testDir, 'cache')
-const Dir = Tacks.Dir
-const File = Tacks.File
+const CacheContent = require('./util/cache-content')
const rm = require('../lib/content/rm')
test('removes a content entry', function (t) {
- const fixture = new Tacks(Dir({
- 'content': Dir({
- 'de': Dir({
- 'deadbeef': File('')
- })
- })
+ const fixture = new Tacks(CacheContent({
+ 'sha1-deadbeef': ''
}))
fixture.create(CACHE)
- return rm(CACHE, 'deadbeef').then(() => (
- fs.statAsync(path.join(CACHE, 'content', 'deadbeef'))
+ return rm(CACHE, 'sha1-deadbeef').then(() => (
+ fs.statAsync(contentPath(CACHE, 'sha1-deadbeef'))
)).then(() => {
throw new Error('expected an error')
}).catch(err => {
@@ -34,10 +30,10 @@ test('removes a content entry', function (t) {
})
test('works fine if entry missing', function (t) {
- const fixture = new Tacks(Dir({}))
+ const fixture = new Tacks(CacheContent({}))
fixture.create(CACHE)
- return rm(CACHE, 'deadbeef').then(() => (
- fs.statAsync(path.join(CACHE, 'content', 'deadbeef'))
+ return rm(CACHE, 'sha1-deadbeef').then(() => (
+ fs.statAsync(contentPath(CACHE, 'sha1-deadbeef'))
)).then(() => {
throw new Error('expected an error')
}).catch(err => {
diff --git a/test/content.write.chownr.js b/test/content.write.chownr.js
index bcd81b8..a64666d 100644
--- a/test/content.write.chownr.js
+++ b/test/content.write.chownr.js
@@ -1,30 +1,31 @@
'use strict'
-var crypto = require('crypto')
-var fromString = require('./util/from-string')
-var path = require('path')
-var pipe = require('mississippi').pipe
-var requireInject = require('require-inject')
-var test = require('tap').test
-var testDir = require('./util/test-dir')(__filename)
+const fromString = require('./util/from-string')
+const path = require('path')
+const pipe = require('mississippi').pipe
+const requireInject = require('require-inject')
+const ssri = require('ssri')
+const test = require('tap').test
+const testDir = require('./util/test-dir')(__filename)
-var CACHE = path.join(testDir, 'cache')
+const CACHE = path.join(testDir, 'cache')
-var contentPath = require('../lib/content/path')
+const contentPath = require('../lib/content/path')
test('allows setting a custom uid for cache contents on write', {
- skip: !process.getuid // On a platform that doesn't support uid/gid
-}, function (t) {
- var CONTENT = 'foobarbaz'
- var DIGEST = crypto.createHash('sha1').update(CONTENT).digest('hex')
- var NEWUID = process.getuid() + 1
- var NEWGID = process.getgid() + 1
- var updatedPaths = []
- var write = requireInject('../lib/content/write', {
+ skip: process.getuid ? false : 'test only works on platforms that can set uid/gid'
+}, t => {
+ const CONTENT = 'foobarbaz'
+ const INTEGRITY = ssri.fromData(CONTENT)
+ const NEWUID = process.getuid() + 1
+ const NEWGID = process.getgid() + 1
+ const updatedPaths = []
+ const write = requireInject('../lib/content/write', {
chownr: function (p, uid, gid, cb) {
process.nextTick(function () {
- t.equal(uid, NEWUID, 'new uid set')
- t.equal(gid, NEWGID, 'new gid set')
+ const rel = path.relative(CACHE, p)
+ t.equal(uid, NEWUID, 'new uid set for ' + rel)
+ t.equal(gid, NEWGID, 'new gid set for ' + rel)
updatedPaths.push(p)
cb(null)
})
@@ -37,8 +38,8 @@ test('allows setting a custom uid for cache contents on write', {
hashAlgorithm: 'sha1'
}), function (err) {
if (err) { throw err }
- const cpath = contentPath(CACHE, DIGEST, 'sha1')
- var expectedPaths = [
+ const cpath = contentPath(CACHE, INTEGRITY)
+ const expectedPaths = [
CACHE,
path.join(CACHE, path.relative(CACHE, cpath).split(path.sep)[0]),
cpath
diff --git a/test/content.write.js b/test/content.write.js
index 3a9edcb..2012e2b 100644
--- a/test/content.write.js
+++ b/test/content.write.js
@@ -6,154 +6,152 @@ const fs = require('fs')
const path = require('path')
const pipe = require('mississippi').pipe
const rimraf = require('rimraf')
+const ssri = require('ssri')
const Tacks = require('tacks')
const test = require('tap').test
const testDir = require('./util/test-dir')(__filename)
const CACHE = path.join(testDir, 'cache')
+const CacheContent = require('./util/cache-content')
const contentPath = require('../lib/content/path')
-const Dir = Tacks.Dir
-const File = Tacks.File
const write = require('../lib/content/write')
-test('basic put', function (t) {
+test('basic put', t => {
const CONTENT = 'foobarbaz'
// Default is sha512
- const DIGEST = crypto.createHash('sha512').update(CONTENT).digest('hex')
- let foundDigest
+ const INTEGRITY = ssri.fromData(CONTENT)
+ let integrity
const src = fromString(CONTENT)
- const stream = write.stream(CACHE).on('digest', function (d) {
- foundDigest = d
+ const stream = write.stream(CACHE).on('integrity', i => {
+ integrity = i
})
- pipe(src, stream, function (err) {
+ pipe(src, stream, err => {
if (err) { throw err }
- const cpath = contentPath(CACHE, foundDigest)
+ const cpath = contentPath(CACHE, integrity)
t.plan(3)
- t.equal(foundDigest, DIGEST, 'returned digest matches expected')
- fs.lstat(cpath, function (err, stat) {
+ t.deepEqual(integrity, INTEGRITY, 'calculated integrity value matches')
+ fs.lstat(cpath, (err, stat) => {
if (err) { throw err }
t.ok(stat.isFile(), 'content inserted as a single file')
})
- fs.readFile(cpath, 'utf8', function (err, data) {
+ fs.readFile(cpath, 'utf8', (err, data) => {
if (err) { throw err }
t.equal(data, CONTENT, 'contents are identical to inserted content')
})
})
})
-test('checks input digest doesn\'t match data', function (t) {
+test('checks input digest doesn\'t match data', t => {
const CONTENT = 'foobarbaz'
- const DIGEST = crypto.createHash('sha512').update(CONTENT).digest('hex')
+ const INTEGRITY = ssri.fromData(CONTENT)
t.plan(5)
- let foundDigest1
- let foundDigest2
+ let int1
+ let int2
pipe(fromString('bazbarfoo'), write.stream(CACHE, {
- digest: DIGEST
- }).on('digest', function (d) {
- foundDigest1 = d
- }), function (err) {
- t.ok(!foundDigest1, 'no digest emitted')
+ integrity: INTEGRITY
+ }).on('integrity', int => {
+ int1 = int
+ }), err => {
+ t.ok(!int1, 'no digest emitted')
t.ok(!!err, 'got an error')
t.equal(err.code, 'EBADCHECKSUM', 'returns a useful error code')
})
pipe(fromString(CONTENT), write.stream(CACHE, {
- digest: DIGEST
- }).on('digest', function (d) {
- foundDigest2 = d
- }), function (err) {
+ integrity: INTEGRITY
+ }).on('integrity', int => {
+ int2 = int
+ }), err => {
t.ok(!err, 'completed without error')
- t.equal(foundDigest2, DIGEST, 'returns a matching digest')
+ t.deepEqual(int2, INTEGRITY, 'returns a matching digest')
})
})
-test('errors if stream ends with no data', function (t) {
- let foundDigest = null
- pipe(fromString(''), write.stream(CACHE).on('digest', function (d) {
- foundDigest = d
- }), function (err) {
+test('errors if stream ends with no data', t => {
+ let integrity = null
+ pipe(fromString(''), write.stream(CACHE).on('integrity', int => {
+ integrity = int
+ }), err => {
t.ok(err, 'got an error')
- t.equal(foundDigest, null, 'no digest returned')
+ t.equal(integrity, null, 'no digest returned')
t.equal(err.code, 'ENODATA', 'returns useful error code')
t.end()
})
})
-test('errors if input size does not match expected', function (t) {
+test('errors if input size does not match expected', t => {
t.plan(10)
- let dig1 = null
+ let int1 = null
pipe(fromString('abc'), write.stream(CACHE, {
size: 5
- }).on('digest', function (d) {
- dig1 = d
- }), function (err) {
+ }).on('integrity', int => {
+ int1 = int
+ }), err => {
t.ok(err, 'got an error when data smaller than expected')
- t.equal(dig1, null, 'no digest returned')
+ t.equal(int1, null, 'no digest returned')
t.equal(err.code, 'EBADSIZE', 'returns useful error code')
t.equal(err.expected, 5, 'error includes expected size')
t.equal(err.found, 3, 'error includes found size')
})
- let dig2 = null
+ let int2 = null
pipe(fromString('abcdefghi'), write.stream(CACHE, {
size: 5
- }).on('digest', function (d) {
- dig2 = d
- }), function (err) {
+ }).on('integrity', int => {
+ int2 = int
+ }), err => {
t.ok(err, 'got an error when data bigger than expected')
- t.equal(dig2, null, 'no digest returned')
+ t.equal(int2, null, 'no digest returned')
t.equal(err.code, 'EBADSIZE', 'returns useful error code')
t.equal(err.expected, 5, 'error includes expected size')
t.equal(err.found, 9, 'error includes found size')
})
})
-test('does not overwrite content if already on disk', function (t) {
+test('does not overwrite content if already on disk', t => {
const CONTENT = 'foobarbaz'
- const DIGEST = crypto.createHash('sha512').update(CONTENT).digest('hex')
- const contentDir = {}
- contentDir[DIGEST] = File('nope')
- const fixture = new Tacks(Dir({
- 'content': Dir(contentDir)
+ const INTEGRITY = ssri.fromData(CONTENT)
+ const fixture = new Tacks(CacheContent({
+ [INTEGRITY]: 'nope'
}))
fixture.create(CACHE)
t.plan(4)
- let dig1
- let dig2
+ let int1
+ let int2
// With a digest -- early short-circuiting
pipe(fromString(CONTENT), write.stream(CACHE, {
- digest: DIGEST
- }).on('digest', function (d) {
- dig1 = d
- }), function (err) {
+ integrity: INTEGRITY
+ }).on('integrity', int => {
+ int1 = int
+ }), err => {
if (err) { throw err }
- t.equal(dig1, DIGEST, 'short-circuit returns a matching digest')
- fs.readFile(path.join(CACHE, 'content', DIGEST), 'utf8', function (e, d) {
+ t.deepEqual(int1, INTEGRITY, 'short-circuit returns a matching digest')
+ fs.readFile(contentPath(CACHE, INTEGRITY), 'utf8', (e, d) => {
if (e) { throw e }
t.equal(d, 'nope', 'process short-circuited. Data not written.')
})
})
- pipe(fromString(CONTENT), write.stream(CACHE).on('digest', function (d) {
- dig2 = d
- }), function (err) {
+ pipe(fromString(CONTENT), write.stream(CACHE).on('integrity', int => {
+ int2 = int
+ }), err => {
if (err) { throw err }
- t.equal(dig2, DIGEST, 'full write returns a matching digest')
- fs.readFile(path.join(CACHE, 'content', DIGEST), 'utf8', function (e, d) {
+ t.deepEqual(int2, INTEGRITY, 'full write returns a matching digest')
+ fs.readFile(contentPath(CACHE, INTEGRITY), 'utf8', function (e, d) {
if (e) { throw e }
t.equal(d, 'nope', 'previously-written data intact - no dupe write')
})
})
})
-test('errors if input stream errors', function (t) {
+test('errors if input stream errors', t => {
const stream = fromString('foobarbaz')
.on('end', () => stream.emit('error', new Error('bleh')))
- let foundDigest
- const putter = write.stream(CACHE).on('digest', function (d) {
- foundDigest = d
+ let integrity
+ const putter = write.stream(CACHE).on('integrity', int => {
+ integrity = int
})
- pipe(stream, putter, function (err) {
+ pipe(stream, putter, err => {
t.ok(err, 'got an error')
- t.ok(!foundDigest, 'no digest returned')
+ t.ok(!integrity, 'no digest returned')
t.match(err && err.message, 'bleh', 'returns the error from input stream')
fs.readdir(testDir, (err, files) => {
if (err) { throw err }
@@ -163,29 +161,26 @@ test('errors if input stream errors', function (t) {
})
})
-test('exits normally if file already open', function (t) {
+test('exits normally if file already open', t => {
const CONTENT = 'foobarbaz'
- const DIGEST = crypto.createHash('sha512').update(CONTENT).digest('hex')
- const PATH = path.join(CACHE, 'content', DIGEST)
- const contentDir = {}
- contentDir[DIGEST] = File(CONTENT)
- const fixture = new Tacks(Dir({
- 'content': Dir(contentDir)
+ const INTEGRITY = ssri.fromData(CONTENT)
+ const fixture = new Tacks(CacheContent({
+ [INTEGRITY]: CONTENT
}))
- let foundDigest
+ let integrity
fixture.create(CACHE)
// This case would only fail on Windows, when an entry is being read.
// Generally, you'd get an EBUSY back.
- fs.open(PATH, 'r+', function (err, fd) {
+ fs.open(contentPath(CACHE, INTEGRITY), 'r+', function (err, fd) {
if (err) { throw err }
- pipe(fromString(CONTENT), write.stream(CACHE).on('digest', function (d) {
- foundDigest = d
- }), function (err) {
+ pipe(fromString(CONTENT), write.stream(CACHE).on('integrity', int => {
+ integrity = int
+ }), err => {
if (err) { throw err }
- t.equal(foundDigest, DIGEST, 'returns a matching digest')
- fs.close(fd, function (err) {
+ t.deepEqual(integrity, INTEGRITY, 'returns a matching integrity')
+ fs.close(fd, err => {
if (err) { throw err }
- rimraf(PATH, function (err) {
+ rimraf(contentPath(CACHE, INTEGRITY), err => {
if (err) { throw err }
t.end()
})
@@ -194,9 +189,9 @@ test('exits normally if file already open', function (t) {
})
})
-test('cleans up tmp on successful completion', function (t) {
+test('cleans up tmp on successful completion', t => {
const CONTENT = 'foobarbaz'
- pipe(fromString(CONTENT), write.stream(CACHE), function (err) {
+ pipe(fromString(CONTENT), write.stream(CACHE), err => {
if (err) { throw err }
const tmp = path.join(CACHE, 'tmp')
fs.readdir(tmp, function (err, files) {
@@ -211,9 +206,9 @@ test('cleans up tmp on successful completion', function (t) {
})
})
-test('cleans up tmp on error', function (t) {
+test('cleans up tmp on error', t => {
const CONTENT = 'foobarbaz'
- pipe(fromString(CONTENT), write.stream(CACHE, { size: 1 }), function (err) {
+ pipe(fromString(CONTENT), write.stream(CACHE, { size: 1 }), err => {
t.ok(err, 'got an error')
t.equal(err.code, 'EBADSIZE', 'got expected code')
const tmp = path.join(CACHE, 'tmp')
@@ -229,18 +224,18 @@ test('cleans up tmp on error', function (t) {
})
})
-test('checks the size of stream data if opts.size provided', function (t) {
+test('checks the size of stream data if opts.size provided', t => {
const CONTENT = 'foobarbaz'
- let dig1, dig2, dig3
+ let int1, int2, int3
t.plan(8)
pipe(
fromString(CONTENT.slice(3)),
write.stream(CACHE, {
size: CONTENT.length
- }).on('digest', function (d) { dig1 = d }),
- function (err) {
+ }).on('integrity', int => { int1 = int }),
+ err => {
t.ok(!!err, 'got an error')
- t.ok(!dig1, 'no digest returned')
+ t.ok(!int1, 'no integrity returned')
t.equal(err.code, 'EBADSIZE', 'returns a useful error code')
}
)
@@ -248,10 +243,10 @@ test('checks the size of stream data if opts.size provided', function (t) {
fromString(CONTENT + 'quux'),
write.stream(CACHE, {
size: CONTENT.length
- }).on('digest', function (d) { dig2 = d }),
- function (err) {
+ }).on('integrity', int => { int2 = int }),
+ err => {
t.ok(!!err, 'got an error')
- t.ok(!dig2, 'no digest returned')
+ t.ok(!int2, 'no integrity returned')
t.equal(err.code, 'EBADSIZE', 'returns a useful error code')
}
)
@@ -259,10 +254,10 @@ test('checks the size of stream data if opts.size provided', function (t) {
fromString(CONTENT),
write.stream(CACHE, {
size: CONTENT.length
- }).on('digest', function (d) { dig3 = d }),
- function (err) {
+ }).on('integrity', int => { int3 = int }),
+ err => {
t.ifError(err, 'completed without error')
- t.ok(dig3, 'got a digest')
+ t.ok(int3, 'got an integrity')
}
)
})
diff --git a/test/get.js b/test/get.js
index 462e9cd..2fe9727 100644
--- a/test/get.js
+++ b/test/get.js
@@ -3,7 +3,6 @@
const Buffer = require('safe-buffer').Buffer
const BB = require('bluebird')
-const crypto = require('crypto')
const finished = BB.promisify(require('mississippi').finished)
const index = require('../lib/entry-index')
const memo = require('../lib/memoization')
@@ -12,25 +11,24 @@ const rimraf = BB.promisify(require('rimraf'))
const Tacks = require('tacks')
const test = require('tap').test
const testDir = require('./util/test-dir')(__filename)
+const ssri = require('ssri')
const CacheContent = require('./util/cache-content')
const CACHE = path.join(testDir, 'cache')
const CONTENT = Buffer.from('foobarbaz', 'utf8')
const KEY = 'my-test-key'
-const ALGO = 'sha512'
-const DIGEST = crypto.createHash(ALGO).update(CONTENT).digest('hex')
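+// full Subresource Integrity string for CONTENT, e.g. 'sha512-<base64>'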
+const INTEGRITY = ssri.fromData(CONTENT).toString()
const METADATA = { foo: 'bar' }
-var get = require('..').get
+const get = require('..').get
// Simple wrapper util cause this gets WORDY
function streamGet (byDigest) {
const args = [].slice.call(arguments, 1)
let data = []
let dataLen = 0
- let hashAlgorithm
- let digest
+ let integrity
let metadata
const stream = (
byDigest ? get.stream.byDigest : get.stream
@@ -38,37 +36,33 @@ function streamGet (byDigest) {
stream.on('data', d => {
data.push(d)
dataLen += d.length
- }).on('hashAlgorithm', h => {
- hashAlgorithm = h
- }).on('digest', d => {
- digest = d
+ }).on('integrity', int => {
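+ // ssri.stringify normalizes the emitted Integrity object to its string form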
+ integrity = ssri.stringify(int)
}).on('metadata', m => {
metadata = m
})
return finished(stream).then(() => ({
- data: Buffer.concat(data, dataLen), hashAlgorithm, digest, metadata
+ data: Buffer.concat(data, dataLen), integrity, metadata
}))
}
test('basic bulk get', t => {
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
- }, ALGO))
+ [INTEGRITY]: CONTENT
+ }))
fixture.create(CACHE)
- return index.insert(CACHE, KEY, DIGEST, {
- metadata: METADATA,
- hashAlgorithm: ALGO
+ return index.insert(CACHE, KEY, INTEGRITY, {
+ metadata: METADATA
}).then(() => {
return get(CACHE, KEY)
}).then(res => {
t.deepEqual(res, {
metadata: METADATA,
data: CONTENT,
- hashAlgorithm: ALGO,
- digest: DIGEST
+ integrity: INTEGRITY
}, 'bulk key get returned proper data')
}).then(() => {
- return get.byDigest(CACHE, DIGEST, {hashAlgorithm: ALGO})
+ return get.byDigest(CACHE, INTEGRITY)
}).then(res => {
t.deepEqual(res, CONTENT, 'byDigest returned proper data')
})
@@ -76,21 +70,19 @@ test('basic bulk get', t => {
test('basic stream get', t => {
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
- }, ALGO))
+ [INTEGRITY]: CONTENT
+ }))
fixture.create(CACHE)
- return index.insert(CACHE, KEY, DIGEST, {
- metadata: METADATA,
- hashAlgorithm: ALGO
+ return index.insert(CACHE, KEY, INTEGRITY, {
+ metadata: METADATA
}).then(() => {
return BB.join(
streamGet(false, CACHE, KEY),
- streamGet(true, CACHE, DIGEST, { hashAlgorithm: ALGO }),
+ streamGet(true, CACHE, INTEGRITY),
(byKey, byDigest) => {
t.deepEqual(byKey, {
data: CONTENT,
- hashAlgorithm: ALGO,
- digest: DIGEST,
+ integrity: INTEGRITY,
metadata: METADATA
}, 'got all expected data and fields from key fetch')
t.deepEqual(
@@ -117,9 +109,8 @@ test('ENOENT if not found', t => {
})
test('get.info index entry lookup', t => {
- return index.insert(CACHE, KEY, DIGEST, {
- metadata: METADATA,
- hashAlgorithm: ALGO
+ return index.insert(CACHE, KEY, INTEGRITY, {
+ metadata: METADATA
}).then(ENTRY => {
return get.info(CACHE, KEY).then(entry => {
t.deepEqual(entry, ENTRY, 'get.info() returned the right entry')
@@ -130,12 +121,11 @@ test('get.info index entry lookup', t => {
test('memoizes data on bulk read', t => {
memo.clearMemoized()
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
- }, ALGO))
+ [INTEGRITY]: CONTENT
+ }))
fixture.create(CACHE)
- return index.insert(CACHE, KEY, DIGEST, {
- metadata: METADATA,
- hashAlgorithm: ALGO
+ return index.insert(CACHE, KEY, INTEGRITY, {
+ metadata: METADATA
}).then(ENTRY => {
return get(CACHE, KEY).then(() => {
t.deepEqual(memo.get(CACHE, KEY), null, 'no memoization!')
@@ -144,8 +134,7 @@ test('memoizes data on bulk read', t => {
t.deepEqual(res, {
metadata: METADATA,
data: CONTENT,
- hashAlgorithm: ALGO,
- digest: DIGEST
+ integrity: INTEGRITY
}, 'usual data returned')
t.deepEqual(memo.get(CACHE, KEY), {
entry: ENTRY,
@@ -158,8 +147,7 @@ test('memoizes data on bulk read', t => {
t.deepEqual(res, {
metadata: METADATA,
data: CONTENT,
- hashAlgorithm: ALGO,
- digest: DIGEST
+ integrity: INTEGRITY
}, 'memoized data fetched by default')
return get(CACHE, KEY, { memoize: false }).then(() => {
throw new Error('expected get to fail')
@@ -178,45 +166,38 @@ test('memoizes data on bulk read', t => {
test('memoizes data on stream read', t => {
memo.clearMemoized()
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
- }, ALGO))
+ [INTEGRITY]: CONTENT
+ }))
fixture.create(CACHE)
- return index.insert(CACHE, KEY, DIGEST, {
- metadata: METADATA,
- hashAlgorithm: ALGO
+ return index.insert(CACHE, KEY, INTEGRITY, {
+ metadata: METADATA
}).then(ENTRY => {
return BB.join(
streamGet(false, CACHE, KEY),
- streamGet(true, CACHE, DIGEST, { hashAlgorithm: ALGO }),
+ streamGet(true, CACHE, INTEGRITY),
() => {
t.deepEqual(memo.get(CACHE, KEY), null, 'no memoization by key!')
t.deepEqual(
- memo.get.byDigest(CACHE, DIGEST, ALGO),
+ memo.get.byDigest(CACHE, INTEGRITY),
null,
'no memoization by digest!'
)
}
).then(() => {
memo.clearMemoized()
- return streamGet(true, CACHE, DIGEST, {
- memoize: true,
- hashAlgorithm: ALGO
+ return streamGet(true, CACHE, INTEGRITY, {
+ memoize: true
})
}).then(byDigest => {
t.deepEqual(byDigest.data, CONTENT, 'usual data returned from stream')
t.deepEqual(memo.get(CACHE, KEY), null, 'digest fetch = no key entry')
t.deepEqual(
- memo.get.byDigest(CACHE, DIGEST, ALGO),
+ memo.get.byDigest(CACHE, INTEGRITY),
CONTENT,
'content memoized'
)
t.deepEqual(
- memo.get.byDigest(CACHE, DIGEST, 'sha1'),
- null,
- 'content memoization filtered by hashAlgo'
- )
- t.deepEqual(
- memo.get.byDigest('whatev', DIGEST, ALGO),
+ memo.get.byDigest('whatev', INTEGRITY),
null,
'content memoization filtered by cache'
)
@@ -227,15 +208,14 @@ test('memoizes data on stream read', t => {
t.deepEqual(byKey, {
metadata: METADATA,
data: CONTENT,
- hashAlgorithm: ALGO,
- digest: DIGEST
+ integrity: INTEGRITY
}, 'usual data returned from key fetch')
t.deepEqual(memo.get(CACHE, KEY), {
entry: ENTRY,
data: CONTENT
}, 'data inserted into memoization cache')
t.deepEqual(
- memo.get.byDigest(CACHE, DIGEST, ALGO),
+ memo.get.byDigest(CACHE, INTEGRITY),
CONTENT,
'content memoized by digest, too'
)
@@ -249,13 +229,12 @@ test('memoizes data on stream read', t => {
}).then(() => {
return BB.join(
streamGet(false, CACHE, KEY),
- streamGet(true, CACHE, DIGEST, { hashAlgorithm: ALGO }),
+ streamGet(true, CACHE, INTEGRITY),
(byKey, byDigest) => {
t.deepEqual(byKey, {
metadata: METADATA,
data: CONTENT,
- hashAlgorithm: ALGO,
- digest: DIGEST
+ integrity: INTEGRITY
}, 'key fetch fulfilled by memoization cache')
t.deepEqual(
byDigest.data,
@@ -269,8 +248,7 @@ test('memoizes data on stream read', t => {
streamGet(false, CACHE, KEY, {
memoize: false
}).catch(err => err),
- streamGet(true, CACHE, DIGEST, {
- hashAlgorithm: ALGO,
+ streamGet(true, CACHE, INTEGRITY, {
memoize: false
}).catch(err => err),
(keyErr, digestErr) => {
@@ -286,8 +264,7 @@ test('get.info uses memoized data', t => {
memo.clearMemoized()
const ENTRY = {
key: KEY,
- digest: DIGEST,
- hashAlgorithm: ALGO,
+ integrity: INTEGRITY,
time: +(new Date()),
metadata: null
}
diff --git a/test/index.find.js b/test/index.find.js
index af46a97..af2348e 100644
--- a/test/index.find.js
+++ b/test/index.find.js
@@ -17,8 +17,7 @@ const index = require('../lib/entry-index')
test('index.find cache hit', function (t) {
const entry = {
key: 'whatever',
- digest: 'deadbeef',
- hashAlgorithm: 'whatnot',
+ integrity: 'whatnot-deadbeef',
time: 12345,
metadata: 'omgsometa'
}
@@ -32,7 +31,7 @@ test('index.find cache hit', function (t) {
t.ok(info, 'cache hit')
t.equal(
info.path,
- contentPath(CACHE, entry.digest, entry.hashAlgorithm),
+ contentPath(CACHE, entry.integrity),
'path added to info'
)
delete info.path
@@ -68,12 +67,12 @@ test('index.find key case-sensitivity', function (t) {
const fixture = new Tacks(CacheIndex({
'jsonstream': {
key: 'jsonstream',
- digest: 'lowercase',
+ integrity: 'sha1-lowercase',
time: 54321
},
'JSONStream': {
key: 'JSONStream',
- digest: 'capitalised',
+ integrity: 'sha1-capitalised',
time: 12345
}
}))
@@ -96,9 +95,8 @@ test('index.find key case-sensitivity', function (t) {
test('index.find path-breaking characters', function (t) {
const entry = {
key: ';;!registry\nhttps://registry.npmjs.org/back \\ slash@Cool™?',
- digest: 'deadbeef',
+ integrity: 'sha1-deadbeef',
time: 12345,
- hashAlgorithm: 'whatnot',
metadata: 'omgsometa'
}
const fixture = new Tacks(CacheIndex({
@@ -123,9 +121,8 @@ test('index.find extremely long keys', function (t) {
}
const entry = {
key: key,
- digest: 'deadbeef',
+ integrity: 'sha1-deadbeef',
time: 12345,
- hashAlgorithm: 'whatnot',
metadata: 'woo'
}
const fixture = new Tacks(CacheIndex({
@@ -147,14 +144,14 @@ test('index.find multiple index entries for key', function (t) {
const key = 'whatever'
const fixture = new Tacks(CacheIndex({
'whatever': [
- { key: key, digest: 'deadbeef', time: 54321 },
- { key: key, digest: 'bada55', time: 12345 }
+ { key: key, integrity: 'sha1-deadbeef', time: 54321 },
+ { key: key, integrity: 'sha1-bada55', time: 12345 }
]
}))
fixture.create(CACHE)
return index.find(CACHE, key).then(info => {
t.ok(info, 'cache hit')
- t.equal(info.digest, 'bada55', 'most recent entry wins')
+ t.equal(info.integrity, 'sha1-bada55', 'most recent entry wins')
})
})
@@ -172,7 +169,7 @@ test('index.find garbled data in index file', function (t) {
const key = 'whatever'
const stringified = JSON.stringify({
key: key,
- digest: 'deadbeef',
+ integrity: 'sha1-deadbeef',
time: 54321
})
const fixture = new Tacks(CacheIndex({
@@ -183,7 +180,7 @@ test('index.find garbled data in index file', function (t) {
fixture.create(CACHE)
return index.find(CACHE, key).then(info => {
t.ok(info, 'cache hit in spite of crash-induced fail')
- t.equal(info.digest, 'deadbeef', ' recent entry wins')
+ t.equal(info.integrity, 'sha1-deadbeef', 'intact entry wins')
})
})
@@ -191,16 +188,15 @@ test('index.find hash conflict in same bucket', function (t) {
// This... is very unlikely to happen. But hey.
const entry = {
key: 'whatever',
- digest: 'deadbeef',
- hashAlgorithm: 'whatnot',
+ integrity: 'sha1-deadbeef',
time: 12345,
metadata: 'yay'
}
const fixture = new Tacks(CacheIndex({
'whatever': [
- { key: 'ohnoes', digest: 'welp!' },
+ { key: 'ohnoes', integrity: 'sha1-welp!' },
entry,
- { key: 'nope', digest: 'bada55' }
+ { key: 'nope', integrity: 'sha1-bada55' }
]
}))
fixture.create(CACHE)
diff --git a/test/index.insert.js b/test/index.insert.js
index 251d56b..e9d0812 100644
--- a/test/index.insert.js
+++ b/test/index.insert.js
@@ -1,10 +1,11 @@
'use strict'
+const BB = require('bluebird')
+
const CacheIndex = require('./util/cache-index')
const contentPath = require('../lib/content/path')
const fs = require('fs')
const path = require('path')
-const BB = require('bluebird')
const Tacks = require('tacks')
const test = require('tap').test
const testDir = require('./util/test-dir')(__filename)
@@ -16,18 +17,16 @@ const index = require('../lib/entry-index')
const KEY = 'foo'
const BUCKET = index._bucketPath(CACHE, KEY)
-const DIGEST = 'deadbeef'
-const ALGO = 'whatnot'
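+// a well-formed integrity string is all the index tests need -- no real hash required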
+const INTEGRITY = 'sha512-deadbeef'
test('basic insertion', function (t) {
- return index.insert(
- CACHE, KEY, DIGEST, { metadata: 'foo', hashAlgorithm: ALGO }
- ).then(entry => {
+ return index.insert(CACHE, KEY, INTEGRITY, {
+ metadata: 'foo'
+ }).then(entry => {
t.deepEqual(entry, {
key: KEY,
- digest: DIGEST,
- hashAlgorithm: ALGO,
- path: contentPath(CACHE, DIGEST, ALGO),
+ integrity: INTEGRITY,
+ path: contentPath(CACHE, INTEGRITY),
time: entry.time,
metadata: 'foo'
}, 'formatted entry returned')
@@ -40,8 +39,7 @@ test('basic insertion', function (t) {
t.ok(entry.time, 'entry has a timestamp')
t.deepEqual(entry, {
key: KEY,
- digest: DIGEST,
- hashAlgorithm: ALGO,
+ integrity: INTEGRITY,
time: entry.time,
metadata: 'foo'
}, 'entry matches what was inserted')
@@ -49,10 +47,10 @@ test('basic insertion', function (t) {
})
test('inserts additional entries into existing key', function (t) {
- return index.insert(
- CACHE, KEY, DIGEST, {metadata: 1}
- ).then(() => (
- index.insert(CACHE, KEY, DIGEST, {metadata: 2})
+ return index.insert(CACHE, KEY, INTEGRITY, {
+ metadata: 1
+ }).then(() => (
+ index.insert(CACHE, KEY, INTEGRITY, {metadata: 2})
)).then(() => {
return fs.readFileAsync(BUCKET, 'utf8')
}).then(data => {
@@ -62,13 +60,11 @@ test('inserts additional entries into existing key', function (t) {
entries.forEach(function (e) { delete e.time })
t.deepEqual(entries, [{
key: KEY,
- digest: DIGEST,
- hashAlgorithm: 'sha512',
+ integrity: INTEGRITY,
metadata: 1
}, {
key: KEY,
- digest: DIGEST,
- hashAlgorithm: 'sha512',
+ integrity: INTEGRITY,
metadata: 2
}], 'all entries present')
})
@@ -79,13 +75,13 @@ test('separates entries even if one is corrupted', function (t) {
const fixture = new Tacks(CacheIndex({
'foo': '\n' + JSON.stringify({
key: KEY,
- digest: 'meh',
+ integrity: 'meh',
time: 54321
}) + '\n{"key": "' + KEY + '"\noway'
}))
fixture.create(CACHE)
return index.insert(
- CACHE, KEY, DIGEST
+ CACHE, KEY, INTEGRITY
).then(() => {
return fs.readFileAsync(BUCKET, 'utf8')
}).then(data => {
@@ -93,8 +89,7 @@ test('separates entries even if one is corrupted', function (t) {
delete entry.time
t.deepEqual(entry, {
key: KEY,
- digest: DIGEST,
- hashAlgorithm: 'sha512'
+ integrity: INTEGRITY
}, 'new entry unaffected by corruption')
})
})
@@ -102,7 +97,7 @@ test('separates entries even if one is corrupted', function (t) {
test('optional arbitrary metadata', function (t) {
const metadata = { foo: 'bar' }
return index.insert(
- CACHE, KEY, DIGEST, { metadata: metadata }
+ CACHE, KEY, INTEGRITY, { metadata: metadata }
).then(() => {
return fs.readFileAsync(BUCKET, 'utf8')
}).then(data => {
@@ -110,8 +105,7 @@ test('optional arbitrary metadata', function (t) {
delete entry.time
t.deepEqual(entry, {
key: KEY,
- digest: DIGEST,
- hashAlgorithm: 'sha512',
+ integrity: INTEGRITY,
metadata: metadata
}, 'entry includes inserted metadata')
})
@@ -119,8 +113,8 @@ test('optional arbitrary metadata', function (t) {
test('key case-sensitivity', function (t) {
return BB.join(
- index.insert(CACHE, KEY, DIGEST),
- index.insert(CACHE, KEY.toUpperCase(), DIGEST + 'upper')
+ index.insert(CACHE, KEY, INTEGRITY),
+ index.insert(CACHE, KEY.toUpperCase(), INTEGRITY + 'upper')
).then(() => {
return BB.join(
index.find(CACHE, KEY),
@@ -130,17 +124,17 @@ test('key case-sensitivity', function (t) {
delete upperEntry.time
t.deepEqual({
key: entry.key,
- digest: entry.digest
+ integrity: entry.integrity
}, {
key: KEY,
- digest: DIGEST
+ integrity: INTEGRITY
}, 'regular entry exists')
t.deepEqual({
key: upperEntry.key,
- digest: upperEntry.digest
+ integrity: upperEntry.integrity
}, {
key: KEY.toUpperCase(),
- digest: DIGEST + 'upper'
+ integrity: INTEGRITY + 'upper'
}, 'case-variant entry intact')
}
)
@@ -150,7 +144,7 @@ test('key case-sensitivity', function (t) {
test('path-breaking characters', function (t) {
const newKey = ';;!registry\nhttps://registry.npmjs.org/back \\ slash@Cool™?'
return index.insert(
- CACHE, newKey, DIGEST
+ CACHE, newKey, INTEGRITY
).then(() => {
const bucket = index._bucketPath(CACHE, newKey)
return fs.readFileAsync(bucket, 'utf8')
@@ -159,8 +153,7 @@ test('path-breaking characters', function (t) {
delete entry.time
t.deepEqual(entry, {
key: newKey,
- digest: DIGEST,
- hashAlgorithm: 'sha512'
+ integrity: INTEGRITY
}, 'entry exists and matches original key with invalid chars')
})
})
@@ -171,7 +164,7 @@ test('extremely long keys', function (t) {
newKey += i
}
return index.insert(
- CACHE, newKey, DIGEST
+ CACHE, newKey, INTEGRITY
).then(() => {
const bucket = index._bucketPath(CACHE, newKey)
return fs.readFileAsync(bucket, 'utf8')
@@ -180,8 +173,7 @@ test('extremely long keys', function (t) {
delete entry.time
t.deepEqual(entry, {
key: newKey,
- digest: DIGEST,
- hashAlgorithm: 'sha512'
+ integrity: INTEGRITY
}, 'entry exists in spite of INCREDIBLY LONG key')
})
})
diff --git a/test/ls.js b/test/ls.js
index d72381c..87f23aa 100644
--- a/test/ls.js
+++ b/test/ls.js
@@ -18,15 +18,13 @@ test('basic listing', function (t) {
const contents = {
'whatever': {
key: 'whatever',
- digest: 'deadbeef',
- hashAlgorithm: 'whatnot',
+ integrity: 'sha512-deadbeef',
time: 12345,
metadata: 'omgsometa'
},
'whatnot': {
key: 'whatnot',
- digest: 'bada55',
- hashAlgorithm: 'whateva',
+ integrity: 'sha512-bada55',
time: 54321,
metadata: null
}
@@ -34,10 +32,10 @@ test('basic listing', function (t) {
const fixture = new Tacks(CacheIndex(contents))
contents.whatever.path =
contentPath(
- CACHE, contents.whatever.digest, contents.whatever.hashAlgorithm)
+ CACHE, contents.whatever.integrity)
contents.whatnot.path =
contentPath(
- CACHE, contents.whatnot.digest, contents.whatnot.hashAlgorithm)
+ CACHE, contents.whatnot.integrity)
fixture.create(CACHE)
return ls(CACHE).then(listing => {
t.deepEqual(listing, contents, 'index contents correct')
@@ -57,15 +55,13 @@ test('separate keys in conflicting buckets', function (t) {
const contents = {
'whatever': {
key: 'whatever',
- digest: 'deadbeef',
- hashAlgorithm: 'whatnot',
+ integrity: 'sha512-deadbeef',
time: 12345,
metadata: 'omgsometa'
},
'whatev': {
key: 'whatev',
- digest: 'bada55',
- hashAlgorithm: 'whateva',
+ integrity: 'sha512-bada55',
time: 54321,
metadata: null
}
@@ -76,10 +72,10 @@ test('separate keys in conflicting buckets', function (t) {
}))
contents.whatever.path =
contentPath(
- CACHE, contents.whatever.digest, contents.whatever.hashAlgorithm)
+ CACHE, contents.whatever.integrity)
contents.whatev.path =
contentPath(
- CACHE, contents.whatev.digest, contents.whatev.hashAlgorithm)
+ CACHE, contents.whatev.integrity)
fixture.create(CACHE)
return ls(CACHE).then(listing => {
t.deepEqual(listing, contents, 'index contents correct')
diff --git a/test/memoization.js b/test/memoization.js
index 7919358..e6ef1f8 100644
--- a/test/memoization.js
+++ b/test/memoization.js
@@ -7,8 +7,7 @@ const memo = require('../lib/memoization')
const CACHE = 'mycache'
const ENTRY = {
key: 'foo',
- digest: 'deadbeef',
- hashAlgorithm: 'sha512',
+ integrity: 'sha512-deadbeef',
time: new Date(),
metadata: null
}
@@ -21,7 +20,7 @@ test('memoizes entry and data by key', t => {
entry: ENTRY,
data: DATA
},
- [`digest:${CACHE}:${ENTRY.hashAlgorithm}:${ENTRY.digest}`]: DATA
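+ // the memo key now embeds the whole integrity string rather than algo + hex digest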
+ [`digest:${CACHE}:${ENTRY.integrity}`]: DATA
}, 'cache has both key and digest entries')
t.done()
})
@@ -44,7 +43,7 @@ test('can fetch data by key', t => {
test('can fetch data by digest', t => {
memo.put(CACHE, ENTRY, DATA)
t.deepEqual(
- memo.get.byDigest(CACHE, ENTRY.digest, ENTRY.hashAlgorithm),
+ memo.get.byDigest(CACHE, ENTRY.integrity),
DATA,
'got raw data by digest, without an entry'
)
@@ -61,7 +60,7 @@ test('can clear out the memoization cache', t => {
'entry not there anymore'
)
t.deepEqual(
- memo.get.byDigest(ENTRY.digest),
+ memo.get.byDigest(CACHE, ENTRY.integrity),
null,
'digest-based data not there anymore'
)
diff --git a/test/put.js b/test/put.js
index d371196..7fa80c3 100644
--- a/test/put.js
+++ b/test/put.js
@@ -3,7 +3,6 @@
const Buffer = require('safe-buffer').Buffer
const BB = require('bluebird')
-const crypto = require('crypto')
const fromString = require('./util/from-string')
const fs = BB.promisifyAll(require('fs'))
const index = require('../lib/entry-index')
@@ -12,21 +11,21 @@ const path = require('path')
const pipe = BB.promisify(require('mississippi').pipe)
const test = require('tap').test
const testDir = require('./util/test-dir')(__filename)
+const ssri = require('ssri')
const CACHE = path.join(testDir, 'cache')
const CONTENT = Buffer.from('foobarbaz', 'utf8')
const KEY = 'my-test-key'
-const ALGO = 'sha512'
-const DIGEST = crypto.createHash(ALGO).update(CONTENT).digest('hex')
+const INTEGRITY = ssri.fromData(CONTENT).toString()
const METADATA = { foo: 'bar' }
const contentPath = require('../lib/content/path')
var put = require('..').put
test('basic bulk insertion', t => {
- return put(CACHE, KEY, CONTENT).then(digest => {
- t.equal(digest, DIGEST, 'returned content digest')
- const dataPath = contentPath(CACHE, digest, ALGO)
+ return put(CACHE, KEY, CONTENT).then(integrity => {
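+ // put resolves with an Integrity object; compare its string form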
+ t.equal(integrity.toString(), INTEGRITY, 'returned content integrity')
+ const dataPath = contentPath(CACHE, integrity)
return fs.readFileAsync(dataPath)
}).then(data => {
t.deepEqual(data, CONTENT, 'content was correctly inserted')
@@ -34,14 +33,14 @@ test('basic bulk insertion', t => {
})
test('basic stream insertion', t => {
- let foundDigest
+ let int
const src = fromString(CONTENT)
- const stream = put.stream(CACHE, KEY).on('digest', function (d) {
- foundDigest = d
+ const stream = put.stream(CACHE, KEY).on('integrity', i => {
+ int = i
})
return pipe(src, stream).then(() => {
- t.equal(foundDigest, DIGEST, 'returned digest matches expected')
- return fs.readFileAsync(contentPath(CACHE, foundDigest))
+ t.equal(int.toString(), INTEGRITY, 'returned integrity matches expected')
+ return fs.readFileAsync(contentPath(CACHE, int))
}).then(data => {
t.deepEqual(data, CONTENT, 'contents are identical to inserted content')
})
@@ -53,7 +52,7 @@ test('adds correct entry to index before finishing', t => {
}).then(entry => {
t.ok(entry, 'got an entry')
t.equal(entry.key, KEY, 'entry has the right key')
- t.equal(entry.digest, DIGEST, 'entry has the right key')
+ t.equal(entry.integrity, INTEGRITY, 'entry has the right integrity')
t.deepEqual(entry.metadata, METADATA, 'metadata also inserted')
})
})
@@ -61,10 +60,9 @@ test('adds correct entry to index before finishing', t => {
test('optionally memoizes data on bulk insertion', t => {
return put(CACHE, KEY, CONTENT, {
metadata: METADATA,
- hashAlgorithm: ALGO,
memoize: true
- }).then(digest => {
- t.equal(digest, DIGEST, 'digest returned as usual')
+ }).then(integrity => {
+ t.equal(integrity.toString(), INTEGRITY, 'integrity returned as usual')
return index.find(CACHE, KEY) // index.find is not memoized
}).then(entry => {
t.deepEqual(memo.get(CACHE, KEY), {
@@ -72,26 +70,23 @@ test('optionally memoizes data on bulk insertion', t => {
entry: entry
}, 'content inserted into memoization cache by key')
t.deepEqual(
- memo.get.byDigest(CACHE, DIGEST, ALGO),
+ memo.get.byDigest(CACHE, INTEGRITY),
CONTENT,
- 'content inserted into memoization cache by digest'
+ 'content inserted into memoization cache by integrity'
)
})
})
test('optionally memoizes data on stream insertion', t => {
- let foundDigest
+ let int
const src = fromString(CONTENT)
const stream = put.stream(CACHE, KEY, {
- hashAlgorithm: ALGO,
metadata: METADATA,
memoize: true
- }).on('digest', function (d) {
- foundDigest = d
- })
+ }).on('integrity', i => { int = i })
return pipe(src, stream).then(() => {
- t.equal(foundDigest, DIGEST, 'digest emitted as usual')
- return fs.readFileAsync(contentPath(CACHE, foundDigest))
+ t.equal(int.toString(), INTEGRITY, 'integrity emitted as usual')
+ return fs.readFileAsync(contentPath(CACHE, int))
}).then(data => {
t.deepEqual(data, CONTENT, 'contents are identical to inserted content')
return index.find(CACHE, KEY) // index.find is not memoized
@@ -101,9 +96,9 @@ test('optionally memoizes data on stream insertion', t => {
entry: entry
}, 'content inserted into memoization cache by key')
t.deepEqual(
- memo.get.byDigest(CACHE, DIGEST, ALGO),
+ memo.get.byDigest(CACHE, INTEGRITY),
CONTENT,
- 'content inserted into memoization cache by digest'
+ 'content inserted into memoization cache by integrity'
)
})
})
@@ -127,10 +122,10 @@ test('signals error if error writing to cache', t => {
)
})
-test('errors if input stream errors', function (t) {
- let foundDigest
- const putter = put.stream(CACHE, KEY).on('digest', function (d) {
- foundDigest = d
+test('errors if input stream errors', t => {
+ let int
+ const putter = put.stream(CACHE, KEY).on('integrity', i => {
+ int = i
})
const stream = fromString(false)
return pipe(
@@ -139,7 +134,7 @@ test('errors if input stream errors', function (t) {
throw new Error('expected error')
}).catch(err => {
t.ok(err, 'got an error')
- t.ok(!foundDigest, 'no digest returned')
+ t.ok(!int, 'no integrity returned')
t.match(
err.message,
/Invalid non-string/,
diff --git a/test/rm.js b/test/rm.js
index 3deefd2..38eeb76 100644
--- a/test/rm.js
+++ b/test/rm.js
@@ -3,20 +3,19 @@
const Buffer = require('safe-buffer').Buffer
const BB = require('bluebird')
-const crypto = require('crypto')
const fs = BB.promisifyAll(require('fs'))
const index = require('../lib/entry-index')
const path = require('path')
const Tacks = require('tacks')
const test = require('tap').test
const testDir = require('./util/test-dir')(__filename)
+const ssri = require('ssri')
const CacheContent = require('./util/cache-content')
const CACHE = path.join(testDir, 'cache')
const CONTENT = Buffer.from('foobarbaz')
const KEY = 'my-test-key'
-const ALGO = 'sha512'
-const DIGEST = crypto.createHash(ALGO).update(CONTENT).digest('hex')
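+// left as an Integrity object; contentPath and the fixture stringify it as needed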
+const INTEGRITY = ssri.fromData(CONTENT)
const METADATA = { foo: 'bar' }
const contentPath = require('../lib/content/path')
@@ -26,12 +25,11 @@ const rm = require('..').rm
test('rm.entry removes entries, not content', t => {
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
- }, ALGO))
+ [INTEGRITY]: CONTENT
+ }))
fixture.create(CACHE)
- return index.insert(CACHE, KEY, DIGEST, {
- metadata: METADATA,
- hashAlgorithm: ALGO
+ return index.insert(CACHE, KEY, INTEGRITY, {
+ metadata: METADATA
}).then(() => {
t.equal(rm, rm.entry, 'rm is an alias for rm.entry')
return rm.entry(CACHE, KEY)
@@ -42,7 +40,7 @@ test('rm.entry removes entries, not content', t => {
}).catch({code: 'ENOENT'}, err => {
t.match(err.message, /not found/, 'entry no longer accessible')
}).then(() => {
- return fs.readFileAsync(contentPath(CACHE, DIGEST, ALGO))
+ return fs.readFileAsync(contentPath(CACHE, INTEGRITY))
}).then(data => {
t.deepEqual(data, CONTENT, 'content remains in cache')
})
@@ -50,14 +48,13 @@ test('rm.entry removes entries, not content', t => {
test('rm.content removes content, not entries', t => {
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
- }, ALGO))
+ [INTEGRITY]: CONTENT
+ }))
fixture.create(CACHE)
- return index.insert(CACHE, KEY, DIGEST, {
- metadata: METADATA,
- hashAlgorithm: ALGO
+ return index.insert(CACHE, KEY, INTEGRITY, {
+ metadata: METADATA
}).then(() => {
- return rm.content(CACHE, DIGEST)
+ return rm.content(CACHE, INTEGRITY)
}).then(() => {
return get(CACHE, KEY)
}).then(res => {
@@ -65,7 +62,7 @@ test('rm.content removes content, not entries', t => {
}).catch({code: 'ENOENT'}, err => {
t.match(err.message, /no such file/, 'entry no longer accessible')
}).then(() => {
- return fs.readFileAsync(contentPath(CACHE, DIGEST, ALGO))
+ return fs.readFileAsync(contentPath(CACHE, INTEGRITY))
}).then(() => {
throw new Error('unexpected success')
}).catch({code: 'ENOENT'}, err => {
@@ -75,12 +72,11 @@ test('rm.content removes content, not entries', t => {
test('rm.all deletes content and index dirs', t => {
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
- }, ALGO))
+ [INTEGRITY]: CONTENT
+ }))
fixture.create(CACHE)
- return index.insert(CACHE, KEY, DIGEST, {
- metadata: METADATA,
- hashAlgorithm: ALGO
+ return index.insert(CACHE, KEY, INTEGRITY, {
+ metadata: METADATA
}).then(() => {
return fs.mkdirAsync(path.join(CACHE, 'tmp'))
}).then(() => {
diff --git a/test/util/cache-content.js b/test/util/cache-content.js
index 673cbbd..4e84abb 100644
--- a/test/util/cache-content.js
+++ b/test/util/cache-content.js
@@ -8,11 +8,10 @@ const Dir = Tacks.Dir
const File = Tacks.File
module.exports = CacheContent
-function CacheContent (entries, hashAlgorithm) {
- hashAlgorithm = hashAlgorithm || 'sha512'
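+// keys are full integrity strings now; the algorithm travels inside them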
+function CacheContent (entries) {
var tree = Dir({})
Object.keys(entries).forEach(function (k) {
- const cpath = contentPath('', k, hashAlgorithm)
+ const cpath = contentPath('', k)
const content = entries[k]
const parts = cpath.split(path.sep)
insertContent(tree, parts, content)
diff --git a/test/verify.js b/test/verify.js
index bba0243..5ce91d0 100644
--- a/test/verify.js
+++ b/test/verify.js
@@ -3,7 +3,6 @@
const Buffer = require('safe-buffer').Buffer
const BB = require('bluebird')
-const crypto = require('crypto')
const contentPath = require('../lib/content/path')
const index = require('../lib/entry-index')
const fs = BB.promisifyAll(require('graceful-fs'))
@@ -11,14 +10,14 @@ const path = require('path')
const Tacks = require('tacks')
const test = require('tap').test
const testDir = require('./util/test-dir')(__filename)
+const ssri = require('ssri')
const CacheContent = require('./util/cache-content')
const CACHE = path.join(testDir, 'cache')
const CONTENT = Buffer.from('foobarbaz', 'utf8')
const KEY = 'my-test-key'
-const ALGO = 'sha512'
-const DIGEST = crypto.createHash(ALGO).update(CONTENT).digest('hex')
+const INTEGRITY = ssri.fromData(CONTENT)
const METADATA = { foo: 'bar' }
const BUCKET = index._bucketPath(CACHE, KEY)
@@ -26,13 +25,12 @@ const verify = require('..').verify
function mockCache () {
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
- }, ALGO))
+ [INTEGRITY]: CONTENT
+ }))
fixture.create(CACHE)
return fs.mkdirAsync(path.join(CACHE, 'tmp')).then(() => {
- return index.insert(CACHE, KEY, DIGEST, {
- metadata: METADATA,
- hashAlgorithm: ALGO
+ return index.insert(CACHE, KEY, INTEGRITY, {
+ metadata: METADATA
})
})
}
@@ -60,9 +58,8 @@ test('removes corrupted index entries from buckets', t => {
test('removes shadowed index entries from buckets', t => {
return mockCache().then(() => {
- return index.insert(CACHE, KEY, DIGEST, {
- metadata: 'meh',
- hashAlgorithm: ALGO
+ return index.insert(CACHE, KEY, INTEGRITY, {
+ metadata: 'meh'
}).then(newEntry => {
return verify(CACHE).then(stats => {
t.equal(stats.missingContent, 0, 'content valid because of good entry')
@@ -71,8 +68,7 @@ test('removes shadowed index entries from buckets', t => {
}).then(bucketData => {
const stringified = JSON.stringify({
key: newEntry.key,
- digest: newEntry.digest,
- hashAlgorithm: newEntry.hashAlgorithm,
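+ // the returned entry carries an Integrity object, so serialize it to match the JSON on disk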
+ integrity: newEntry.integrity.toString(),
time: +(bucketData.match(/"time":([0-9]+)/)[1]),
metadata: newEntry.metadata
})
@@ -91,13 +87,11 @@ test('accepts function for custom user filtering of index entries', t => {
const KEY3 = KEY + 'bbb'
return mockCache().then(() => {
return BB.join(
- index.insert(CACHE, KEY2, DIGEST, {
- metadata: 'haayyyy',
- hashAlgorithm: ALGO
+ index.insert(CACHE, KEY2, INTEGRITY, {
+ metadata: 'haayyyy'
}),
- index.insert(CACHE, KEY3, DIGEST, {
- metadata: 'haayyyy again',
- hashAlgorithm: ALGO
+ index.insert(CACHE, KEY3, INTEGRITY, {
+ metadata: 'haayyyy again'
}),
(entryA, entryB) => ({
[entryA.key]: entryA,
@@ -129,7 +123,7 @@ test('accepts function for custom user filtering of index entries', t => {
})
test('removes corrupted content', t => {
- const cpath = contentPath(CACHE, DIGEST)
+ const cpath = contentPath(CACHE, INTEGRITY)
return mockCache().then(() => {
return fs.truncateAsync(cpath, CONTENT.length - 1)
}).then(() => {
@@ -158,8 +152,8 @@ test('removes corrupted content', t => {
test('removes content not referenced by any entries', t => {
const fixture = new Tacks(CacheContent({
- [DIGEST]: CONTENT
- }, ALGO))
+ [INTEGRITY]: CONTENT
+ }))
fixture.create(CACHE)
return verify(CACHE).then(stats => {
delete stats.startTime