fix(index): add length header before JSON for verification
zkat committed Mar 4, 2017
1 parent 655799e commit fb8cb4d
Showing 4 changed files with 45 additions and 16 deletions.
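
For context: after this change, every line in an index bucket carries a decimal length header, separated from its JSON payload by a literal tab, and each bucket intentionally begins with a newline. A bucket with two entries looks roughly like this (values illustrative; the lengths match the payloads shown):

54	{"key":"foo","digest":"deadbeef","time":1488672965805}
52	{"key":"foo","digest":"bada55","time":1488672965810}

Readers accept a line only when the payload's actual character count equals the header; any mismatch is treated as corruption and the line is skipped.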
25 changes: 22 additions & 3 deletions lib/entry-index.js
@@ -29,8 +29,16 @@ function insert (cache, key, digest, opts) {
       time: +(new Date()),
       metadata: opts.metadata
     }
+    const stringified = JSON.stringify(entry)
+    // NOTE - Cleverness ahoy!
+    //
+    // This works because it's tremendously unlikely for an entry to corrupt
+    // another while still preserving the string length of the JSON in
+    // question. So, we just slap the length in there and verify it on read.
+    //
+    // Thanks to @isaacs for the whiteboarding session that ended up with this.
     return appendFileAsync(
-      bucket, '\n' + JSON.stringify(entry)
+      bucket, `\n${stringified.length}\t${stringified}`
     ).then(() => entry)
   }).then(entry => (
     fixOwner.chownr(bucket, opts.uid, opts.gid).then(() => (
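
The length check that the following hunks add to the read path is what makes the comment above hold in practice: if a second append lands mid-line (a torn or interleaved write), the text between the tab and the next newline almost certainly no longer has the advertised length, so the reader drops that line rather than parsing garbage. A corrupt entry would have to change the payload while preserving its exact character count to slip through, which is the "tremendously unlikely" case the comment refers to.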
@@ -46,10 +54,16 @@ function find (cache, key) {
   let ret
   return Promise.fromNode(cb => {
     pipe(stream, split('\n', null, {trailing: true}).on('data', function (l) {
+      const pieces = l.split('\t')
+      if (!pieces[1] || pieces[1].length !== parseInt(pieces[0], 10)) {
+        // Length is no good! Corruption ahoy!
+        return
+      }
       let obj
       try {
-        obj = JSON.parse(l)
+        obj = JSON.parse(pieces[1])
       } catch (e) {
+        // Entry is corrupted!
         return
       }
       if (obj && (obj.key === key)) {
@@ -92,9 +106,14 @@ function lsStream (cache) {
       if (err) { return cb(err) }
       const entries = {}
       data.split('\n').slice(1).forEach(function (entry) {
+        const pieces = entry.split('\t')
+        if (pieces[1].length !== parseInt(pieces[0], 10)) {
+          // Length is no good! Corruption ahoy!
+          return
+        }
         let parsed
         try {
-          parsed = JSON.parse(entry)
+          parsed = JSON.parse(pieces[1])
         } catch (e) {
         }
         // NOTE - it's possible for an entry to be
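Pulling the write and read sides of the diff above together, here is a minimal standalone sketch of the scheme outside cacache's promise plumbing (the helper names appendEntry and readEntries are hypothetical, not part of this commit):

'use strict'

const fs = require('fs')

// Write side: append one entry as '\n<length>\t<json>', as insert() now does.
function appendEntry (bucket, entry) {
  const stringified = JSON.stringify(entry)
  fs.appendFileSync(bucket, `\n${stringified.length}\t${stringified}`)
}

// Read side: drop any line whose payload fails the length check or does not
// parse, mirroring the verification added to find() and lsStream().
function readEntries (bucket) {
  return fs.readFileSync(bucket, 'utf8').split('\n').slice(1).map(line => {
    const pieces = line.split('\t')
    if (!pieces[1] || pieces[1].length !== parseInt(pieces[0], 10)) {
      return null // Length is no good! Corruption ahoy!
    }
    try {
      return JSON.parse(pieces[1])
    } catch (e) {
      return null // Entry is corrupted!
    }
  }).filter(Boolean)
}

Splitting on the tab is safe here because JSON.stringify escapes any tab inside a string as \t, so a well-formed payload never contains a literal tab.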
13 changes: 8 additions & 5 deletions test/index.find.js
@@ -171,12 +171,15 @@ test('index.find garbled data in index file', function (t) {
   // to the process crashing). In this case, the corrupt entry
   // will simply be skipped.
   const key = 'whatever'
+  const stringified = JSON.stringify({
+    key: key,
+    digest: 'deadbeef',
+    time: 54321
+  })
   const fixture = new Tacks(CacheIndex({
-    'whatever': '\n' + JSON.stringify({
-      key: key,
-      digest: 'deadbeef',
-      time: 54321
-    }) + '\n{"key": "' + key + '"\noway'
+    'whatever': '\n' +
+      `${stringified.length}\t${stringified}` +
+      '\n{"key": "' + key + '"\noway'
   }))
   fixture.create(CACHE)
   return index.find(CACHE, key).then(info => {
18 changes: 11 additions & 7 deletions test/index.insert.js
@@ -12,7 +12,6 @@ const testDir = require('./util/test-dir')(__filename)
 Promise.promisifyAll(fs)

 const CACHE = path.join(testDir, 'cache')
-const Dir = Tacks.Dir
 const index = require('../lib/entry-index')

 const KEY = 'foo'
@@ -35,7 +34,9 @@ test('basic insertion', function (t) {
     return fs.readFileAsync(BUCKET, 'utf8')
   }).then(data => {
     t.equal(data[0], '\n', 'first entry starts with a \\n')
-    const entry = JSON.parse(data)
+    const split = data.split('\t')
+    t.equal(parseInt(split[0], 10), split[1].length, 'length header correct')
+    const entry = JSON.parse(split[1])
     t.ok(entry.time, 'entry has a timestamp')
     t.deepEqual(entry, {
       key: KEY,
@@ -55,7 +56,9 @@ test('inserts additional entries into existing key', function (t) {
   )).then(() => {
     return fs.readFileAsync(BUCKET, 'utf8')
   }).then(data => {
-    const entries = data.split('\n').slice(1).map(JSON.parse)
+    const entries = data.split('\n').slice(1).map(line => {
+      return JSON.parse(line.split('\t')[1])
+    })
     entries.forEach(function (e) { delete e.time })
     t.deepEqual(entries, [{
       key: KEY,
@@ -70,6 +73,7 @@ test('inserts additional entries into existing key', function (t) {
 })

 test('separates entries even if one is corrupted', function (t) {
+  // TODO - check that middle-of-string corrupted writes won't hurt.
   const fixture = new Tacks(CacheIndex({
     'foo': '\n' + JSON.stringify({
       key: KEY,
@@ -83,7 +87,7 @@ test('separates entries even if one is corrupted', function (t) {
   ).then(() => {
     return fs.readFileAsync(BUCKET, 'utf8')
   }).then(data => {
-    const entry = JSON.parse(data.split('\n')[4])
+    const entry = JSON.parse(data.split('\n')[4].split('\t')[1])
     delete entry.time
     t.deepEqual(entry, {
       key: KEY,
@@ -99,7 +103,7 @@ test('optional arbitrary metadata', function (t) {
   ).then(() => {
     return fs.readFileAsync(BUCKET, 'utf8')
   }).then(data => {
-    const entry = JSON.parse(data)
+    const entry = JSON.parse(data.split('\t')[1])
     delete entry.time
     t.deepEqual(entry, {
       key: KEY,
@@ -147,7 +151,7 @@ test('path-breaking characters', function (t) {
     const bucket = index._bucketPath(CACHE, newKey)
     return fs.readFileAsync(bucket, 'utf8')
   }).then(data => {
-    const entry = JSON.parse(data)
+    const entry = JSON.parse(data.split('\t')[1])
     delete entry.time
     t.deepEqual(entry, {
       key: newKey,
@@ -167,7 +171,7 @@ test('extremely long keys', function (t) {
     const bucket = index._bucketPath(CACHE, newKey)
     return fs.readFileAsync(bucket, 'utf8')
   }).then(data => {
-    const entry = JSON.parse(data)
+    const entry = JSON.parse(data.split('\t')[1])
     delete entry.time
     t.deepEqual(entry, {
       key: newKey,
5 changes: 4 additions & 1 deletion test/util/cache-index.js
@@ -26,7 +26,10 @@ function CacheIndex (entries, hashAlgorithm) {
     if (typeof lines.length !== 'number') {
       lines = [lines]
     }
-    serialised = '\n' + lines.map(JSON.stringify).join('\n')
+    serialised = '\n' + lines.map(line => {
+      const stringified = JSON.stringify(line)
+      return `${stringified.length}\t${stringified}`
+    }).join('\n')
   }
   insertContent(tree, parts, serialised)
 })
