feat(promise): individually promisifying functions as needed
billatnpm authored and isaacs committed Sep 15, 2019
1 parent 28aeeac commit 74b939e
Showing 23 changed files with 222 additions and 195 deletions.
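The pattern throughout the diff: instead of `BB.promisifyAll(fs)`, which mutates the shared `fs` module object and attaches an `*Async` twin for every method whether used or not, each file now promisifies only the functions it actually calls and binds them under plain names. A minimal sketch of the before/after shape (file name and data are illustrative):

```js
const BB = require('bluebird')
const fs = require('fs')

// Before: wrap the entire module. Every fs method grows an *Async
// alias, and the shared fs object is mutated for all requirers.
BB.promisifyAll(fs)
fs.writeFileAsync('example.txt', 'data').then(() => console.log('done'))

// After: wrap only what this module needs, leaving fs untouched.
const writeFile = BB.promisify(fs.writeFile)
writeFile('example.txt', 'data').then(() => console.log('done'))
```

This also removes hidden coupling: a bare `fs.writeFileAsync` call, as in the old get.js below, only works if some other module has already run `promisifyAll` over the same `fs` object.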
22 changes: 12 additions & 10 deletions get.js
@@ -1,13 +1,15 @@
'use strict'

const BB = require('bluebird')

const figgyPudding = require('figgy-pudding')
const fs = require('fs')
const { pipe, pipeline, through } = require('mississippi')
const index = require('./lib/entry-index')
const memo = require('./lib/memoization')
const pipe = require('mississippi').pipe
const pipeline = require('mississippi').pipeline
const read = require('./lib/content/read')
const through = require('mississippi').through

const writeFile = BB.promisify(fs.writeFile)

const GetOpts = figgyPudding({
integrity: {},
@@ -38,19 +40,19 @@ function getData (byDigest, cache, key, opts) {
}
return (
byDigest ? Promise.resolve(null) : index.find(cache, key, opts)
).then(entry => {
).then((entry) => {
if (!entry && !byDigest) {
throw new index.NotFoundError(cache, key)
}
return read(cache, byDigest ? key : entry.integrity, {
integrity: opts.integrity,
size: opts.size
}).then(data => byDigest ? data : {
}).then((data) => byDigest ? data : {
metadata: entry.metadata,
data: data,
size: entry.size,
integrity: entry.integrity
}).then(res => {
}).then((res) => {
if (opts.memoize && byDigest) {
memo.put.byDigest(cache, key, res, opts)
} else if (opts.memoize) {
@@ -124,7 +126,7 @@ function getStream (cache, key, opts) {
stream.write(memoized.data, () => stream.end())
return stream
}
index.find(cache, key).then(entry => {
index.find(cache, key).then((entry) => {
if (!entry) {
return stream.emit(
'error', new index.NotFoundError(cache, key)
@@ -220,7 +222,7 @@ function copy (byDigest, cache, key, dest, opts) {
if (read.copy) {
return (
byDigest ? Promise.resolve(null) : index.find(cache, key, opts)
).then(entry => {
).then((entry) => {
if (!entry && !byDigest) {
throw new index.NotFoundError(cache, key)
}
@@ -233,8 +235,8 @@ function copy (byDigest, cache, key, dest, opts) {
})
})
} else {
return getData(byDigest, cache, key, opts).then(res => {
return fs.writeFileAsync(dest, byDigest ? res : res.data)
return getData(byDigest, cache, key, opts).then((res) => {
return writeFile(dest, byDigest ? res : res.data)
.then(() => byDigest ? key : {
metadata: res.metadata,
size: res.size,
16 changes: 8 additions & 8 deletions lib/content/read.js
@@ -10,8 +10,8 @@ const pipe = BB.promisify(require('mississippi').pipe)
const ssri = require('ssri')
const Y = require('../util/y.js')

const lstatAsync = BB.promisify(fs.lstat)
const readFileAsync = BB.promisify(fs.readFile)
const lstat = BB.promisify(fs.lstat)
const readFile = BB.promisify(fs.readFile)

const ReadOpts = figgyPudding({
size: {}
@@ -21,7 +21,7 @@ module.exports = read
function read (cache, integrity, opts) {
opts = ReadOpts(opts)
return withContentSri(cache, integrity, (cpath, sri) => {
return readFileAsync(cpath, null).then(data => {
return readFile(cpath, null).then((data) => {
if (typeof opts.size === 'number' && opts.size !== data.length) {
throw sizeError(opts.size, data.length)
} else if (ssri.checkData(data, sri)) {
@@ -54,7 +54,7 @@ function readStream (cache, integrity, opts) {
opts = ReadOpts(opts)
const stream = new PassThrough()
withContentSri(cache, integrity, (cpath, sri) => {
return lstatAsync(cpath).then(stat => ({ cpath, sri, stat }))
return lstat(cpath).then((stat) => ({ cpath, sri, stat }))
}).then(({ cpath, sri, stat }) => {
return pipe(
fs.createReadStream(cpath),
@@ -70,17 +70,17 @@
return stream
}

let copyFileAsync
let copyFile
if (fs.copyFile) {
module.exports.copy = copy
module.exports.copy.sync = copySync
copyFileAsync = BB.promisify(fs.copyFile)
copyFile = BB.promisify(fs.copyFile)
}

function copy (cache, integrity, dest, opts) {
opts = ReadOpts(opts)
return withContentSri(cache, integrity, (cpath, sri) => {
return copyFileAsync(cpath, dest)
return copyFile(cpath, dest)
})
}

@@ -95,7 +95,7 @@ module.exports.hasContent = hasContent
function hasContent (cache, integrity) {
if (!integrity) { return BB.resolve(false) }
return withContentSri(cache, integrity, (cpath, sri) => {
return lstatAsync(cpath).then(stat => ({ size: stat.size, sri, stat }))
return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat }))
}).catch((err) => {
if (err.code === 'ENOENT') { return false }
if (err.code === 'EPERM') {
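Note that `copy` stays an optional export: it is attached only when `fs.copyFile` exists (Node 8.5+), so callers feature-detect it, as get.js does above with `if (read.copy)`. A hypothetical caller under that assumption (`copyOut` is illustrative, not part of the module):

```js
const BB = require('bluebird')
const fs = require('fs')
const read = require('./lib/content/read')

const writeFile = BB.promisify(fs.writeFile)

// Prefer the fs.copyFile-backed export when this Node version has it;
// otherwise fall back to reading into memory and writing out.
function copyOut (cache, integrity, dest) {
  if (read.copy) {
    return read.copy(cache, integrity, dest)
  }
  return read(cache, integrity).then((data) => writeFile(dest, data))
}
```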
2 changes: 1 addition & 1 deletion lib/content/rm.js
@@ -8,7 +8,7 @@ const rimraf = BB.promisify(require('rimraf'))

module.exports = rm
function rm (cache, integrity) {
return hasContent(cache, integrity).then(content => {
return hasContent(cache, integrity).then((content) => {
if (content) {
const sri = content.sri
if (sri) {
10 changes: 5 additions & 5 deletions lib/content/write.js
@@ -11,11 +11,11 @@ const path = require('path')
const pipe = BB.promisify(require('mississippi').pipe)
const rimraf = BB.promisify(require('rimraf'))
const ssri = require('ssri')
const to = require('mississippi').to
const { to } = require('mississippi')
const uniqueFilename = require('unique-filename')
const Y = require('../util/y.js')

const writeFileAsync = BB.promisify(fs.writeFile)
const writeFile = BB.promisify(fs.writeFile)

module.exports = write

@@ -37,7 +37,7 @@ function write (cache, data, opts) {
}
return makeTmp(cache, opts)
.then((tmp) => {
return writeFileAsync(
return writeFile(
tmp.target, data, { flag: 'wx' }
).then(() => moveToDestination(tmp, cache, sri, opts))
.then((result) => makeTmpDisposer(tmp, result))
@@ -69,7 +69,7 @@ function writeStream (cache, opts) {
e.code = 'ENODATA'
return ret.emit('error', e)
}
allDone.then(res => {
allDone.then((res) => {
res.integrity && ret.emit('integrity', res.integrity)
res.size !== null && ret.emit('size', res.size)
cb()
@@ -90,7 +90,7 @@ function handleContent (inputStream, cache, opts, errCheck) {
errCheck()
return pipeToTmp(
inputStream, cache, tmp.target, opts, errCheck
).then(res => {
).then((res) => {
return moveToDestination(
tmp, cache, res.integrity, opts, errCheck
).then(() => res)
22 changes: 10 additions & 12 deletions lib/entry-index.js
@@ -8,18 +8,16 @@ const figgyPudding = require('figgy-pudding')
const fixOwner = require('./util/fix-owner')
const fs = require('graceful-fs')
const hashToSegments = require('./util/hash-to-segments')
const ms = require('mississippi')
const { concat, from } = require('mississippi')
const path = require('path')
const ssri = require('ssri')
const Y = require('./util/y.js')

const indexV = require('../package.json')['cache-version'].index

const appendFileAsync = BB.promisify(fs.appendFile)
const readFileAsync = BB.promisify(fs.readFile)
const readdirAsync = BB.promisify(fs.readdir)
const concat = ms.concat
const from = ms.from
const appendFile = BB.promisify(fs.appendFile)
const readFile = BB.promisify(fs.readFile)
const readdir = BB.promisify(fs.readdir)

module.exports.NotFoundError = class NotFoundError extends Error {
constructor (cache, key) {
@@ -57,7 +55,7 @@ function insert (cache, key, integrity, opts) {
// question. So, we just slap the length in there and verify it on read.
//
// Thanks to @isaacs for the whiteboarding session that ended up with this.
return appendFileAsync(
return appendFile(
bucket, `\n${hashEntry(stringified)}\t${stringified}`
)
}).then(
@@ -106,7 +104,7 @@ function insertSync (cache, key, integrity, opts) {
module.exports.find = find
function find (cache, key) {
const bucket = bucketPath(cache, key)
return bucketEntries(bucket).then(entries => {
return bucketEntries(bucket).then((entries) => {
return entries.reduce((latest, next) => {
if (next && next.key === key) {
return formatEntry(cache, next)
@@ -175,7 +173,7 @@ function lsStream (cache) {
return acc
}, new Map())

return getKeyToEntry.then(reduced => {
return getKeyToEntry.then((reduced) => {
for (let entry of reduced.values()) {
const formatted = formatEntry(cache, entry)
formatted && stream.push(formatted)
@@ -205,9 +203,9 @@ function ls (cache) {
}

function bucketEntries (bucket, filter) {
return readFileAsync(
return readFile(
bucket, 'utf8'
).then(data => _bucketEntries(data, filter))
).then((data) => _bucketEntries(data, filter))
}

function bucketEntriesSync (bucket, filter) {
@@ -283,7 +281,7 @@ function formatEntry (cache, entry) {
}

function readdirOrEmpty (dir) {
return readdirAsync(dir)
return readdir(dir)
.catch((err) => {
if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
return []
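The `appendFile` call in `insert()` above writes one self-describing line per entry, which is what lets concurrent writers get away with plain appends. A sketch of that line format; only the `<hashEntry(json)>\t<json>` shape is visible in the diff, so the hash algorithm and the entry fields here are assumptions:

```js
const crypto = require('crypto')

// Assumed stand-in for the module's hashEntry() helper.
function hashEntry (str) {
  return crypto.createHash('sha256').update(str).digest('hex')
}

const stringified = JSON.stringify({
  key: 'my-key',
  integrity: 'sha512-...',
  time: Date.now()
})
const line = `\n${hashEntry(stringified)}\t${stringified}`
// Per the comment in insert(), entries are verified on read: a line
// whose recomputed hash does not match its JSON is presumably a torn
// write from a racing appender and can be skipped.
```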
2 changes: 1 addition & 1 deletion lib/util/fix-owner.js
@@ -44,7 +44,7 @@ function fixOwner (cache, filepath) {
return BB.resolve()
}

return BB.resolve(inferOwner(cache)).then(owner => {
return BB.resolve(inferOwner(cache)).then((owner) => {
const { uid, gid } = owner

// No need to override if it's already what we used.
30 changes: 17 additions & 13 deletions lib/verify.js
@@ -13,7 +13,10 @@ const path = require('path')
const rimraf = BB.promisify(require('rimraf'))
const ssri = require('ssri')

BB.promisifyAll(fs)
const stat = BB.promisify(fs.stat)
const truncate = BB.promisify(fs.truncate)
const writeFile = BB.promisify(fs.writeFile)
const readFile = BB.promisify(fs.readFile)

const VerifyOpts = figgyPudding({
concurrency: {
@@ -40,7 +43,7 @@ function verify (cache, opts) {
], (stats, step, i) => {
const label = step.name || `step #${i}`
const start = new Date()
return BB.resolve(step(cache, opts)).then(s => {
return BB.resolve(step(cache, opts)).then((s) => {
s && Object.keys(s).forEach(k => {
stats[k] = s[k]
})
@@ -96,7 +99,7 @@ function garbageCollect (cache, opts) {
follow: false,
nodir: true,
nosort: true
}).then(files => {
}).then((files) => {
return BB.resolve({
verifiedContent: 0,
reclaimedCount: 0,
@@ -109,7 +112,7 @@
const algo = split[split.length - 4]
const integrity = ssri.fromHex(digest, algo)
if (liveContent.has(integrity.toString())) {
return verifyContent(f, integrity).then(info => {
return verifyContent(f, integrity).then((info) => {
if (!info.valid) {
stats.reclaimedCount++
stats.badContentCount++
@@ -123,7 +126,7 @@
} else {
// No entries refer to this content. We can delete.
stats.reclaimedCount++
return fs.statAsync(f).then(s => {
return stat(f).then((s) => {
return rimraf(f).then(() => {
stats.reclaimedSize += s.size
return stats
@@ -137,9 +140,9 @@
}

function verifyContent (filepath, sri) {
return fs.statAsync(filepath).then(stat => {
return stat(filepath).then((s) => {
const contentInfo = {
size: stat.size,
size: s.size,
valid: true
}
return ssri.checkStream(
@@ -161,7 +164,7 @@

function rebuildIndex (cache, opts) {
opts.log.silly('verify', 'rebuilding index')
return index.ls(cache).then(entries => {
return index.ls(cache).then((entries) => {
const stats = {
missingContent: 0,
rejectedEntries: 0,
@@ -194,12 +197,12 @@
}

function rebuildBucket (cache, bucket, stats, opts) {
return fs.truncateAsync(bucket._path).then(() => {
return truncate(bucket._path).then(() => {
// This needs to be serialized because cacache explicitly
// lets very racy bucket conflicts clobber each other.
return BB.mapSeries(bucket, entry => {
const content = contentPath(cache, entry.integrity)
return fs.statAsync(content).then(() => {
return stat(content).then(() => {
return index.insert(cache, entry.key, entry.integrity, {
metadata: entry.metadata,
size: entry.size
@@ -225,15 +228,16 @@ function writeVerifile (cache, opts) {
const verifile = path.join(cache, '_lastverified')
opts.log.silly('verify', 'writing verifile to ' + verifile)
try {
return fs.writeFileAsync(verifile, '' + (+(new Date())))
return writeFile(verifile, '' + (+(new Date())))
} finally {
fixOwner.chownr.sync(cache, verifile)
}
}

module.exports.lastRun = lastRun

function lastRun (cache) {
return fs.readFileAsync(
return readFile(
path.join(cache, '_lastverified'), 'utf8'
).then(data => new Date(+data))
).then((data) => new Date(+data))
}
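A subtlety of the verify.js change: the module now defines a top-level `stat` function, so the old `then(stat => …)` parameter in `verifyContent` would have shadowed it; the diff renames the parameter to `s`. A trimmed sketch of the hazard (the module's actual fs import is not visible in these hunks):

```js
const BB = require('bluebird')
const fs = require('fs')

const stat = BB.promisify(fs.stat)

function verifyContent (filepath) {
  // Naming this parameter `stat` would shadow the promisified
  // function above for the whole callback body, so it is `s`.
  return stat(filepath).then((s) => ({ size: s.size, valid: true }))
}
```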
8 changes: 4 additions & 4 deletions put.js
@@ -4,7 +4,7 @@ const figgyPudding = require('figgy-pudding')
const index = require('./lib/entry-index')
const memo = require('./lib/memoization')
const write = require('./lib/content/write')
const to = require('mississippi').to
const { to } = require('mississippi')

const PutOpts = figgyPudding({
algorithms: {
@@ -25,10 +25,10 @@
module.exports = putData
function putData (cache, key, data, opts) {
opts = PutOpts(opts)
return write(cache, data, opts).then(res => {
return write(cache, data, opts).then((res) => {
return index.insert(
cache, key, res.integrity, opts.concat({ size: res.size })
).then(entry => {
).then((entry) => {
if (opts.memoize) {
memo.put(cache, entry, data, opts)
}
@@ -62,7 +62,7 @@ function putStream (cache, key, opts) {
})
}, cb => {
contentStream.end(() => {
index.insert(cache, key, integrity, opts.concat({ size })).then(entry => {
index.insert(cache, key, integrity, opts.concat({ size })).then((entry) => {
if (opts.memoize) {
memo.put(cache, entry, Buffer.concat(memoData, memoTotal), opts)
}
