This repository has been archived by the owner on Jul 3, 2019. It is now read-only.

Commit

feat(extract): append _resolved and _integrity automatically
zkat committed Feb 6, 2018
1 parent ad06925 commit 7550924
Showing 8 changed files with 108 additions and 22 deletions.
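In short, after this change an extracted package.json ends up with _resolved, _integrity, and _from filled in, whether the values are passed in through opts or picked up from the fetched manifest. A minimal usage sketch, reusing the spec and values from the new test added below; the destination path is illustrative and this calls extract.js directly rather than through pacote's public API:

const extract = require('./extract')  // pacote's extract.js, as changed below

extract('foo@1', './node_modules/foo', {
  resolved: 'https://stuff.is.here',
  integrity: 'sha1-deadbeef'
}).then(() => {
  // ./node_modules/foo/package.json now also contains:
  //   "_resolved": "https://stuff.is.here",
  //   "_integrity": "sha1-deadbeef",
  //   "_from": "foo@1"
})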
40 changes: 38 additions & 2 deletions extract.js
@@ -4,12 +4,18 @@ const BB = require('bluebird')

const cacache = require('cacache')
const extractStream = require('./lib/extract-stream')
const fs = require('fs')
const mkdirp = BB.promisify(require('mkdirp'))
const npa = require('npm-package-arg')
const optCheck = require('./lib/util/opt-check')
const path = require('path')
const retry = require('promise-retry')
const rimraf = BB.promisify(require('rimraf'))

const truncateAsync = BB.promisify(fs.truncate)
const readFileAsync = BB.promisify(fs.readFile)
const appendFileAsync = BB.promisify(fs.appendFile)

module.exports = extract
function extract (spec, dest, opts) {
opts = optCheck(opts)
@@ -60,7 +66,7 @@ function extract (spec, dest, opts) {

function extractByDigest (start, spec, dest, opts) {
return mkdirp(dest).then(() => {
const xtractor = extractStream(dest, opts)
const xtractor = extractStream(spec, dest, opts)
const cached = cacache.get.stream.byDigest(opts.cache, opts.integrity, opts)
cached.pipe(xtractor)
return new BB((resolve, reject) => {
@@ -80,18 +86,48 @@ function extractByDigest (start, spec, dest, opts) {

let fetch
function extractByManifest (start, spec, dest, opts) {
let integrity = opts.integrity
let resolved = opts.resolved
return mkdirp(dest).then(() => {
const xtractor = extractStream(dest, opts)
const xtractor = extractStream(spec, dest, opts)
if (!fetch) {
fetch = require('./lib/fetch')
}
const tardata = fetch.tarball(spec, opts)
if (!resolved) {
tardata.on('manifest', m => {
resolved = m._resolved
})
tardata.on('integrity', i => {
integrity = i
})
}
tardata.pipe(xtractor)
return new BB((resolve, reject) => {
tardata.on('error', reject)
xtractor.on('error', reject)
xtractor.on('close', resolve)
})
}).then(() => {
if (!opts.resolved) {
const pjson = path.join(dest, 'package.json')
return readFileAsync(pjson, 'utf8')
.then(str => {
return truncateAsync(pjson)
.then(() => {
return appendFileAsync(pjson, str.replace(
/}\s*$/,
`\n,"_resolved": ${
JSON.stringify(resolved || '')
}\n,"_integrity": ${
JSON.stringify(integrity || '')
}\n,"_from": ${
JSON.stringify(spec.toString())
}\n}`
))
})
})
}
}).then(() => {
opts.log.silly('pacote', `${spec} extracted in ${Date.now() - start}ms`)
}).catch(err => {
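When opts.resolved is not supplied, the branch above patches the already-extracted package.json with a plain string replacement instead of a JSON parse/stringify round trip, so the rest of the file stays byte-for-byte intact. The same regex trick reappears in lib/extract-stream.js below; isolated as a sketch, with an illustrative helper name and sample values, and assuming the manifest already has at least one field (otherwise the injected leading comma would produce invalid JSON):

function appendMetadata (pkgJsonText, resolved, integrity, spec) {
  // Swap the final closing brace for ",<fields>}" and leave everything else untouched.
  return pkgJsonText.replace(
    /}\s*$/,
    `\n,"_resolved": ${JSON.stringify(resolved || '')}` +
    `\n,"_integrity": ${JSON.stringify(integrity || '')}` +
    `\n,"_from": ${JSON.stringify(String(spec))}\n}`
  )
}

// appendMetadata('{"name":"foo","version":"1.0.0"}', 'https://stuff.is.here', 'sha1-deadbeef', 'foo@1')
// parses back to an object with name, version, _resolved, _integrity, and _from.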
26 changes: 25 additions & 1 deletion lib/extract-stream.js
@@ -1,5 +1,6 @@
'use strict'

const PassThrough = require('stream').PassThrough
const path = require('path')
const tar = require('tar')

@@ -10,7 +11,29 @@ function computeMode (fileMode, optMode, umask) {
return (fileMode | optMode) & ~(umask || 0)
}

function extractStream (dest, opts) {
function pkgJsonTransform (spec, opts) {
return entry => {
if (entry.path === 'package.json') {
const transformed = new PassThrough()
let str = ''
entry.on('end', () => transformed.end(str.replace(
/}\s*$/,
`\n,"_resolved": ${
JSON.stringify(opts.resolved || '')
}\n,"_integrity": ${
JSON.stringify(opts.integrity || '')
}\n,"_from": ${
JSON.stringify(spec.toString())
}\n}`
)))
entry.on('error', e => transformed.emit('error', e))
entry.on('data', d => { str += d })
return transformed
}
}
}

function extractStream (spec, dest, opts) {
opts = opts || {}
const sawIgnores = new Set()
return tar.x({
@@ -20,6 +43,7 @@
onwarn: msg => opts.log && opts.log.warn('tar', msg),
uid: opts.uid,
gid: opts.gid,
transform: opts.resolved && pkgJsonTransform(spec, opts),
onentry (entry) {
if (entry.type.toLowerCase() === 'file') {
entry.mode = computeMode(entry.mode, opts.fmode, opts.umask)
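pkgJsonTransform above follows a buffer-then-rewrite pattern: it drains the package.json entry itself and returns a PassThrough whose output tar writes to disk in place of the raw entry contents (the new test in test/extract-stream.js below asserts exactly that). The same pattern in isolation, here with a simple JSON merge standing in for the regex rewrite; the helper name and sample data are illustrative:

const { PassThrough } = require('stream')

// Buffer an incoming JSON stream, then emit a rewritten version of it.
function rewriteJsonStream (source, extraFields) {
  const out = new PassThrough()
  let str = ''
  source.on('data', d => { str += d })
  source.on('error', e => out.emit('error', e))
  source.on('end', () => {
    out.end(JSON.stringify(Object.assign(JSON.parse(str), extraFields), null, 2))
  })
  return out
}

// Usage: any readable source works where the tar entry would normally go.
const src = new PassThrough()
rewriteJsonStream(src, { _resolved: 'https://stuff.is.here', _integrity: 'sha1-deadbeef' })
  .pipe(process.stdout)
src.end('{"name":"foo","version":"1.0.0"}')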
6 changes: 3 additions & 3 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -60,7 +60,7 @@
"safe-buffer": "^5.1.1",
"semver": "^5.4.1",
"ssri": "^5.1.0",
"tar": "^4.2.0",
"tar": "^4.3.3",
"unique-filename": "^1.1.0",
"which": "^1.3.0"
},
2 changes: 1 addition & 1 deletion test/directory.js
@@ -78,7 +78,7 @@ test('supports directory deps', t => {
path.join(EXT, 'x', 'mybin'), 'utf8'
),
(xpkg, xsr, xbin) => {
t.deepEqual(JSON.parse(xpkg), pkg, 'extracted package.json')
t.similar(JSON.parse(xpkg), pkg, 'extracted package.json')
t.deepEqual(JSON.parse(xsr), sr, 'extracted npm-shrinkwrap.json')
t.deepEqual(xbin, 'console.log("hi there")', 'extracted binary')
}
2 changes: 1 addition & 1 deletion test/extract-stream.chown.js
@@ -61,7 +61,7 @@ test('accepts gid and uid opts', {skip: !process.getuid}, t => {
fs: fsClone
})
return mockTar(pkg, {stream: true}).then(tarStream => {
return pipe(tarStream, extractStream('.', {
return pipe(tarStream, extractStream('foo@1', '.', {
uid: NEWUID,
gid: NEWGID,
log: npmlog
10 changes: 5 additions & 5 deletions test/extract-stream.compute-mode.js
@@ -3,11 +3,11 @@ const test = require('tap').test
const computeMode = require('../lib/extract-stream.js')._computeMode

const tests = {
"same": {umask: 0o022, entryMode: 0o755, optMode: 0o755, result: 0o755},
"opt high": {umask: 0o022, entryMode: 0o755, optMode: 0o777, result: 0o755},
"entry high": {umask: 0o022, entryMode: 0o777, optMode: 0o755, result: 0o755},
"opt low": {umask: 0o022, entryMode: 0o000, optMode: 0o400, result: 0o400},
"entry low": {umask: 0o022, entryMode: 0o400, optMode: 0o000, result: 0o400}
'same': {umask: 0o022, entryMode: 0o755, optMode: 0o755, result: 0o755},
'opt high': {umask: 0o022, entryMode: 0o755, optMode: 0o777, result: 0o755},
'entry high': {umask: 0o022, entryMode: 0o777, optMode: 0o755, result: 0o755},
'opt low': {umask: 0o022, entryMode: 0o000, optMode: 0o400, result: 0o400},
'entry low': {umask: 0o022, entryMode: 0o400, optMode: 0o000, result: 0o400}
}

test('computeMode', t => {
42 changes: 34 additions & 8 deletions test/extract-stream.js
@@ -28,7 +28,7 @@ test('basic extraction', t => {
}
t.plan(2)
return mockTar(pkg, {stream: true}).then(tarStream => {
return pipe(tarStream, extractStream('./'))
return pipe(tarStream, extractStream('foo@1', './'))
}).then(() => {
return fs.readFileAsync('./package.json', 'utf8')
}).then(data => {
@@ -39,6 +39,32 @@
})
})

test('adds metadata fields if resolved/integrity are present', t => {
const pkg = {
'package.json': JSON.stringify({
name: 'foo',
version: '1.0.0'
}),
'index.js': 'console.log("hello world!")'
}
return mockTar(pkg, {stream: true}).then(tarStream => {
return pipe(tarStream, extractStream('foo@1', './', {
resolved: 'https://stuff.is.here',
integrity: 'sha1-deadbeef'
}))
}).then(() => {
return fs.readFileAsync('./package.json', 'utf8')
}).then(data => {
t.deepEqual(JSON.parse(data), {
name: 'foo',
version: '1.0.0',
_resolved: 'https://stuff.is.here',
_integrity: 'sha1-deadbeef',
_from: 'foo@1'
}, 'extracted package.json')
})
})

test('automatically handles gzipped tarballs', t => {
const pkg = {
'package.json': JSON.stringify({
@@ -48,7 +74,7 @@ test('automatically handles gzipped tarballs', t => {
'index.js': 'console.log("hello world!")'
}
return mockTar(pkg, {gzip: true, stream: true}).then(tarStream => {
return pipe(tarStream, extractStream('./', OPTS))
return pipe(tarStream, extractStream('foo@1', './', OPTS))
}).then(() => {
return BB.join(
fs.readFileAsync('./package.json', 'utf8'),
@@ -70,7 +96,7 @@ test('strips first item in path, even if not `package/`', t => {
'something-else/index.js': 'console.log("hello world!")'
}
return mockTar(pkg, {noPrefix: true, stream: true}).then(tarStream => {
return pipe(tarStream, extractStream('./', OPTS))
return pipe(tarStream, extractStream('foo@1', './', OPTS))
}).then(() => {
return BB.join(
fs.readFileAsync('./package.json', 'utf8'),
@@ -96,7 +122,7 @@ test('excludes symlinks', t => {
'symmylinky': { type: 'SymbolicLink', linkname: '../nowhere' }
}
return mockTar(pkg, {stream: true}).then(tarStream => {
return pipe(tarStream, extractStream('./', OPTS))
return pipe(tarStream, extractStream('foo@1', './', OPTS))
}).then(() => {
return BB.join(
fs.readFileAsync('./package.json', 'utf8').then(data => {
@@ -131,7 +157,7 @@ test('renames .gitignore to .npmignore if not present', t => {
'index.js': 'console.log("hello world!")',
'.gitignore': 'tada!'
}, {stream: true}).then(tarStream => {
return pipe(tarStream, extractStream('./no-npmignore', OPTS))
return pipe(tarStream, extractStream('foo@1', './no-npmignore', OPTS))
}).then(() => {
return fs.readFileAsync(
'./no-npmignore/.npmignore', 'utf8'
@@ -151,7 +177,7 @@ test('renames .gitignore to .npmignore if not present', t => {
'.gitignore': 'git!',
'.npmignore': 'npm!'
}, {stream: true}).then(tarStream => {
return pipe(tarStream, extractStream('./has-npmignore1', OPTS))
return pipe(tarStream, extractStream('foo@1', './has-npmignore1', OPTS))
}).then(() => {
return BB.join(
fs.readFileAsync(
@@ -182,7 +208,7 @@ test('renames .gitignore to .npmignore if not present', t => {
'.npmignore': 'npm!',
'.gitignore': 'git!'
}, {stream: true}).then(tarStream => {
return pipe(tarStream, extractStream('./has-npmignore2', OPTS))
return pipe(tarStream, extractStream('foo@1', './has-npmignore2', OPTS))
}).then(() => {
return BB.join(
fs.readFileAsync(
@@ -221,7 +247,7 @@ test('accepts dmode/fmode/umask opts', {
}
}
return mockTar(pkg, {stream: true}).then(tarStream => {
return pipe(tarStream, extractStream('./', {
return pipe(tarStream, extractStream('foo@1', './', {
dmode: 0o644,
fmode: 0o666,
umask: 0o022
