From c2a0948fb7b70862f92828e7b37b622566ed367e Mon Sep 17 00:00:00 2001 From: isaacs Date: Thu, 5 Aug 2021 22:28:21 -0700 Subject: [PATCH] fix: refactoring to pass tests on Windows This is a larger refactoring than I tend to prefer to do in a single commit, but here goes. - The path normalization of \ to / is made more comprehensive. - Checking to ensure we aren't overwriting the cwd is done earlier in the unpack process, and more thoroughly, so there is less need for repetitive checks later. - The cwd is checked at the start in our recursive mkdir, saving an extra fs.mkdir call which would almost always result in an EEXIST. - Many edge cases resulting in dangling file descriptors were found and addressed. (Much as I complain about Windows stubbornly refusing to delete files currently open, it did come in handy here.) - The Unpack[MAKEFS] methods are refactored for readability, and no longer rely on fall-through behavior which made the sync and async versions slightly different in some edge cases. - Many of the tests were refactored to use async rimraf (the better to avoid Windows problems) and more modern tap affordances. Note: coverage on Windows is not 100%, due to skipping many tests that use symbolic links. Given the value of having those code paths covered, I believe that adding istanbul hints to skip coverage of those portions of the code would be a bad idea. And given the complexity and hazards involved in mocking that much of the filesystem implementation, it's probably best to just let Windows not have 100% coverage. --- lib/mkdir.js | 65 ++--- lib/normalize-windows-path.js | 2 +- lib/read-entry.js | 7 +- lib/replace.js | 3 +- lib/unpack.js | 199 ++++++++++----- lib/write-entry.js | 28 ++- test/create.js | 13 +- test/extract.js | 79 +++--- test/pack.js | 35 +-- test/read-entry.js | 8 +- test/replace.js | 279 +++++++++++++--------- test/unpack.js | 438 ++++++++++++++++++++-------------- test/update.js | 298 ++++++++++++++--------- test/write-entry.js | 95 +++++--- 14 files changed, 950 insertions(+), 599 deletions(-) diff --git a/lib/mkdir.js b/lib/mkdir.js index f75ccaa9..a0719e6c 100644 --- a/lib/mkdir.js +++ b/lib/mkdir.js @@ -37,8 +37,17 @@ class CwdError extends Error { const cGet = (cache, key) => cache.get(normPath(key)) const cSet = (cache, key, val) => cache.set(normPath(key), val) +const checkCwd = (dir, cb) => { + fs.stat(dir, (er, st) => { + if (er || !st.isDirectory()) + er = new CwdError(dir, er && er.code || 'ENOTDIR') + cb(er) + }) +} + module.exports = (dir, opt, cb) => { dir = normPath(dir) + // if there's any overlap between mask and mode, // then we'll need an explicit chmod const umask = opt.umask @@ -73,18 +82,13 @@ module.exports = (dir, opt, cb) => { if (cache && cGet(cache, dir) === true) return done() - if (dir === cwd) { - return fs.stat(dir, (er, st) => { - if (er || !st.isDirectory()) - er = new CwdError(dir, er && er.code || 'ENOTDIR') - done(er) - }) - } + if (dir === cwd) + return checkCwd(dir, done) if (preserve) return mkdirp(dir, {mode}).then(made => done(null, made), done) - const sub = path.relative(cwd, dir) + const sub = normPath(path.relative(cwd, dir)) const parts = sub.split('/') mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done) } @@ -93,7 +97,7 @@ const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { if (!parts.length) return cb(null, created) const p = parts.shift() - const part = base + '/' + p + const part = normPath(path.resolve(base + '/' + p)) if (cGet(cache, part)) return mkdir_(part, parts, mode, cache, 
unlink, cwd, created, cb) fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) @@ -101,14 +105,11 @@ const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => { if (er) { - if (er.path && path.dirname(er.path) === cwd && - (er.code === 'ENOTDIR' || er.code === 'ENOENT')) - return cb(new CwdError(cwd, er.code)) - fs.lstat(part, (statEr, st) => { - if (statEr) + if (statEr) { + statEr.path = statEr.path && normPath(statEr.path) cb(statEr) - else if (st.isDirectory()) + } else if (st.isDirectory()) mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) else if (unlink) { fs.unlink(part, er => { @@ -127,6 +128,19 @@ const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => { } } +const checkCwdSync = dir => { + let ok = false + let code = 'ENOTDIR' + try { + ok = fs.statSync(dir).isDirectory() + } catch (er) { + code = er.code + } finally { + if (!ok) + throw new CwdError(dir, code) + } +} + module.exports.sync = (dir, opt) => { dir = normPath(dir) // if there's any overlap between mask and mode, @@ -158,29 +172,20 @@ module.exports.sync = (dir, opt) => { return done() if (dir === cwd) { - let ok = false - let code = 'ENOTDIR' - try { - ok = fs.statSync(dir).isDirectory() - } catch (er) { - code = er.code - } finally { - if (!ok) - throw new CwdError(dir, code) - } - done() - return + checkCwdSync(cwd) + return done() } if (preserve) return done(mkdirp.sync(dir, mode)) - const sub = path.relative(cwd, dir) + const sub = normPath(path.relative(cwd, dir)) const parts = sub.split('/') let created = null for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) { + part = normPath(path.resolve(part)) if (cGet(cache, part)) continue @@ -189,10 +194,6 @@ module.exports.sync = (dir, opt) => { created = created || part cSet(cache, part, true) } catch (er) { - if (er.path && path.dirname(er.path) === cwd && - (er.code === 'ENOTDIR' || er.code === 'ENOENT')) - return new CwdError(cwd, er.code) - const st = fs.lstatSync(part) if (st.isDirectory()) { cSet(cache, part, true) diff --git a/lib/normalize-windows-path.js b/lib/normalize-windows-path.js index 8e3c30a0..eb13ba01 100644 --- a/lib/normalize-windows-path.js +++ b/lib/normalize-windows-path.js @@ -5,4 +5,4 @@ const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform module.exports = platform !== 'win32' ? p => p - : p => p.replace(/\\/g, '/') + : p => p && p.replace(/\\/g, '/') diff --git a/lib/read-entry.js b/lib/read-entry.js index 6661cba5..183a6050 100644 --- a/lib/read-entry.js +++ b/lib/read-entry.js @@ -1,5 +1,6 @@ 'use strict' const MiniPass = require('minipass') +const normPath = require('./normalize-windows-path.js') const SLURP = Symbol('slurp') module.exports = class ReadEntry extends MiniPass { @@ -46,7 +47,7 @@ module.exports = class ReadEntry extends MiniPass { this.ignore = true } - this.path = header.path + this.path = normPath(header.path) this.mode = header.mode if (this.mode) this.mode = this.mode & 0o7777 @@ -58,7 +59,7 @@ module.exports = class ReadEntry extends MiniPass { this.mtime = header.mtime this.atime = header.atime this.ctime = header.ctime - this.linkpath = header.linkpath + this.linkpath = normPath(header.linkpath) this.uname = header.uname this.gname = header.gname @@ -93,7 +94,7 @@ module.exports = class ReadEntry extends MiniPass { // a global extended header, because that's weird. 
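// (path and linkpath values slurped from an extended header are also normalized below, since they may use \ separators when written on Windows)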
if (ex[k] !== null && ex[k] !== undefined && !(global && k === 'path')) - this[k] = ex[k] + this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k] } } } diff --git a/lib/replace.js b/lib/replace.js index e5e2a425..1374f3f2 100644 --- a/lib/replace.js +++ b/lib/replace.js @@ -170,7 +170,8 @@ const replace = (opt, files, cb) => { fs.fstat(fd, (er, st) => { if (er) - return reject(er) + return fs.close(fd, () => reject(er)) + getPos(fd, st.size, (er, position) => { if (er) return reject(er) diff --git a/lib/unpack.js b/lib/unpack.js index 38a7bbaf..32950159 100644 --- a/lib/unpack.js +++ b/lib/unpack.js @@ -40,14 +40,10 @@ const SKIP = Symbol('skip') const DOCHOWN = Symbol('doChown') const UID = Symbol('uid') const GID = Symbol('gid') +const CHECKED_CWD = Symbol('checkedCwd') const crypto = require('crypto') const getFlag = require('./get-write-flag.js') -/* istanbul ignore next */ -const neverCalled = () => { - throw new Error('sync function called cb somehow?!?') -} - // Unlinks on Windows are not atomic. // // This means that if you have a file entry, followed by another @@ -115,6 +111,8 @@ class Unpack extends Parser { super(opt) + this[CHECKED_CWD] = false + this.reservations = pathReservations() this.transform = typeof opt.transform === 'function' ? opt.transform : null @@ -217,8 +215,6 @@ class Unpack extends Parser { if (parts.length < this.strip) return false entry.path = parts.slice(this.strip).join('/') - if (entry.path === '' && entry.type !== 'Directory' && entry.type !== 'GNUDumpDir') - return false if (entry.type === 'Link') { const linkparts = normPath(entry.linkpath).split('/') @@ -249,18 +245,26 @@ class Unpack extends Parser { } } - // only encode : chars that aren't drive letter indicators - if (this.win32) { - const parsed = path.win32.parse(entry.path) - entry.path = parsed.root === '' ? wc.encode(entry.path) - : parsed.root + wc.encode(entry.path.substr(parsed.root.length)) - } - if (path.isAbsolute(entry.path)) - entry.absolute = normPath(entry.path) + entry.absolute = normPath(path.resolve(entry.path)) else entry.absolute = normPath(path.resolve(this.cwd, entry.path)) + // an archive can set properties on the extraction directory, but it + // may not replace the cwd with a different kind of thing entirely. + if (entry.absolute === this.cwd && + entry.type !== 'Directory' && + entry.type !== 'GNUDumpDir') + return false + + // only encode : chars that aren't drive letter indicators + if (this.win32) { + const { root: aRoot } = path.win32.parse(entry.absolute) + entry.absolute = aRoot + wc.encode(entry.absolute.substr(aRoot.length)) + const { root: pRoot } = path.win32.parse(entry.path) + entry.path = pRoot + wc.encode(entry.path.substr(pRoot.length)) + } + return true } @@ -350,6 +354,7 @@ class Unpack extends Parser { stream.on('error', er => { if (stream.fd) fs.close(stream.fd, () => {}) + // flush all the data out so that we aren't left hanging // if the error wasn't actually fatal. otherwise the parse // is blocked, and we never proceed. 
@@ -364,6 +369,7 @@ class Unpack extends Parser { /* istanbul ignore else - we should always have a fd by now */ if (stream.fd) fs.close(stream.fd, () => {}) + this[ONERROR](er, entry) fullyDone() return @@ -509,32 +515,76 @@ class Unpack extends Parser { if (entry.type !== 'Directory') pruneCache(this.dirCache, entry.absolute) - this[MKDIR](path.dirname(entry.absolute), this.dmode, er => { - if (er) { - this[ONERROR](er, entry) - done() - return + const checkCwd = () => { + this[MKDIR](this.cwd, this.dmode, er => { + if (er) { + this[ONERROR](er, entry) + done() + return + } + this[CHECKED_CWD] = true + start() + }) + } + + const start = () => { + if (entry.absolute !== this.cwd) { + const parent = normPath(path.dirname(entry.absolute)) + if (parent !== this.cwd) { + return this[MKDIR](parent, this.dmode, er => { + if (er) { + this[ONERROR](er, entry) + done() + return + } + afterMakeParent() + }) + } } - fs.lstat(entry.absolute, (er, st) => { + afterMakeParent() + } + + const afterMakeParent = () => { + fs.lstat(entry.absolute, (lstatEr, st) => { if (st && (this.keep || this.newer && st.mtime > entry.mtime)) { this[SKIP](entry) done() - } else if (er || this[ISREUSABLE](entry, st)) - this[MAKEFS](null, entry, done) - else if (st.isDirectory()) { + return + } + if (lstatEr || this[ISREUSABLE](entry, st)) + return this[MAKEFS](null, entry, done) + + if (st.isDirectory()) { if (entry.type === 'Directory') { - if (!this.noChmod && (!entry.mode || (st.mode & 0o7777) === entry.mode)) - this[MAKEFS](null, entry, done) - else { - fs.chmod(entry.absolute, entry.mode, - er => this[MAKEFS](er, entry, done)) - } - } else - fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry, done)) - } else - unlinkFile(entry.absolute, er => this[MAKEFS](er, entry, done)) + const needChmod = !this.noChmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode + const afterChmod = er => this[MAKEFS](er, entry, done) + if (!needChmod) + return afterChmod() + return fs.chmod(entry.absolute, entry.mode, afterChmod) + } + // not a dir entry, have to remove it. 
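+          // (never rmdir the cwd itself; that case falls through to the +          // cwd check below, and [MAKEFS] surfaces the resulting error)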
+ if (entry.absolute !== this.cwd) { + return fs.rmdir(entry.absolute, er => + this[MAKEFS](er, entry, done)) + } + } + + // not a dir, and not reusable + // don't remove if the cwd, we want that error + if (entry.absolute === this.cwd) + return this[MAKEFS](null, entry, done) + + unlinkFile(entry.absolute, er => + this[MAKEFS](er, entry, done)) }) - }) + } + + if (this[CHECKED_CWD]) + start() + else + checkCwd() } [MAKEFS] (er, entry, done) { @@ -576,38 +626,67 @@ } } +const callSync = fn => { + try { + return [null, fn()] + } catch (er) { + return [er, null] + } +} class UnpackSync extends Unpack { + [MAKEFS] (er, entry) { + return super[MAKEFS](er, entry, () => {}) + } + [CHECKFS] (entry) { if (entry.type !== 'Directory') pruneCache(this.dirCache, entry.absolute) - const er = this[MKDIR](path.dirname(entry.absolute), this.dmode, neverCalled) - if (er) - return this[ONERROR](er, entry) - try { - const st = fs.lstatSync(entry.absolute) - if (this.keep || this.newer && st.mtime > entry.mtime) - return this[SKIP](entry) - else if (this[ISREUSABLE](entry, st)) - return this[MAKEFS](null, entry, neverCalled) - else { - try { - if (st.isDirectory()) { - if (entry.type === 'Directory') { - if (!this.noChmod && entry.mode && (st.mode & 0o7777) !== entry.mode) - fs.chmodSync(entry.absolute, entry.mode) - } else - fs.rmdirSync(entry.absolute) - } else - unlinkFileSync(entry.absolute) - return this[MAKEFS](null, entry, neverCalled) - } catch (er) { - return this[ONERROR](er, entry) - } + if (!this[CHECKED_CWD]) { + const er = this[MKDIR](this.cwd, this.dmode) + if (er) + return this[ONERROR](er, entry) + this[CHECKED_CWD] = true + } + + // don't bother to make the parent if the current entry is the cwd, + // we've already checked it. + if (entry.absolute !== this.cwd) { + const parent = normPath(path.dirname(entry.absolute)) + if (parent !== this.cwd) { + const mkParent = this[MKDIR](parent, this.dmode) + if (mkParent) + return this[ONERROR](mkParent, entry) } - } catch (er) { - return this[MAKEFS](null, entry, neverCalled) } + + const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute)) + if (st && (this.keep || this.newer && st.mtime > entry.mtime)) + return this[SKIP](entry) + + if (lstatEr || this[ISREUSABLE](entry, st)) + return this[MAKEFS](null, entry) + + if (st.isDirectory()) { + if (entry.type === 'Directory') { + const needChmod = !this.noChmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode + const [er] = needChmod ? callSync(() => { + fs.chmodSync(entry.absolute, entry.mode) + }) : [] + return this[MAKEFS](er, entry) + } + // not a dir entry, have to remove it + const [er] = callSync(() => fs.rmdirSync(entry.absolute)) + return this[MAKEFS](er, entry) + } + + // not a dir, and not reusable. + // don't remove if it's the cwd, since we want that error. + const [er] = entry.absolute === this.cwd ?
[] + : callSync(() => unlinkFileSync(entry.absolute)) + this[MAKEFS](er, entry) } [FILE] (entry, _) { diff --git a/lib/write-entry.js b/lib/write-entry.js index 598fe8ee..3702f2ae 100644 --- a/lib/write-entry.js +++ b/lib/write-entry.js @@ -9,7 +9,7 @@ const stripSlash = require('./strip-trailing-slashes.js') const prefixPath = (path, prefix) => { if (!prefix) - return path + return normPath(path) path = normPath(path).replace(/^\.(\/|$)/, '') return stripSlash(prefix) + '/' + path } @@ -33,6 +33,7 @@ const MODE = Symbol('mode') const AWAITDRAIN = Symbol('awaitDrain') const ONDRAIN = Symbol('ondrain') const PREFIX = Symbol('prefix') +const HAD_ERROR = Symbol('hadError') const warner = require('./warn-mixin.js') const winchars = require('./winchars.js') const stripAbsolutePath = require('./strip-absolute-path.js') @@ -49,18 +50,18 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { // suppress atime, ctime, uid, gid, uname, gname this.portable = !!opt.portable // until node has builtin pwnam functions, this'll have to do - this.myuid = process.getuid && process.getuid() + this.myuid = process.getuid && process.getuid() || 0 this.myuser = process.env.USER || '' this.maxReadSize = opt.maxReadSize || maxReadSize this.linkCache = opt.linkCache || new Map() this.statCache = opt.statCache || new Map() this.preservePaths = !!opt.preservePaths - this.cwd = opt.cwd || process.cwd() + this.cwd = normPath(opt.cwd || process.cwd()) this.strict = !!opt.strict this.noPax = !!opt.noPax this.noMtime = !!opt.noMtime this.mtime = opt.mtime || null - this.prefix = opt.prefix || null + this.prefix = opt.prefix ? normPath(opt.prefix) : null this.fd = null this.blockLen = null @@ -85,6 +86,8 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { this.win32 = !!opt.win32 || process.platform === 'win32' if (this.win32) { + // force the \ to / normalization, since we might not *actually* + // be on windows, but want \ to be considered a path separator. this.path = winchars.decode(this.path.replace(/\\/g, '/')) p = p.replace(/\\/g, '/') } @@ -107,6 +110,12 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { this[LSTAT]() } + emit (ev, ...data) { + if (ev === 'error') + this[HAD_ERROR] = true + return super.emit(ev, ...data) + } + [LSTAT] () { fs.lstat(this.absolute, (er, stat) => { if (er) @@ -203,14 +212,14 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { } [ONREADLINK] (linkpath) { - this.linkpath = linkpath.replace(/\\/g, '/') + this.linkpath = normPath(linkpath) this[HEADER]() this.end() } [HARDLINK] (linkpath) { this.type = 'Link' - this.linkpath = path.relative(this.cwd, linkpath).replace(/\\/g, '/') + this.linkpath = normPath(path.relative(this.cwd, linkpath)) this.stat.size = 0 this[HEADER]() this.end() @@ -244,6 +253,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { [ONOPENFILE] (fd) { this.fd = fd + if (this[HAD_ERROR]) + return this[CLOSE]() + this.blockLen = 512 * Math.ceil(this.stat.size / 512) this.blockRemain = this.blockLen const bufLen = Math.min(this.blockLen, this.maxReadSize) @@ -405,7 +417,7 @@ const WriteEntryTar = warner(class WriteEntryTar extends MiniPass { this.prefix = opt.prefix || null - this.path = readEntry.path + this.path = normPath(readEntry.path) this.mode = this[MODE](readEntry.mode) this.uid = this.portable ? null : readEntry.uid this.gid = this.portable ? null : readEntry.gid @@ -415,7 +427,7 @@ const WriteEntryTar = warner(class WriteEntryTar extends MiniPass { this.mtime = this.noMtime ? 
null : opt.mtime || readEntry.mtime this.atime = this.portable ? null : readEntry.atime this.ctime = this.portable ? null : readEntry.ctime - this.linkpath = readEntry.linkpath + this.linkpath = normPath(readEntry.linkpath) if (typeof opt.onwarn === 'function') this.on('warn', opt.onwarn) diff --git a/test/create.js b/test/create.js index 13653b1f..83472f1d 100644 --- a/test/create.js +++ b/test/create.js @@ -1,5 +1,6 @@ 'use strict' +const isWindows = process.platform === 'win32' const t = require('tap') const c = require('../lib/create.js') const list = require('../lib/list.js') @@ -12,6 +13,7 @@ const mkdirp = require('mkdirp') const spawn = require('child_process').spawn const Pack = require('../lib/pack.js') const mutateFS = require('mutate-fs') +const {promisify} = require('util') const readtar = (file, cb) => { const child = spawn('tar', ['tf', file]) @@ -21,12 +23,11 @@ const readtar = (file, cb) => { cb(code, signal, Buffer.concat(out).toString())) } -t.teardown(_ => rimraf.sync(dir)) +t.teardown(() => new Promise(resolve => rimraf(dir, resolve))) -t.test('setup', t => { - rimraf.sync(dir) - mkdirp.sync(dir) - t.end() +t.before(async () => { + await promisify(rimraf)(dir) + await mkdirp(dir) }) t.test('no cb if sync or without file', t => { @@ -88,7 +89,7 @@ t.test('create file', t => { }) t.test('with specific mode', t => { - const mode = 0o740 + const mode = isWindows ? 0o666 : 0o740 t.test('sync', t => { const file = path.resolve(dir, 'sync-mode.tar') c({ diff --git a/test/extract.js b/test/extract.js index c52e7d3a..16a52bd9 100644 --- a/test/extract.js +++ b/test/extract.js @@ -7,47 +7,46 @@ const fs = require('fs') const extractdir = path.resolve(__dirname, 'fixtures/extract') const tars = path.resolve(__dirname, 'fixtures/tars') const mkdirp = require('mkdirp') -const rimraf = require('rimraf') +const {promisify} = require('util') +const rimraf = promisify(require('rimraf')) const mutateFS = require('mutate-fs') -t.teardown(_ => rimraf.sync(extractdir)) +t.teardown(_ => rimraf(extractdir)) t.test('basic extracting', t => { const file = path.resolve(tars, 'utf8.tar') const dir = path.resolve(extractdir, 'basic') - t.beforeEach(() => { - rimraf.sync(dir) - mkdirp.sync(dir) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) - const check = t => { + const check = async t => { fs.lstatSync(dir + '/Ω.txt') fs.lstatSync(dir + '/🌟.txt') t.throws(_ => fs.lstatSync(dir + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' + '/f/o/l/d/e/r/-/p/a/t/h/Ω.txt')) - rimraf.sync(dir) + await rimraf(dir) t.end() } const files = ['🌟.txt', 'Ω.txt'] t.test('sync', t => { x({ file: file, sync: true, C: dir }, files) - check(t) + return check(t) }) t.test('async promisey', t => { - return x({ file: file, cwd: dir }, files).then(_ => { - check(t) - }) + return x({ file: file, cwd: dir }, files).then(_ => check(t)) }) t.test('async cb', t => { return x({ file: file, cwd: dir }, files, er => { if (er) throw er - check(t) + return check(t) }) }) @@ -58,18 +57,18 @@ t.test('file list and filter', t => { const file = path.resolve(tars, 'utf8.tar') const dir = path.resolve(extractdir, 'filter') - t.beforeEach(() => { - rimraf.sync(dir) - mkdirp.sync(dir) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) - const check = t => { + const check = async t => { fs.lstatSync(dir + '/Ω.txt') t.throws(_ => fs.lstatSync(dir + '/🌟.txt')) t.throws(_ => fs.lstatSync(dir + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' + '/f/o/l/d/e/r/-/p/a/t/h/Ω.txt')) - rimraf.sync(dir) + await rimraf(dir) t.end() } 
@@ -77,12 +76,12 @@ t.test('file list and filter', t => { t.test('sync', t => { x({ filter: filter, file: file, sync: true, C: dir }, ['🌟.txt', 'Ω.txt']) - check(t) + return check(t) }) t.test('async promisey', t => { return x({ filter: filter, file: file, cwd: dir }, ['🌟.txt', 'Ω.txt']).then(_ => { - check(t) + return check(t) }) }) @@ -90,7 +89,7 @@ t.test('file list and filter', t => { return x({ filter: filter, file: file, cwd: dir }, ['🌟.txt', 'Ω.txt'], er => { if (er) throw er - check(t) + return check(t) }) }) @@ -101,28 +100,28 @@ t.test('no file list', t => { const file = path.resolve(tars, 'body-byte-counts.tar') const dir = path.resolve(extractdir, 'no-list') - t.beforeEach(() => { - rimraf.sync(dir) - mkdirp.sync(dir) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) - const check = t => { + const check = async t => { t.equal(fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, 1024) t.equal(fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, 512) t.equal(fs.lstatSync(path.resolve(dir, 'one-byte.txt')).size, 1) t.equal(fs.lstatSync(path.resolve(dir, 'zero-byte.txt')).size, 0) - rimraf.sync(dir) + await rimraf(dir) t.end() } t.test('sync', t => { x({ file: file, sync: true, C: dir }) - check(t) + return check(t) }) t.test('async promisey', t => { return x({ file: file, cwd: dir }).then(_ => { - check(t) + return check(t) }) }) @@ -130,7 +129,7 @@ t.test('no file list', t => { return x({ file: file, cwd: dir }, er => { if (er) throw er - check(t) + return check(t) }) }) @@ -142,28 +141,28 @@ t.test('read in itty bits', t => { const file = path.resolve(tars, 'body-byte-counts.tar') const dir = path.resolve(extractdir, 'no-list') - t.beforeEach(() => { - rimraf.sync(dir) - mkdirp.sync(dir) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) - const check = t => { + const check = async t => { t.equal(fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, 1024) t.equal(fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, 512) t.equal(fs.lstatSync(path.resolve(dir, 'one-byte.txt')).size, 1) t.equal(fs.lstatSync(path.resolve(dir, 'zero-byte.txt')).size, 0) - rimraf.sync(dir) + await rimraf(dir) t.end() } t.test('sync', t => { x({ file: file, sync: true, C: dir, maxReadSize: maxReadSize }) - check(t) + return check(t) }) t.test('async promisey', t => { return x({ file: file, cwd: dir, maxReadSize: maxReadSize }).then(_ => { - check(t) + return check(t) }) }) @@ -171,7 +170,7 @@ t.test('read in itty bits', t => { return x({ file: file, cwd: dir, maxReadSize: maxReadSize }, er => { if (er) throw er - check(t) + return check(t) }) }) @@ -206,15 +205,15 @@ t.test('read fail', t => { t.end() }) -t.test('sync gzip error edge case test', t => { +t.test('sync gzip error edge case test', async t => { const file = path.resolve(__dirname, 'fixtures/sync-gzip-fail.tgz') const dir = path.resolve(__dirname, 'sync-gzip-fail') const cwd = process.cwd() - mkdirp.sync(dir + '/x') + await mkdirp(dir + '/x') process.chdir(dir) - t.teardown(() => { + t.teardown(async () => { process.chdir(cwd) - rimraf.sync(dir) + await rimraf(dir) }) x({ diff --git a/test/pack.js b/test/pack.js index 54a24a69..2df29614 100644 --- a/test/pack.js +++ b/test/pack.js @@ -18,6 +18,8 @@ const EE = require('events').EventEmitter const rimraf = require('rimraf') const mkdirp = require('mkdirp') const ReadEntry = require('../lib/read-entry.js') +const isWindows = process.platform === 'win32' +const normPath = require('../lib/normalize-windows-path.js') const ctime = new 
Date('2017-05-10T01:03:12.000Z') const atime = new Date('2017-04-17T00:00:00.000Z') @@ -58,7 +60,7 @@ t.test('pack a file', t => { cksumValid: true, needPax: false, path: 'one-byte.txt', - mode: 0o644, + mode: isWindows ? 0o666 : 0o644, size: 1, mtime: mtime, cksum: Number, @@ -102,7 +104,7 @@ t.test('pack a file with a prefix', t => { cksumValid: true, needPax: false, path: 'package/.dotfile', - mode: 0o644, + mode: isWindows ? 0o666 : 0o644, size: 2, mtime: mtime, cksum: Number, @@ -217,7 +219,7 @@ t.test('use process cwd if cwd not specified', t => { cksumValid: true, needPax: false, path: 'dir/', - mode: 0o755, + mode: isWindows ? 0o777 : 0o755, size: 0, mtime: mtime, cksum: Number, @@ -245,7 +247,7 @@ t.test('use process cwd if cwd not specified', t => { cksumValid: true, needPax: false, path: 'dir/x', - mode: 0o644, + mode: isWindows ? 0o666 : 0o644, size: 0, mtime: mtime, cksum: Number, @@ -283,7 +285,7 @@ t.test('filter', t => { cksumValid: true, needPax: false, path: 'dir/', - mode: 0o755, + mode: isWindows ? 0o777 : 0o755, size: 0, mtime: mtime, cksum: Number, @@ -329,7 +331,7 @@ t.test('add the same dir twice (exercise cache code)', t => { cksumValid: true, needPax: false, path: 'dir/', - mode: 0o755, + mode: isWindows ? 0o777 : 0o755, size: 0, mtime: mtime, cksum: Number, @@ -575,7 +577,7 @@ t.test('pipe into a slow reader', t => { cksumValid: true, needPax: false, path: 'long-path/', - mode: 0o755, + mode: isWindows ? 0o777 : 0o755, size: 0, mtime: mtime, cksum: Number, @@ -634,7 +636,7 @@ t.test('pipe into a slow gzip reader', t => { cksumValid: true, needPax: false, path: 'long-path/', - mode: 0o755, + mode: isWindows ? 0o777 : 0o755, size: 0, mtime: mtime, cksum: Number, @@ -700,11 +702,13 @@ t.test('warnings', t => { const data = Buffer.concat(out) t.equal(data.length, 2048) t.match(warnings, [[ - 'TAR_ENTRY_INFO', /stripping .* from absolute path/, { path: f }, + 'TAR_ENTRY_INFO', + /stripping .* from absolute path/, + { path: normPath(f) }, ]]) t.match(new Header(data), { - path: f.replace(/^(\/|[a-z]:\\\\)/, ''), + path: normPath(f).replace(/^(\/|[a-z]:\/)/i, ''), }) t.end() }) @@ -728,7 +732,7 @@ t.test('warnings', t => { t.equal(warnings.length, 0) t.match(new Header(data), { - path: f, + path: normPath(f), }) t.end() }) @@ -741,7 +745,10 @@ t.test('warnings', t => { strict: true, cwd: files, }).end(f).on('error', e => { - t.match(e, { message: /stripping .* from absolute path/, path: f }) + t.match(e, { + message: /stripping .* from absolute path/, + path: normPath(f), + }) t.end() }) }) @@ -794,7 +801,7 @@ t.test('no dir recurse', t => { t.end() }) -t.test('follow', t => { +t.test('follow', { skip: isWindows && 'file symlinks not available' }, t => { const check = (out, t) => { const data = Buffer.concat(out) t.equal(data.length, 2048) @@ -803,7 +810,7 @@ t.test('follow', t => { cksumValid: true, needPax: false, path: 'symlink', - mode: 0o644, + mode: isWindows ? 
0o666 : 0o644, size: 26, }) t.match(data.slice(512).toString(), /this link is like diamond\n\0+$/) diff --git a/test/read-entry.js b/test/read-entry.js index 19bf2a09..4e12e87a 100644 --- a/test/read-entry.js +++ b/test/read-entry.js @@ -5,7 +5,7 @@ const Header = require('../lib/header.js') t.test('create read entry', t => { const h = new Header({ - path: 'foo.txt', + path: 'oof.txt', mode: 0o755, uid: 24561, gid: 20, @@ -19,17 +19,17 @@ t.test('create read entry', t => { }) h.encode() - const entry = new ReadEntry(h, { x: 'y' }, { z: 0, a: null, b: undefined }) + const entry = new ReadEntry(h, { x: 'y', path: 'foo.txt' }, { z: 0, a: null, b: undefined }) t.ok(entry.header.cksumValid, 'header checksum should be valid') t.match(entry, { - extended: { x: 'y' }, + extended: { x: 'y', path: 'foo.txt' }, globalExtended: { z: 0, a: null, b: undefined }, header: { cksumValid: true, needPax: false, - path: 'foo.txt', + path: 'oof.txt', mode: 0o755, uid: 24561, gid: 20, diff --git a/test/replace.js b/test/replace.js index 0ad04489..80326b72 100644 --- a/test/replace.js +++ b/test/replace.js @@ -3,57 +3,29 @@ const t = require('tap') const r = require('../lib/replace.js') const path = require('path') const fs = require('fs') -const mkdirp = require('mkdirp') -const rimraf = require('rimraf') const mutateFS = require('mutate-fs') const list = require('../lib/list.js') +const {resolve} = require('path') const fixtures = path.resolve(__dirname, 'fixtures') -const dir = path.resolve(fixtures, 'replace') const tars = path.resolve(fixtures, 'tars') -const file = dir + '/body-byte-counts.tar' -const fileNoNulls = dir + '/no-null-eof.tar' -const fileTruncHead = dir + '/truncated-head.tar' -const fileTruncBody = dir + '/truncated-body.tar' -const fileNonExistent = dir + '/does-not-exist.tar' -const fileZeroByte = dir + '/zero.tar' -const fileEmpty = dir + '/empty.tar' -const fileCompressed = dir + '/compressed.tgz' const zlib = require('zlib') const spawn = require('child_process').spawn -t.teardown(_ => rimraf.sync(dir)) - -const reset = () => { - rimraf.sync(dir) - mkdirp.sync(dir) - const data = fs.readFileSync(tars + '/body-byte-counts.tar') - fs.writeFileSync(file, data) - - const dataNoNulls = data.slice(0, data.length - 1024) - fs.writeFileSync(fileNoNulls, dataNoNulls) - - const dataTruncHead = Buffer.concat([dataNoNulls, data.slice(0, 500)]) - fs.writeFileSync(fileTruncHead, dataTruncHead) - - const dataTruncBody = Buffer.concat([dataNoNulls, data.slice(0, 700)]) - fs.writeFileSync(fileTruncBody, dataTruncBody) - - fs.writeFileSync(fileZeroByte, '') - fs.writeFileSync(fileEmpty, Buffer.alloc(1024)) - - fs.writeFileSync(fileCompressed, zlib.gzipSync(data)) +const data = fs.readFileSync(tars + '/body-byte-counts.tar') +const dataNoNulls = data.slice(0, data.length - 1024) +const fixtureDef = { + 'body-byte-counts.tar': data, + 'no-null-eof.tar': dataNoNulls, + 'truncated-head.tar': Buffer.concat([dataNoNulls, data.slice(0, 500)]), + 'truncated-body.tar': Buffer.concat([dataNoNulls, data.slice(0, 700)]), + 'zero.tar': Buffer.from(''), + 'empty.tar': Buffer.alloc(512), + 'compressed.tgz': zlib.gzipSync(data), } -t.test('setup', t => { - reset() - t.end() -}) - t.test('basic file add to archive (good or truncated)', t => { - t.beforeEach(reset) - const check = (file, t) => { const c = spawn('tar', ['tf', file], { stdio: [0, 'pipe', 2] }) const out = [] @@ -61,7 +33,7 @@ t.test('basic file add to archive (good or truncated)', t => { c.on('close', (code, signal) => { t.equal(code, 0) t.equal(signal, null) 
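// (split on /\r?\n/ so the assertion also passes when the system tar emits \r\n line endings, e.g. on Windows)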
- const actual = Buffer.concat(out).toString().trim().split('\n') + const actual = Buffer.concat(out).toString().trim().split(/\r?\n/) t.same(actual, [ '1024-bytes.txt', '512-bytes.txt', @@ -73,50 +45,68 @@ t.test('basic file add to archive (good or truncated)', t => { }) } - ;[file, - fileNoNulls, - fileTruncHead, - fileTruncBody, - ].forEach(file => { - const fileList = [path.basename(__filename)] - t.test(path.basename(file), t => { - t.test('sync', t => { + const files = [ + 'body-byte-counts.tar', + 'no-null-eof.tar', + 'truncated-head.tar', + 'truncated-body.tar', + ] + const td = files.map(f => [f, fixtureDef[f]]).reduce((s, [k, v]) => { + s[k] = v + return s + }, {}) + const fileList = [path.basename(__filename)] + t.test('sync', t => { + t.plan(files.length) + const dir = t.testdir(td) + for (const file of files) { + t.test(file, t => { r({ sync: true, - file: file, + file: resolve(dir, file), cwd: __dirname, }, fileList) - check(file, t) + check(resolve(dir, file), t) }) + } + }) - t.test('async cb', t => { + t.test('async cb', t => { + t.plan(files.length) + const dir = t.testdir(td) + for (const file of files) { + t.test(file, t => { r({ - file: file, + file: resolve(dir, file), cwd: __dirname, }, fileList, er => { if (er) throw er - check(file, t) + check(resolve(dir, file), t) }) }) + } + }) - t.test('async promise', t => { + t.test('async', t => { + t.plan(files.length) + const dir = t.testdir(td) + for (const file of files) { + t.test(file, t => { r({ - file: file, + file: resolve(dir, file), cwd: __dirname, - }, fileList).then(_ => check(file, t)) + }, fileList).then(() => { + check(resolve(dir, file), t) + }) }) - - t.end() - }) + } }) t.end() }) t.test('add to empty archive', t => { - t.beforeEach(reset) - const check = (file, t) => { const c = spawn('tar', ['tf', file]) const out = [] @@ -132,70 +122,91 @@ t.test('add to empty archive', t => { }) } - ;[fileNonExistent, - fileEmpty, - fileZeroByte, - ].forEach(file => { - t.test(path.basename(file), t => { - t.test('sync', t => { + const files = [ + 'empty.tar', + 'zero.tar', + ] + const td = files.map(f => [f, fixtureDef[f]]).reduce((s, [k, v]) => { + s[k] = v + return s + }, {}) + files.push('not-existing.tar') + + t.test('sync', t => { + const dir = t.testdir(td) + t.plan(files.length) + for (const file of files) { + t.test(file, t => { r({ sync: true, - file: file, + file: resolve(dir, file), cwd: __dirname, }, [path.basename(__filename)]) - check(file, t) + check(resolve(dir, file), t) }) + } + }) - t.test('async cb', t => { + t.test('async cb', t => { + const dir = t.testdir(td) + t.plan(files.length) + for (const file of files) { + t.test(file, t => { r({ - file: file, + file: resolve(dir, file), cwd: __dirname, }, [path.basename(__filename)], er => { if (er) throw er - check(file, t) + check(resolve(dir, file), t) }) }) + } + }) - t.test('async promise', t => { + t.test('async', async t => { + const dir = t.testdir(td) + t.plan(files.length) + for (const file of files) { + t.test(file, t => { r({ - file: file, + file: resolve(dir, file), cwd: __dirname, - }, [path.basename(__filename)]).then(_ => check(file, t)) + }, [path.basename(__filename)]).then(() => { + check(resolve(dir, file), t) + }) }) - - t.end() - }) + } }) t.end() }) -t.test('cannot append to gzipped archives', t => { - reset() +t.test('cannot append to gzipped archives', async t => { + const dir = t.testdir({ + 'compressed.tgz': fixtureDef['compressed.tgz'], + }) + const file = resolve(dir, 'compressed.tgz') const expect = new Error('cannot append 
to compressed archives') const expectT = new TypeError('cannot append to compressed archives') t.throws(_ => r({ - file: fileCompressed, + file, cwd: __dirname, gzip: true, }, [path.basename(__filename)]), expectT) t.throws(_ => r({ - file: fileCompressed, + file, cwd: __dirname, sync: true, }, [path.basename(__filename)]), expect) - r({ - file: fileCompressed, + return r({ + file, cwd: __dirname, - }, [path.basename(__filename)], er => { - t.match(er, expect) - t.end() - }) + }, [path.basename(__filename)], er => t.match(er, expect)) }) t.test('other throws', t => { @@ -206,37 +217,61 @@ t.test('other throws', t => { }) t.test('broken open', t => { + const dir = t.testdir({ + 'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'], + }) + const file = resolve(dir, 'body-byte-counts.tar') const poop = new Error('poop') t.teardown(mutateFS.fail('open', poop)) - t.throws(_ => r({ sync: true, file: file }, ['README.md']), poop) - r({ file: file }, ['README.md'], er => { + t.throws(_ => r({ sync: true, file }, ['README.md']), poop) + r({ file }, ['README.md'], er => { t.match(er, poop) t.end() }) }) t.test('broken fstat', t => { + const td = { + 'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'], + } const poop = new Error('poop') - t.teardown(mutateFS.fail('fstat', poop)) - t.throws(_ => r({ sync: true, file: file }, ['README.md']), poop) - r({ file: file }, ['README.md'], er => { - t.match(er, poop) + t.test('sync', t => { + const dir = t.testdir(td) + const file = resolve(dir, 'body-byte-counts.tar') + t.teardown(mutateFS.fail('fstat', poop)) + t.throws(_ => r({ sync: true, file }, ['README.md']), poop) t.end() }) + t.test('async', t => { + const dir = t.testdir(td) + const file = resolve(dir, 'body-byte-counts.tar') + t.teardown(mutateFS.fail('fstat', poop)) + r({ file }, ['README.md'], async er => { + t.match(er, poop) + t.end() + }) + }) + t.end() }) t.test('broken read', t => { + const dir = t.testdir({ + 'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'], + }) + const file = resolve(dir, 'body-byte-counts.tar') const poop = new Error('poop') t.teardown(mutateFS.fail('read', poop)) - t.throws(_ => r({ sync: true, file: file }, ['README.md']), poop) - r({ file: file }, ['README.md'], er => { + t.throws(_ => r({ sync: true, file }, ['README.md']), poop) + r({ file }, ['README.md'], er => { t.match(er, poop) t.end() }) }) -t.test('mtime cache', t => { - t.beforeEach(reset) +t.test('mtime cache', async t => { + const td = { + 'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'], + } let mtimeCache @@ -247,7 +282,7 @@ t.test('mtime cache', t => { c.on('close', (code, signal) => { t.equal(code, 0) t.equal(signal, null) - const actual = Buffer.concat(out).toString().trim().split('\n') + const actual = Buffer.concat(out).toString().trim().split(/\r?\n/) t.same(actual, [ '1024-bytes.txt', '512-bytes.txt', @@ -268,9 +303,11 @@ t.test('mtime cache', t => { } t.test('sync', t => { + const dir = t.testdir(td) + const file = resolve(dir, 'body-byte-counts.tar') r({ sync: true, - file: file, + file, cwd: __dirname, mtimeCache: mtimeCache = new Map(), }, [path.basename(__filename)]) @@ -278,8 +315,10 @@ t.test('mtime cache', t => { }) t.test('async cb', t => { + const dir = t.testdir(td) + const file = resolve(dir, 'body-byte-counts.tar') r({ - file: file, + file, cwd: __dirname, mtimeCache: mtimeCache = new Map(), }, [path.basename(__filename)], er => { @@ -290,8 +329,10 @@ t.test('mtime cache', t => { }) t.test('async promise', t => { + const dir = t.testdir(td) + const file 
= resolve(dir, 'body-byte-counts.tar') r({ - file: file, + file, cwd: __dirname, mtimeCache: mtimeCache = new Map(), }, [path.basename(__filename)]).then(_ => check(file, t)) @@ -301,20 +342,19 @@ t.test('mtime cache', t => { }) t.test('create tarball out of another tarball', t => { - const out = path.resolve(dir, 'out.tar') - - t.beforeEach(() => { - fs.writeFileSync(out, fs.readFileSync(path.resolve(tars, 'dir.tar'))) - }) + const td = { + 'out.tar': fs.readFileSync(path.resolve(tars, 'dir.tar')), + } - const check = t => { + const check = (out, t) => { const expect = [ 'dir/', 'Ω.txt', '🌟.txt', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', ] - list({ f: out, + list({ + f: out, sync: true, onentry: entry => { t.equal(entry.path, expect.shift()) @@ -324,19 +364,36 @@ t.test('create tarball out of another tarball', t => { } t.test('sync', t => { + const dir = t.testdir(td) + const out = resolve(dir, 'out.tar') r({ f: out, cwd: tars, sync: true, }, ['@utf8.tar']) - check(t) + check(out, t) + }) + + t.test('async cb', t => { + const dir = t.testdir(td) + const out = resolve(dir, 'out.tar') + r({ + f: out, + cwd: tars, + }, ['@utf8.tar'], er => { + if (er) + throw er + check(out, t) + }) }) t.test('async', t => { + const dir = t.testdir(td) + const out = resolve(dir, 'out.tar') r({ f: out, cwd: tars, - }, ['@utf8.tar'], _ => check(t)) + }, ['@utf8.tar']).then(() => check(out, t)) }) t.end() diff --git a/test/unpack.js b/test/unpack.js index f901543e..e60915b9 100644 --- a/test/unpack.js +++ b/test/unpack.js @@ -16,22 +16,30 @@ const fixtures = path.resolve(__dirname, 'fixtures') const tars = path.resolve(fixtures, 'tars') const parses = path.resolve(fixtures, 'parse') const unpackdir = path.resolve(fixtures, 'unpack') -const rimraf = require('rimraf') +const {promisify} = require('util') +const rimraf = promisify(require('rimraf')) const mkdirp = require('mkdirp') const mutateFS = require('mutate-fs') const eos = require('end-of-stream') +const normPath = require('../lib/normalize-windows-path.js') -t.teardown(_ => rimraf.sync(unpackdir)) +// On Windows in particular, the "really deep folder path" file +// often tends to cause problems, which don't indicate a failure +// of this library, it's just what happens on Windows with super +// long file paths. 
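+// Entries matching isLongFile below are therefore skipped in the +// content checks when running on Windows.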
+const isWindows = process.platform === 'win32' +const isLongFile = f => f.match(/r.e.a.l.l.y.-.d.e.e.p.-.f.o.l.d.e.r.-.p.a.t.h/) -t.test('setup', t => { - rimraf.sync(unpackdir) - mkdirp.sync(unpackdir) - t.end() +t.teardown(_ => rimraf(unpackdir)) + +t.before(async () => { + await rimraf(unpackdir) + await mkdirp(unpackdir) }) t.test('basic file unpack tests', t => { const basedir = path.resolve(unpackdir, 'basic') - t.teardown(_ => rimraf.sync(basedir)) + t.teardown(_ => rimraf(basedir)) const cases = { 'emptypax.tar': { @@ -79,17 +87,19 @@ t.test('basic file unpack tests', t => { const tf = path.resolve(tars, tarfile) const dir = path.resolve(basedir, tarfile) const linkdir = path.resolve(basedir, tarfile + '.link') - t.beforeEach(() => { - rimraf.sync(dir) - rimraf.sync(linkdir) - mkdirp.sync(dir) - fs.symlinkSync(dir, linkdir) + t.beforeEach(async () => { + await rimraf(dir) + await rimraf(linkdir) + await mkdirp(dir) + fs.symlinkSync(dir, linkdir, 'junction') }) const check = t => { const expect = cases[tarfile] Object.keys(expect).forEach(file => { const f = path.resolve(dir, file) + if (isWindows && isLongFile(file)) + return t.equal(fs.readFileSync(f, 'utf8'), expect[file], file) }) t.end() @@ -131,7 +141,7 @@ t.test('basic file unpack tests', t => { t.test('cwd default to process cwd', t => { const u = new Unpack() const us = new UnpackSync() - const cwd = process.cwd() + const cwd = normPath(process.cwd()) t.equal(u.cwd, cwd) t.equal(us.cwd, cwd) t.end() @@ -143,8 +153,8 @@ t.test('links!', t => { const stripData = fs.readFileSync(tars + '/links-strip.tar') t.plan(6) - t.beforeEach(() => mkdirp.sync(dir)) - t.afterEach(() => rimraf.sync(dir)) + t.beforeEach(() => mkdirp(dir)) + t.afterEach(() => rimraf(dir)) const check = t => { const hl1 = fs.lstatSync(dir + '/hardlink-1') @@ -153,9 +163,12 @@ t.test('links!', t => { t.equal(hl1.ino, hl2.ino) t.equal(hl1.nlink, 2) t.equal(hl2.nlink, 2) - const sym = fs.lstatSync(dir + '/symlink') - t.ok(sym.isSymbolicLink()) - t.equal(fs.readlinkSync(dir + '/symlink'), 'hardlink-2') + if (!isWindows) { + // doesn't work on win32 without special privs + const sym = fs.lstatSync(dir + '/symlink') + t.ok(sym.isSymbolicLink()) + t.equal(fs.readlinkSync(dir + '/symlink'), 'hardlink-2') + } t.end() } const checkForStrip = t => { @@ -168,9 +181,11 @@ t.test('links!', t => { t.equal(hl1.ino, hl3.ino) t.equal(hl1.nlink, 3) t.equal(hl2.nlink, 3) - const sym = fs.lstatSync(dir + '/symlink') - t.ok(sym.isSymbolicLink()) - t.equal(fs.readlinkSync(dir + '/symlink'), 'hardlink-2') + if (!isWindows) { + const sym = fs.lstatSync(dir + '/symlink') + t.ok(sym.isSymbolicLink()) + t.equal(fs.readlinkSync(dir + '/symlink'), 'hardlink-2') + } t.end() } const checkForStrip3 = t => { @@ -181,7 +196,8 @@ t.test('links!', t => { } catch (e) { err = e } - // can't be extracted because we've passed it in the tar (specially crafted tar for this not to work) + // can't be extracted because we've passed it in the tar + // (specially crafted tar for this not to work) t.equal(err.code, 'ENOENT') t.end() } @@ -203,7 +219,7 @@ t.test('links!', t => { t.test('sync strip', t => { const unpack = new UnpackSync({ cwd: dir, strip: 1 }) - unpack.end(fs.readFileSync(tars + '/links-strip.tar')) + unpack.end(stripData) checkForStrip(t) }) @@ -238,7 +254,7 @@ t.test('links without cleanup (exercise clobbering code)', t => { t.plan(6) mkdirp.sync(dir) - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) t.beforeEach(() => { // clobber this junk @@ -256,9 +272,11 @@ 
t.test('links without cleanup (exercise clobbering code)', t => { t.equal(hl1.ino, hl2.ino) t.equal(hl1.nlink, 2) t.equal(hl2.nlink, 2) - const sym = fs.lstatSync(dir + '/symlink') - t.ok(sym.isSymbolicLink()) - t.equal(fs.readlinkSync(dir + '/symlink'), 'hardlink-2') + if (!isWindows) { + const sym = fs.lstatSync(dir + '/symlink') + t.ok(sym.isSymbolicLink()) + t.equal(fs.readlinkSync(dir + '/symlink'), 'hardlink-2') + } t.end() } @@ -306,7 +324,7 @@ t.test('links without cleanup (exercise clobbering code)', t => { t.test('nested dir dupe', t => { const dir = path.resolve(unpackdir, 'nested-dir') mkdirp.sync(dir + '/d/e/e/p') - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) const expect = { 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt': 'short\n', 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', @@ -335,13 +353,15 @@ t.test('nested dir dupe', t => { zip.end(data) }) -t.test('symlink in dir path', t => { +t.test('symlink in dir path', { + skip: isWindows && 'symlinks not fully supported', +}, t => { const dir = path.resolve(unpackdir, 'symlink-junk') - t.teardown(_ => rimraf.sync(dir)) - t.beforeEach(() => { - rimraf.sync(dir) - mkdirp.sync(dir) + t.teardown(_ => rimraf(dir)) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) const data = makeTar([ @@ -398,18 +418,22 @@ t.test('symlink in dir path', t => { onwarn: (c, w, d) => warnings.push([c, w, d]), }) u.on('close', _ => { - t.equal(fs.lstatSync(dir + '/d/i').mode & 0o7777, 0o755) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) + t.equal(fs.lstatSync(dir + '/d/i').mode & 0o7777, isWindows ? 0o666 : 0o755) + t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, isWindows ? 0o666 : 0o751) t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + if (!isWindows) { + t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') + t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + } t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') - t.equal(warnings[0][1], 'Cannot extract through symbolic link') - t.match(warnings[0][2], { - name: 'SylinkError', - path: dir + '/d/i/r/symlink/', - symlink: dir + '/d/i/r/symlink', - }) + if (!isWindows) { + t.equal(warnings[0][1], 'Cannot extract through symbolic link') + t.match(warnings[0][2], { + name: 'SylinkError', + path: dir + '/d/i/r/symlink/', + symlink: dir + '/d/i/r/symlink', + }) + } t.equal(warnings.length, 1) t.end() }) @@ -423,10 +447,12 @@ t.test('symlink in dir path', t => { onwarn: (c, w, d) => warnings.push([c, w, d]), }) u.end(data) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) + t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, isWindows ? 
0o666 : 0o751) t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + if (!isWindows) { + t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') + t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + } t.equal(warnings.length, 1) t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') t.equal(warnings[0][1], 'Cannot extract through symbolic link') @@ -589,7 +615,7 @@ t.test('symlink in dir path', t => { t.test('unsupported entries', t => { const dir = path.resolve(unpackdir, 'unsupported-entries') mkdirp.sync(dir) - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) const unknown = new Header({ path: 'qux', type: 'File', size: 4 }) unknown.type = 'Z' unknown.encode() @@ -672,10 +698,10 @@ t.test('unsupported entries', t => { t.test('file in dir path', t => { const dir = path.resolve(unpackdir, 'file-junk') - t.teardown(_ => rimraf.sync(dir)) - t.beforeEach(() => { - rimraf.sync(dir) - mkdirp.sync(dir) + t.teardown(_ => rimraf(dir)) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) const data = makeTar([ @@ -745,7 +771,7 @@ t.test('file in dir path', t => { t.test('set umask option', t => { const dir = path.resolve(unpackdir, 'umask') mkdirp.sync(dir) - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) const data = makeTar([ { @@ -761,18 +787,18 @@ t.test('set umask option', t => { umask: 0o027, cwd: dir, }).on('close', _ => { - t.equal(fs.statSync(dir + '/d/i/r').mode & 0o7777, 0o750) - t.equal(fs.statSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) + t.equal(fs.statSync(dir + '/d/i/r').mode & 0o7777, isWindows ? 0o666 : 0o750) + t.equal(fs.statSync(dir + '/d/i/r/dir').mode & 0o7777, isWindows ? 0o666 : 0o751) t.end() }).end(data) }) t.test('absolute paths', t => { const dir = path.join(unpackdir, 'absolute-paths') - t.teardown(_ => rimraf.sync(dir)) - t.beforeEach(() => { - rimraf.sync(dir) - mkdirp.sync(dir) + t.teardown(_ => rimraf(dir)) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) const absolute = path.resolve(dir, 'd/i/r/absolute') @@ -800,9 +826,10 @@ t.test('absolute paths', t => { t.test('warn and correct', t => { const check = t => { + const r = normPath(root) t.match(warnings, [[ - `stripping ${root}${root}${root}${root} from absolute path`, - { path: absolute, code: 'TAR_ENTRY_INFO' }, + `stripping ${r}${r}${r}${r} from absolute path`, + { path: normPath(absolute), code: 'TAR_ENTRY_INFO' }, ]]) t.ok(fs.lstatSync(path.resolve(dir, relative)).isFile(), 'is file') t.end() @@ -831,6 +858,22 @@ t.test('absolute paths', t => { }) t.test('preserve absolute path', t => { + // if we use the extraAbsolute path here, we end up creating a dir + // like C:\C:\C:\C:\path\to\absolute, which is both 100% valid on + // windows, as well as SUUUUUPER annoying. + const data = makeTar([ + { + path: isWindows ? absolute : extraAbsolute, + type: 'File', + size: 1, + atime: new Date('1979-07-01T19:10:00.000Z'), + ctime: new Date('2011-03-27T22:16:31.000Z'), + mtime: new Date('2011-03-27T22:16:31.000Z'), + }, + 'a', + '', + '', + ]) const check = t => { t.same(warnings, []) t.ok(fs.lstatSync(absolute).isFile(), 'is file') @@ -866,10 +909,10 @@ t.test('absolute paths', t => { t.test('.. 
paths', t => { const dir = path.join(unpackdir, 'dotted-paths') - t.teardown(_ => rimraf.sync(dir)) - t.beforeEach(() => { - rimraf.sync(dir) - mkdirp.sync(dir) + t.teardown(_ => rimraf(dir)) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) const fmode = 0o755 @@ -928,7 +971,7 @@ t.test('.. paths', t => { const check = t => { t.same(warnings, []) t.ok(fs.lstatSync(resolved).isFile(), 'is file') - t.equal(fs.lstatSync(resolved).mode & 0o777, fmode, 'mode is 0755') + t.equal(fs.lstatSync(resolved).mode & 0o777, isWindows ? 0o666 : fmode) t.end() } @@ -964,18 +1007,46 @@ t.test('.. paths', t => { t.test('fail all stats', t => { const poop = new Error('poop') poop.code = 'EPOOP' - let unmutate - const dir = path.join(unpackdir, 'stat-fail') + const dir = normPath(path.join(unpackdir, 'stat-fail')) + const { + stat, + fstat, + lstat, + statSync, + fstatSync, + lstatSync, + } = fs + const unmutate = () => Object.assign(fs, { + stat, + fstat, + lstat, + statSync, + fstatSync, + lstatSync, + }) + const mutate = () => { + fs.stat = fs.lstat = fs.fstat = (...args) => { + // don't fail statting the cwd, or we get different errors + if (normPath(args[0]) === dir) + return lstat(dir, args.pop()) + process.nextTick(() => args.pop()(poop)) + } + fs.statSync = fs.lstatSync = fs.fstatSync = (...args) => { + if (normPath(args[0]) === dir) + return lstatSync(dir) + throw poop + } + } const warnings = [] t.beforeEach(() => { warnings.length = 0 mkdirp.sync(dir) - unmutate = mutateFS.statFail(poop) + mutate() }) - t.afterEach(() => { + t.afterEach(async () => { unmutate() - rimraf.sync(dir) + await rimraf(dir) }) const data = makeTar([ @@ -1046,15 +1117,15 @@ t.test('fail all stats', t => { String, { code: 'EISDIR', - path: path.resolve(dir, 'd/i/r/file'), + path: normPath(path.resolve(dir, 'd/i/r/file')), syscall: 'open', }, ], [ String, { - dest: path.resolve(dir, 'd/i/r/link'), - path: path.resolve(dir, 'd/i/r/file'), + dest: normPath(path.resolve(dir, 'd/i/r/link')), + path: normPath(path.resolve(dir, 'd/i/r/file')), syscall: 'link', }, ], @@ -1074,13 +1145,16 @@ t.test('fail symlink', t => { poop.code = 'EPOOP' const unmutate = mutateFS.fail('symlink', poop) const dir = path.join(unpackdir, 'symlink-fail') - t.teardown(_ => (unmutate(), rimraf.sync(dir))) + t.teardown(async _ => { + unmutate() + await rimraf(dir) + }) const warnings = [] - t.beforeEach(() => { + t.beforeEach(async () => { warnings.length = 0 - rimraf.sync(dir) - mkdirp.sync(dir) + await rimraf(dir) + await mkdirp(dir) }) const data = makeTar([ @@ -1135,13 +1209,16 @@ t.test('fail chmod', t => { poop.code = 'EPOOP' const unmutate = mutateFS.fail('chmod', poop) const dir = path.join(unpackdir, 'chmod-fail') - t.teardown(_ => (unmutate(), rimraf.sync(dir))) + t.teardown(async _ => { + unmutate() + await rimraf(dir) + }) const warnings = [] - t.beforeEach(() => { + t.beforeEach(async () => { warnings.length = 0 - rimraf.sync(dir) - mkdirp.sync(dir) + await rimraf(dir) + await mkdirp(dir) }) const data = makeTar([ @@ -1195,13 +1272,13 @@ t.test('fail mkdir', t => { poop.code = 'EPOOP' let unmutate const dir = path.join(unpackdir, 'mkdir-fail') - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) const warnings = [] - t.beforeEach(() => { + t.beforeEach(async () => { warnings.length = 0 - rimraf.sync(dir) - mkdirp.sync(dir) + await rimraf(dir) + await mkdirp(dir) unmutate = mutateFS.fail('mkdir', poop) }) t.afterEach(() => unmutate()) @@ -1220,12 +1297,11 @@ t.test('fail mkdir', t => { ]) const expect = [[ - 
'ENOENT: no such file or directory, lstat \'' + - path.resolve(dir, 'dir') + '\'', + 'ENOENT: no such file or directory', { code: 'ENOENT', syscall: 'lstat', - path: path.resolve(dir, 'dir'), + path: normPath(path.resolve(dir, 'dir')), }, ]] @@ -1235,13 +1311,6 @@ t.test('fail mkdir', t => { t.end() } - t.test('async', t => { - new Unpack({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t)).end(data) - }) - t.test('sync', t => { new UnpackSync({ cwd: dir, @@ -1250,6 +1319,13 @@ t.test('fail mkdir', t => { check(t) }) + t.test('async', t => { + new Unpack({ + cwd: dir, + onwarn: (c, w, d) => warnings.push([w, d]), + }).on('close', _ => check(t)).end(data) + }) + t.end() }) @@ -1258,13 +1334,16 @@ t.test('fail write', t => { poop.code = 'EPOOP' const unmutate = mutateFS.fail('write', poop) const dir = path.join(unpackdir, 'write-fail') - t.teardown(_ => (unmutate(), rimraf.sync(dir))) + t.teardown(async _ => { + unmutate() + await rimraf(dir) + }) const warnings = [] - t.beforeEach(() => { + t.beforeEach(async () => { warnings.length = 0 - rimraf.sync(dir) - mkdirp.sync(dir) + await rimraf(dir) + await mkdirp(dir) }) const data = makeTar([ @@ -1308,12 +1387,12 @@ t.test('fail write', t => { t.test('skip existing', t => { const dir = path.join(unpackdir, 'skip-newer') - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) const date = new Date('2011-03-27T22:16:31.000Z') - t.beforeEach(() => { - rimraf.sync(dir) - mkdirp.sync(dir) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) fs.writeFileSync(dir + '/x', 'y') fs.utimesSync(dir + '/x', date, date) }) @@ -1360,12 +1439,12 @@ t.test('skip existing', t => { t.test('skip newer', t => { const dir = path.join(unpackdir, 'skip-newer') - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) const date = new Date('2013-12-19T17:00:00.000Z') - t.beforeEach(() => { - rimraf.sync(dir) - mkdirp.sync(dir) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) fs.writeFileSync(dir + '/x', 'y') fs.utimesSync(dir + '/x', date, date) }) @@ -1412,11 +1491,11 @@ t.test('skip newer', t => { t.test('no mtime', t => { const dir = path.join(unpackdir, 'skip-newer') - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) - t.beforeEach(() => { - rimraf.sync(dir) - mkdirp.sync(dir) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) const date = new Date('2011-03-27T22:16:31.000Z') @@ -1477,7 +1556,7 @@ t.test('no mtime', t => { t.test('unpack big enough to pause/drain', t => { const dir = path.resolve(unpackdir, 'drain-clog') mkdirp.sync(dir) - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) const stream = fs.createReadStream(fixtures + '/parses.tar') const u = new Unpack({ cwd: dir, @@ -1600,11 +1679,16 @@ t.test('set owner', t => { const unl = mutateFS.fail('lchown', poop) const unf = mutateFS.fail('fchown', poop) - t.teardown(_ => (un(), unf(), unl())) + t.teardown(async () => { + un() + unf() + unl() + await rimraf(dir) + }) t.test('sync', t => { mkdirp.sync(dir) - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) let warned = false const u = new Unpack.Sync({ cwd: dir, @@ -1622,7 +1706,7 @@ t.test('set owner', t => { t.test('async', t => { mkdirp.sync(dir) - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) let warned = false const u = new Unpack({ cwd: dir, @@ -1638,11 +1722,6 @@ t.test('set owner', t => { u.end(data) }) - t.test('cleanup', t => { - rimraf.sync(dir) - t.end() - }) - 
t.end() }) @@ -1672,7 +1751,7 @@ t.test('set owner', t => { t.test('sync', t => { mkdirp.sync(dir) - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) called = 0 const u = new Unpack.Sync({ cwd: dir, preserveOwner: true }) u.end(data) @@ -1682,7 +1761,7 @@ t.test('set owner', t => { t.test('async', t => { mkdirp.sync(dir) - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) called = 0 const u = new Unpack({ cwd: dir, preserveOwner: true }) u.end(data) @@ -1701,15 +1780,15 @@ t.test('set owner', t => { const un = mutateFS.fail('chown', poop) const unf = mutateFS.fail('fchown', poop) const unl = mutateFS.fail('lchown', poop) - t.teardown(_ => { - rimraf.sync(dir) + t.teardown(async _ => { un() unf() unl() + await rimraf(dir) }) - t.beforeEach(() => mkdirp.sync(dir)) - t.afterEach(() => rimraf.sync(dir)) + t.beforeEach(() => mkdirp(dir)) + t.afterEach(() => rimraf(dir)) const check = t => { const dirStat = fs.statSync(dir + '/foo') @@ -1761,11 +1840,11 @@ t.test('unpack when dir is not writable', t => { ]) const dir = path.resolve(unpackdir, 'nowrite-dir') - t.beforeEach(() => mkdirp.sync(dir)) - t.afterEach(() => rimraf.sync(dir)) + t.beforeEach(() => mkdirp(dir)) + t.afterEach(() => rimraf(dir)) const check = t => { - t.equal(fs.statSync(dir + '/a').mode & 0o7777, 0o744) + t.equal(fs.statSync(dir + '/a').mode & 0o7777, isWindows ? 0o666 : 0o744) t.equal(fs.readFileSync(dir + '/a/b', 'utf8'), 'a') t.end() } @@ -1798,8 +1877,8 @@ t.test('transmute chars on windows', t => { ]) const dir = path.resolve(unpackdir, 'winchars') - t.beforeEach(() => mkdirp.sync(dir)) - t.afterEach(() => rimraf.sync(dir)) + t.beforeEach(() => mkdirp(dir)) + t.afterEach(() => rimraf(dir)) const hex = 'ef80bcef81bcef80beef80bfef80ba2e747874' const uglyName = Buffer.from(hex, 'hex').toString() @@ -1879,10 +1958,10 @@ t.test('use explicit chmod when required by umask', t => { '', ]) - const check = t => { + const check = async t => { const st = fs.statSync(basedir + '/x/y/z') - t.equal(st.mode & 0o777, 0o775) - rimraf.sync(basedir) + t.equal(st.mode & 0o777, isWindows ? 0o666 : 0o775) + await rimraf(basedir) t.end() } @@ -1921,10 +2000,10 @@ t.test('dont use explicit chmod if noChmod flag set', t => { '', ]) - const check = t => { + const check = async t => { const st = fs.statSync(basedir + '/x/y/z') - t.equal(st.mode & 0o777, 0o755) - rimraf.sync(basedir) + t.equal(st.mode & 0o777, isWindows ? 
0o666 : 0o755) + await rimraf(basedir) t.end() } @@ -2005,11 +2084,11 @@ t.test('chown implicit dirs and also the entries', t => { '', ]) - const check = t => { + const check = async t => { currentTest = null t.equal(chowns, 8) chowns = 0 - rimraf.sync(basedir) + await rimraf(basedir) t.end() } @@ -2059,7 +2138,7 @@ t.test('chown implicit dirs and also the entries', t => { t.test('bad cwd setting', t => { const basedir = path.resolve(unpackdir, 'bad-cwd') mkdirp.sync(basedir) - t.teardown(_ => rimraf.sync(basedir)) + t.teardown(_ => rimraf(basedir)) const cases = [ // the cwd itself @@ -2101,16 +2180,16 @@ t.test('bad cwd setting', t => { t.throws(_ => new Unpack.Sync(opt).end(data), { name: 'CwdError', - message: 'ENOTDIR: Cannot cd into \'' + cwd + '\'', - path: cwd, + message: 'ENOTDIR: Cannot cd into \'' + normPath(cwd) + '\'', + path: normPath(cwd), code: 'ENOTDIR', }) new Unpack(opt).on('error', er => { t.match(er, { name: 'CwdError', - message: 'ENOTDIR: Cannot cd into \'' + cwd + '\'', - path: cwd, + message: 'ENOTDIR: Cannot cd into \'' + normPath(cwd) + '\'', + path: normPath(cwd), code: 'ENOTDIR', }) t.end() @@ -2123,16 +2202,16 @@ t.test('bad cwd setting', t => { t.throws(_ => new Unpack.Sync(opt).end(data), { name: 'CwdError', - message: 'ENOENT: Cannot cd into \'' + cwd + '\'', - path: cwd, + message: 'ENOENT: Cannot cd into \'' + normPath(cwd) + '\'', + path: normPath(cwd), code: 'ENOENT', }) new Unpack(opt).on('error', er => { t.match(er, { name: 'CwdError', - message: 'ENOENT: Cannot cd into \'' + cwd + '\'', - path: cwd, + message: 'ENOENT: Cannot cd into \'' + normPath(cwd) + '\'', + path: normPath(cwd), code: 'ENOENT', }) t.end() @@ -2145,7 +2224,7 @@ t.test('bad cwd setting', t => { t.test('transform', t => { const basedir = path.resolve(unpackdir, 'transform') - t.teardown(_ => rimraf.sync(basedir)) + t.teardown(_ => rimraf(basedir)) const cases = { 'emptypax.tar': { @@ -2193,9 +2272,9 @@ t.test('transform', t => { t.test(tarfile, t => { const tf = path.resolve(tars, tarfile) const dir = path.resolve(basedir, tarfile) - t.beforeEach(() => { - rimraf.sync(dir) - mkdirp.sync(dir) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) }) const check = t => { @@ -2243,7 +2322,7 @@ t.test('transform', t => { t.test('transform error', t => { const dir = path.resolve(unpackdir, 'transform-error') mkdirp.sync(dir) - t.teardown(_ => rimraf.sync(dir)) + t.teardown(_ => rimraf(dir)) const tarfile = path.resolve(tars, 'body-byte-counts.tar') const tardata = fs.readFileSync(tarfile) @@ -2316,13 +2395,12 @@ t.test('futimes/fchown failures', t => { const poop = new Error('poop') const second = new Error('second error') - const reset = () => { - rimraf.sync(dir) - mkdirp.sync(dir) - } + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) + }) - reset() - t.teardown(() => rimraf.sync(dir)) + t.teardown(() => rimraf(dir)) const methods = ['utimes', 'chown'] methods.forEach(method => { @@ -2337,13 +2415,11 @@ t.test('futimes/fchown failures', t => { t.test('async unpack', t => { t.plan(2) t.test('strict', t => { - reset() const unpack = new Unpack({ cwd: dir, strict: true, forceChown: fc }) unpack.on('finish', t.end) unpack.end(tardata) }) t.test('loose', t => { - reset() const unpack = new Unpack({ cwd: dir, forceChown: fc }) unpack.on('finish', t.end) unpack.on('warn', t.fail) @@ -2353,13 +2429,11 @@ t.test('futimes/fchown failures', t => { t.test('sync unpack', t => { t.plan(2) t.test('strict', t => { - reset() const unpack = new Unpack.Sync({ cwd: dir, 
strict: true, forceChown: fc }) unpack.end(tardata) t.end() }) t.test('loose', t => { - reset() const unpack = new Unpack.Sync({ cwd: dir, forceChown: fc }) unpack.on('warn', t.fail) unpack.end(tardata) @@ -2379,14 +2453,12 @@ t.test('futimes/fchown failures', t => { t.test('async unpack', t => { t.plan(2) t.test('strict', t => { - reset() const unpack = new Unpack({ cwd: dir, strict: true, forceChown: fc }) t.plan(3) unpack.on('error', er => t.equal(er, poop)) unpack.end(tardata) }) t.test('loose', t => { - reset() const unpack = new Unpack({ cwd: dir, forceChown: fc }) t.plan(3) unpack.on('warn', (code, m, er) => t.equal(er, poop)) @@ -2396,14 +2468,12 @@ t.test('futimes/fchown failures', t => { t.test('sync unpack', t => { t.plan(2) t.test('strict', t => { - reset() const unpack = new Unpack.Sync({ cwd: dir, strict: true, forceChown: fc }) t.plan(3) unpack.on('error', er => t.equal(er, poop)) unpack.end(tardata) }) t.test('loose', t => { - reset() const unpack = new Unpack.Sync({ cwd: dir, forceChown: fc }) t.plan(3) unpack.on('warn', (c, m, er) => t.equal(er, poop)) @@ -2419,7 +2489,7 @@ t.test('futimes/fchown failures', t => { t.test('onentry option is preserved', t => { const basedir = path.resolve(unpackdir, 'onentry-method') mkdirp.sync(basedir) - t.teardown(() => rimraf.sync(basedir)) + t.teardown(() => rimraf(basedir)) let oecalls = 0 const onentry = entry => oecalls++ @@ -2474,7 +2544,7 @@ t.test('onentry option is preserved', t => { t.test('do not reuse hardlinks, only nlink=1 files', t => { const basedir = path.resolve(unpackdir, 'hardlink-reuse') mkdirp.sync(basedir) - t.teardown(() => rimraf.sync(basedir)) + t.teardown(() => rimraf(basedir)) const now = new Date('2018-04-30T18:30:39.025Z') @@ -2545,7 +2615,7 @@ t.test('trying to unpack a non-zlib gzip file should fail', t => { errno: Number, code: /^Z/, recoverable: false, - cwd: basedir, + cwd: normPath(basedir), tarCode: 'TAR_ABORT', } const opts = { @@ -2579,7 +2649,14 @@ t.test('trying to unpack a non-zlib gzip file should fail', t => { t.test('handle errors on fs.close', t => { const poop = new Error('poop') - t.teardown(mutateFS.fail('close', poop)) + const { close, closeSync } = fs + // have to actually close them, or else windows gets mad + fs.close = (fd, cb) => close(fd, () => cb(poop)) + fs.closeSync = (fd) => { + closeSync(fd) + throw poop + } + t.teardown(() => Object.assign(fs, { close, closeSync })) const dir = path.resolve(unpackdir, 'close-fail') mkdirp.sync(dir + '/sync') mkdirp.sync(dir + '/async') @@ -2602,11 +2679,13 @@ t.test('handle errors on fs.close', t => { .on('error', er => t.equal(er, poop, 'async')) .end(data) t.throws(() => new UnpackSync({ - cwd: dir + '/sync', strict: true, + cwd: normPath(dir + '/sync'), strict: true, }).end(data), poop, 'sync') }) -t.test('drop entry from dirCache if no longer a directory', t => { +t.test('drop entry from dirCache if no longer a directory', { + skip: isWindows && 'symlinks not fully supported', +}, t => { const dir = path.resolve(unpackdir, 'dir-cache-error') mkdirp.sync(dir + '/sync/y') mkdirp.sync(dir + '/async/y') @@ -2679,13 +2758,24 @@ t.test('using strip option when top level file exists', t => { size: 'a'.length, }, 'a', + { + path: 'y', + type: 'GNUDumpDir', + }, + { + path: 'y/b', + type: 'File', + size: 'b'.length, + }, + 'b', '', '', ]) t.plan(2) const check = (t, path) => { t.equal(fs.statSync(path).isDirectory(), true) - t.equal(fs.lstatSync(path + '/a').isFile(), true) + t.equal(fs.readFileSync(path + '/a', 'utf8'), 'a') + 
t.equal(fs.readFileSync(path + '/b', 'utf8'), 'b') t.throws(() => fs.statSync(path + '/top'), { code: 'ENOENT' }) t.end() } diff --git a/test/update.js b/test/update.js index 740f339d..1e182053 100644 --- a/test/update.js +++ b/test/update.js @@ -3,56 +3,28 @@ const t = require('tap') const u = require('../lib/update.js') const path = require('path') const fs = require('fs') -const mkdirp = require('mkdirp') -const rimraf = require('rimraf') const mutateFS = require('mutate-fs') +const {resolve} = require('path') const fixtures = path.resolve(__dirname, 'fixtures') -const dir = path.resolve(fixtures, 'update') const tars = path.resolve(fixtures, 'tars') -const file = dir + '/body-byte-counts.tar' -const fileNoNulls = dir + '/no-null-eof.tar' -const fileTruncHead = dir + '/truncated-head.tar' -const fileTruncBody = dir + '/truncated-body.tar' -const fileNonExistent = dir + '/does-not-exist.tar' -const fileZeroByte = dir + '/zero.tar' -const fileEmpty = dir + '/empty.tar' -const fileCompressed = dir + '/compressed.tgz' const zlib = require('zlib') const spawn = require('child_process').spawn -t.teardown(_ => rimraf.sync(dir)) - -const reset = () => { - rimraf.sync(dir) - mkdirp.sync(dir) - const data = fs.readFileSync(tars + '/body-byte-counts.tar') - fs.writeFileSync(file, data) - - const dataNoNulls = data.slice(0, data.length - 1024) - fs.writeFileSync(fileNoNulls, dataNoNulls) - - const dataTruncHead = Buffer.concat([dataNoNulls, data.slice(0, 500)]) - fs.writeFileSync(fileTruncHead, dataTruncHead) - - const dataTruncBody = Buffer.concat([dataNoNulls, data.slice(0, 700)]) - fs.writeFileSync(fileTruncBody, dataTruncBody) - - fs.writeFileSync(fileZeroByte, '') - fs.writeFileSync(fileEmpty, Buffer.alloc(1024)) - - fs.writeFileSync(fileCompressed, zlib.gzipSync(data)) +const data = fs.readFileSync(tars + '/body-byte-counts.tar') +const dataNoNulls = data.slice(0, data.length - 1024) +const fixtureDef = { + 'body-byte-counts.tar': data, + 'no-null-eof.tar': dataNoNulls, + 'truncated-head.tar': Buffer.concat([dataNoNulls, data.slice(0, 500)]), + 'truncated-body.tar': Buffer.concat([dataNoNulls, data.slice(0, 700)]), + 'zero.tar': Buffer.from(''), + 'empty.tar': Buffer.alloc(512), + 'compressed.tgz': zlib.gzipSync(data), } -t.test('setup', t => { - reset() - t.end() -}) - t.test('basic file add to archive (good or truncated)', t => { - t.beforeEach(reset) - const check = (file, t) => { const c = spawn('tar', ['tf', file]) const out = [] @@ -60,7 +32,7 @@ t.test('basic file add to archive (good or truncated)', t => { c.on('close', (code, signal) => { t.equal(code, 0) t.equal(signal, null) - const actual = Buffer.concat(out).toString().trim().split('\n') + const actual = Buffer.concat(out).toString().trim().split(/\r?\n/) t.same(actual, [ '1024-bytes.txt', '512-bytes.txt', @@ -72,50 +44,68 @@ t.test('basic file add to archive (good or truncated)', t => { }) } - ;[file, - fileNoNulls, - fileTruncHead, - fileTruncBody, - ].forEach(file => { - t.test(path.basename(file), t => { - const fileList = [path.basename(__filename)] - t.test('sync', t => { + const files = [ + 'body-byte-counts.tar', + 'no-null-eof.tar', + 'truncated-head.tar', + 'truncated-body.tar', + ] + const td = files.map(f => [f, fixtureDef[f]]).reduce((s, [k, v]) => { + s[k] = v + return s + }, {}) + const fileList = [path.basename(__filename)] + t.test('sync', t => { + t.plan(files.length) + const dir = t.testdir(td) + for (const file of files) { + t.test(file, t => { u({ sync: true, - file: file, + file: resolve(dir, file), cwd: 
__dirname, }, fileList) - check(file, t) + check(resolve(dir, file), t) }) + } + }) - t.test('async cb', t => { + t.test('async cb', t => { + t.plan(files.length) + const dir = t.testdir(td) + for (const file of files) { + t.test(file, t => { u({ - file: file, + file: resolve(dir, file), cwd: __dirname, }, fileList, er => { if (er) throw er - check(file, t) + check(resolve(dir, file), t) }) }) + } + }) - t.test('async promise', t => { + t.test('async', t => { + t.plan(files.length) + const dir = t.testdir(td) + for (const file of files) { + t.test(file, t => { u({ - file: file, + file: resolve(dir, file), cwd: __dirname, - }, fileList).then(_ => check(file, t)) + }, fileList).then(() => { + check(resolve(dir, file), t) + }) }) - - t.end() - }) + } }) t.end() }) t.test('add to empty archive', t => { - t.beforeEach(reset) - const check = (file, t) => { const c = spawn('tar', ['tf', file]) const out = [] @@ -123,7 +113,7 @@ t.test('add to empty archive', t => { c.on('close', (code, signal) => { t.equal(code, 0) t.equal(signal, null) - const actual = Buffer.concat(out).toString().trim().split('\n') + const actual = Buffer.concat(out).toString().trim().split(/\r?\n/) t.same(actual, [ path.basename(__filename), ]) @@ -131,66 +121,89 @@ t.test('add to empty archive', t => { }) } - ;[fileNonExistent, - fileEmpty, - fileZeroByte, - ].forEach(file => { - t.test(path.basename(file), t => { - const fileList = [path.basename(__filename)] - t.test('sync', t => { + const files = [ + 'empty.tar', + 'zero.tar', + ] + const td = files.map(f => [f, fixtureDef[f]]).reduce((s, [k, v]) => { + s[k] = v + return s + }, {}) + files.push('not-existing.tar') + + t.test('sync', t => { + const dir = t.testdir(td) + t.plan(files.length) + for (const file of files) { + t.test(file, t => { u({ sync: true, - file: file, + file: resolve(dir, file), cwd: __dirname, - }, fileList) - check(file, t) + }, [path.basename(__filename)]) + check(resolve(dir, file), t) }) + } + }) - t.test('async cb', t => { + t.test('async cb', t => { + const dir = t.testdir(td) + t.plan(files.length) + for (const file of files) { + t.test(file, t => { u({ - file: file, + file: resolve(dir, file), cwd: __dirname, - }, fileList, er => { + }, [path.basename(__filename)], er => { if (er) throw er - check(file, t) + check(resolve(dir, file), t) }) }) + } + }) - t.test('async promise', t => { + t.test('async', async t => { + const dir = t.testdir(td) + t.plan(files.length) + for (const file of files) { + t.test(file, t => { u({ - file: file, + file: resolve(dir, file), cwd: __dirname, - }, fileList).then(_ => check(file, t)) + }, [path.basename(__filename)]).then(() => { + check(resolve(dir, file), t) + }) }) - - t.end() - }) + } }) t.end() }) t.test('cannot append to gzipped archives', t => { - reset() + const dir = t.testdir({ + 'compressed.tgz': fixtureDef['compressed.tgz'], + }) + const file = resolve(dir, 'compressed.tgz') const expect = new Error('cannot append to compressed archives') const expectT = new TypeError('cannot append to compressed archives') t.throws(_ => u({ - file: fileCompressed, + file, cwd: __dirname, gzip: true, }, [path.basename(__filename)]), expectT) t.throws(_ => u({ - file: fileCompressed, + file, cwd: __dirname, sync: true, }, [path.basename(__filename)]), expect) u({ - file: fileCompressed, + file, cwd: __dirname, }, [path.basename(__filename)], er => { t.match(er, expect) @@ -206,87 +219,150 @@ t.test('other throws', t => { }) t.test('broken open', t => { + const dir = t.testdir({ + 'body-byte-counts.tar': 
fixtureDef['body-byte-counts.tar'], + }) + const file = resolve(dir, 'body-byte-counts.tar') const poop = new Error('poop') t.teardown(mutateFS.fail('open', poop)) t.throws(_ => u({ sync: true, file: file }, ['README.md']), poop) - u({ file: file }, ['README.md'], er => { + u({ file }, ['README.md'], er => { t.match(er, poop) t.end() }) }) t.test('broken fstat', t => { + const td = { + 'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'], + } const poop = new Error('poop') - t.teardown(mutateFS.fail('fstat', poop)) - t.throws(_ => u({ sync: true, file: file }, ['README.md']), poop) - u({ file: file }, ['README.md'], er => { - t.match(er, poop) + t.test('sync', t => { + const dir = t.testdir(td) + const file = resolve(dir, 'body-byte-counts.tar') + t.teardown(mutateFS.fail('fstat', poop)) + t.throws(_ => u({ sync: true, file }, ['README.md']), poop) t.end() }) + t.test('async', t => { + const dir = t.testdir(td) + const file = resolve(dir, 'body-byte-counts.tar') + t.teardown(mutateFS.fail('fstat', poop)) + u({ file }, ['README.md'], async er => { + t.match(er, poop) + t.end() + }) + }) + t.end() }) t.test('broken read', t => { + const dir = t.testdir({ + 'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'], + }) + const file = resolve(dir, 'body-byte-counts.tar') const poop = new Error('poop') t.teardown(mutateFS.fail('read', poop)) - t.throws(_ => u({ sync: true, file: file }, ['README.md']), poop) - u({ file: file }, ['README.md'], er => { + t.throws(_ => u({ sync: true, file }, ['README.md']), poop) + u({ file }, ['README.md'], er => { t.match(er, poop) t.end() }) }) t.test('do not add older file', t => { - reset() + const dir = t.testdir({ + 'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'], + '1024-bytes.txt': '.'.repeat(1024), + foo: 'foo', + }) + const file = resolve(dir, 'body-byte-counts.tar') - const f = dir + '/1024-bytes.txt' - fs.writeFileSync(f, new Array(1025).join('.')) + const f = resolve(dir, '1024-bytes.txt') const oldDate = new Date('1997-04-10T16:57:47.000Z') fs.utimesSync(f, oldDate, oldDate) + // file size should not change + const expect = fixtureDef['body-byte-counts.tar'].length const check = t => { - t.equal(fs.statSync(file).size, 5120) + t.equal(fs.statSync(file).size, expect) t.end() } t.test('sync', t => { - u({ file: file, cwd: dir, sync: true }, ['1024-bytes.txt']) + u({ + mtimeCache: new Map(), + file, + cwd: dir, + sync: true, + filter: path => path === '1024-bytes.txt', + }, ['1024-bytes.txt', 'foo']) check(t) }) t.test('async', t => { - u({ file: file, cwd: dir }, ['1024-bytes.txt']).then(_ => check(t)) + u({ file, cwd: dir }, ['1024-bytes.txt']).then(_ => check(t)) + }) + + t.test('async cb', t => { + u({ file, cwd: dir }, ['1024-bytes.txt'], er => { + if (er) + throw er + check(t) + }) }) t.end() }) t.test('do add newer file', t => { - t.beforeEach(() => { - reset() - const f = dir + '/1024-bytes.txt' - fs.writeFileSync(f, new Array(1025).join('.')) - const newDate = new Date('2017-05-01T22:06:43.736Z') + const setup = t => { + const dir = t.testdir({ + 'body-byte-counts.tar': fixtureDef['body-byte-counts.tar'], + '1024-bytes.txt': '.'.repeat(1024), + foo: 'foo', + }) + + const f = resolve(dir, '1024-bytes.txt') + const newDate = new Date(Date.now() + 24 * 60 * 60 * 1000) fs.utimesSync(f, newDate, newDate) - }) + return dir + } - const check = t => { - t.equal(fs.statSync(file).size, 6656) + // a chunk for the header, then 2 for the body + const expect = fixtureDef['body-byte-counts.tar'].length + 512 + 1024 + const check = (file, 
t) => { + t.equal(fs.statSync(file).size, expect) t.end() } t.test('sync', t => { + const dir = setup(t) + const file = resolve(dir, 'body-byte-counts.tar') u({ mtimeCache: new Map(), - file: file, + file, cwd: dir, sync: true, filter: path => path === '1024-bytes.txt', - }, ['1024-bytes.txt', 'compressed.tgz']) - check(t) + }, ['1024-bytes.txt', 'foo']) + check(file, t) }) t.test('async', t => { - u({ file: file, cwd: dir }, ['1024-bytes.txt']).then(_ => check(t)) + const dir = setup(t) + const file = resolve(dir, 'body-byte-counts.tar') + u({ file, cwd: dir }, ['1024-bytes.txt']).then(_ => check(file, t)) + }) + + t.test('async cb', t => { + const dir = setup(t) + const file = resolve(dir, 'body-byte-counts.tar') + u({ file, cwd: dir }, ['1024-bytes.txt'], er => { + if (er) + throw er + check(file, t) + }) }) t.end() diff --git a/test/write-entry.js b/test/write-entry.js index fcbfaa2c..b6cd1c3f 100644 --- a/test/write-entry.js +++ b/test/write-entry.js @@ -28,6 +28,7 @@ const chmodr = require('chmodr') const Parser = require('../lib/parse.js') const rimraf = require('rimraf') const isWindows = process.platform === 'win32' +const normPath = require('../lib/normalize-windows-path.js') t.test('set up', t => { const one = fs.statSync(files + '/hardlink-1') @@ -36,7 +37,7 @@ t.test('set up', t => { fs.unlinkSync(files + '/hardlink-2') fs.linkSync(files + '/hardlink-1', files + '/hardlink-2') } - chmodr.sync(files, 0o644) + chmodr.sync(files, isWindows ? 0o666 : 0o644) t.end() }) @@ -63,7 +64,7 @@ t.test('100 byte filename', t => { cksumValid: true, needPax: false, path: '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 0o644, + mode: isWindows ? 0o666 : 0o644, size: 100, linkpath: null, uname: 'isaacs', @@ -91,7 +92,7 @@ t.test('100 byte filename', t => { cksumValid: true, needPax: false, path: '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 0o644, + mode: isWindows ? 0o666 : 0o644, size: 100, linkpath: '', uname: 'isaacs', @@ -139,7 +140,7 @@ t.test('directory', t => { cksumValid: true, needPax: false, path: 'dir/', - mode: 0o755, + mode: isWindows ? 0o777 : 0o755, size: 0, linkpath: null, uname: 'isaacs', @@ -155,7 +156,7 @@ t.test('directory', t => { cksumValid: true, needPax: false, path: 'dir/', - mode: 0o755, + mode: isWindows ? 0o777 : 0o755, size: 0, linkpath: null, uname: 'isaacs', @@ -179,7 +180,7 @@ t.test('empty path for cwd', t => { cksumValid: true, needPax: false, path: './', - mode: fs.statSync('./').mode & 0o7777, + mode: isWindows ? 0o777 : fs.statSync('./').mode & 0o7777, size: 0, linkpath: null, uname: 'isaacs', @@ -191,7 +192,9 @@ t.test('empty path for cwd', t => { }) }) -t.test('symlink', t => { +t.test('symlink', { + skip: isWindows && 'symlinks not fully supported', +}, t => { const ws = new WriteEntry('symlink', { cwd: files }) let out = [] ws.on('data', c => out.push(c)) @@ -230,7 +233,7 @@ t.test('zero-byte file', t => { path: 'files/zero-byte.txt', cksumValid: true, needPax: false, - mode: 0o644, + mode: isWindows ? 0o666 : 0o644, size: 0, linkpath: null, uname: 'isaacs', @@ -278,7 +281,7 @@ t.test('hardlinks', t => { path: 'files/hardlink-2', cksumValid: true, needPax: false, - mode: 0o644, + mode: isWindows ? 0o666 : 0o644, size: 0, linkpath: 'files/hardlink-1', uname: 'isaacs', @@ -310,7 +313,7 @@ t.test('hardlinks far away', t => { path: 'files/hardlink-2', cksumValid: true, needPax: false, - mode: 0o644, + mode: isWindows ? 
0o666 : 0o644, size: 26, linkpath: null, uname: 'isaacs', @@ -334,7 +337,7 @@ t.test('really deep path', t => { cksumValid: true, needPax: true, path: 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 0o644, + mode: isWindows ? 0o666 : 0o644, size: 100, linkpath: null, uname: 'isaacs', @@ -359,7 +362,7 @@ t.test('no pax', t => { cksumValid: true, needPax: true, path: 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 0o644, + mode: isWindows ? 0o666 : 0o644, size: 100, linkpath: null, uname: 'isaacs', @@ -390,10 +393,12 @@ t.test('absolute path', t => { const absolute = path.resolve(files, '512-bytes.txt') const { root } = path.parse(absolute) const f = root + root + root + absolute - const warn = root + root + root + root + const warn = normPath(isWindows ? root : root + root + root + root) t.test('preservePaths=false strict=false', t => { const warnings = [] - const ws = new WriteEntry(f, { + // on windows, c:\c:\c:\... is a valid path, so just use the + // single-root absolute version of it. + const ws = new WriteEntry(isWindows ? absolute : f, { cwd: files, onwarn: (c, m, p) => warnings.push([c, m, p]), }) @@ -403,14 +408,16 @@ t.test('absolute path', t => { out = Buffer.concat(out) t.equal(out.length, 1024) t.match(warnings, [[ - 'TAR_ENTRY_INFO', `stripping ${warn} from absolute path`, { path: f }, + 'TAR_ENTRY_INFO', + `stripping ${warn} from absolute path`, + { path: normPath(isWindows ? absolute : f) }, ]]) t.match(ws.header, { cksumValid: true, needPax: false, - path: f.replace(/^(\/|[a-z]:\\\\){4}/, ''), - mode: 0o644, + path: normPath(absolute.replace(/^(\/|[a-z]:[/\\])*/i, '')), + mode: isWindows ? 0o666 : 0o644, size: 512, linkpath: null, uname: 'isaacs', @@ -428,7 +435,7 @@ t.test('absolute path', t => { ;[true, false].forEach(strict => { t.test('strict=' + strict, t => { const warnings = [] - const ws = new WriteEntry(f, { + const ws = new WriteEntry(isWindows ? absolute : f, { cwd: files, strict: strict, preservePaths: true, @@ -443,8 +450,8 @@ t.test('absolute path', t => { t.match(ws.header, { cksumValid: true, needPax: false, - path: f, - mode: 0o644, + path: normPath(isWindows ? absolute : f), + mode: isWindows ? 0o666 : 0o644, size: 512, linkpath: null, uname: 'isaacs', @@ -460,11 +467,14 @@ t.test('absolute path', t => { t.test('preservePaths=false strict=true', t => { t.throws(_ => { - new WriteEntry(f, { + new WriteEntry(isWindows ? absolute : f, { strict: true, cwd: files, }) - }, { message: /stripping .* from absolute path/, path: f }) + }, { + message: /stripping .* from absolute path/, + path: normPath(isWindows ? absolute : f), + }) t.end() }) @@ -485,7 +495,7 @@ t.test('no user environ, sets uname to empty string', t => { cksumValid: true, needPax: false, path: '512-bytes.txt', - mode: 0o644, + mode: isWindows ? 
0o666 : 0o644, size: 512, uname: '', linkpath: null, @@ -525,17 +535,17 @@ t.test('an unsuppored type', { t.test('readlink fail', t => { const expect = { - message: `EINVAL: invalid argument, readlink '${__filename}'`, - code: 'EINVAL', syscall: 'readlink', - path: __filename, + path: String, } // pretend everything is a symbolic link, then read something that isn't t.teardown(mutateFS.statType('SymbolicLink')) - t.throws(_ => new WriteEntry.Sync('write-entry.js', { cwd: __dirname }), - expect) + t.throws(_ => { + return new WriteEntry.Sync('write-entry.js', { cwd: __dirname }) + }, expect) new WriteEntry('write-entry.js', { cwd: __dirname }).on('error', er => { t.match(er, expect) + t.equal(normPath(er.path), normPath(__filename)) t.end() }) }) @@ -556,8 +566,13 @@ t.test('read fail', t => { code: 'EISDIR', syscall: 'read', } - // pretend everything is a symbolic link, then read something that isn't - t.teardown(mutateFS.statType('File')) + // pretend everything is a file, then read something that isn't + t.teardown(mutateFS.statMutate((er, st) => { + if (er) + return [er, st] + st.isFile = () => true + st.size = 123 + })) t.throws(_ => new WriteEntry.Sync('fixtures', { cwd: __dirname, }), expect) @@ -571,7 +586,7 @@ t.test('read invalid EOF', t => { t.teardown(mutateFS.mutate('read', (er, br) => [er, 0])) const expect = { message: 'encountered unexpected EOF', - path: __filename, + path: normPath(__filename), syscall: 'read', code: 'EOF', } @@ -591,7 +606,7 @@ t.test('read overflow expectation', t => { const f = '512-bytes.txt' const expect = { message: 'did not encounter expected EOF', - path: path.resolve(files, f), + path: normPath(path.resolve(files, f)), syscall: 'read', code: 'EOF', } @@ -948,7 +963,7 @@ t.test('write entry from read entry', t => { path: '$', type: 'File', size: 10, - mode: 0o755, + mode: isWindows ? 0o777 : 0o755, uid: 123, gid: 321, ctime: new Date('1979-07-01'), @@ -989,7 +1004,7 @@ t.test('write entry from read entry', t => { path: '$', type: 'Directory', size: 0, - mode: 0o755, + mode: isWindows ? 0o777 : 0o755, uid: 123, gid: 321, ctime: new Date('1979-07-01'), @@ -1673,3 +1688,15 @@ t.test('hard links from tar entries and no prefix', t => { t.end() }) + +t.test('myuid set by getuid() if available, otherwise 0', t => { + const {getuid} = process + process.getuid = null + const noUid = new WriteEntry(__filename) + t.equal(noUid.myuid, 0, 'set to zero if no getuid function') + process.getuid = () => 123456789 + const hasUid = new WriteEntry(__filename) + t.equal(hasUid.myuid, 123456789, 'set to process.getuid()') + process.getuid = getuid + t.end() +})
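
A few notes on the patterns the refactored tests lean on. The snippets below are illustrative sketches for review purposes, not code from this patch.

Note: the `isWindows ? 0o666 : ...` assertions appear throughout because Windows has no POSIX permission bits. Node synthesizes st.mode from the read-only attribute, so a regular file reports 0o666 and a directory 0o777 no matter what mode tar requested. The convention, as a sketch (expectedMode is an illustrative name, not a helper in this repo):

    const isWindows = process.platform === 'win32'

    // What fs.stat() will actually report for something unpacked with
    // `mode`, given Windows' fixed permission bits.
    const expectedMode = (mode, { dir = false } = {}) =>
      isWindows ? (dir ? 0o777 : 0o666) : mode

    // e.g. t.equal(st.mode & 0o777, expectedMode(0o755, { dir: true }))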
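
Note: the hand-rolled mutate()/unmutate() pair in 'fail all stats' replaces mutateFS.statFail because, per the inline comment, failing the stat of the cwd itself produces a different error than the one under test; the mock passes cwd lookups through to the real lstat and fails everything else. The save-and-restore idiom it uses, reduced to a sketch (withFsOverrides is an illustrative name):

    const fs = require('fs')

    // Install fs overrides and hand back a restore function: the same
    // shape as the mutate()/unmutate() pair in the test above.
    const withFsOverrides = overrides => {
      const saved = {}
      for (const name of Object.keys(overrides))
        saved[name] = fs[name]
      Object.assign(fs, overrides)
      return () => Object.assign(fs, saved)
    }

    // const restore = withFsOverrides({ stat: (p, cb) => cb(new Error('nope')) })
    // try { /* exercise the failure path */ } finally { restore() }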
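
Note: expectations are wrapped in normPath() so a single assertion covers both platforms. lib/normalize-windows-path.js is the identity everywhere except win32, where it turns every backslash into a forward slash, and after this patch it also passes a null or undefined path through instead of throwing. From a test file's vantage point:

    const normPath = require('../lib/normalize-windows-path.js')

    normPath('C:\\tmp\\x')  // 'C:/tmp/x' on win32; unchanged elsewhere
    normPath(undefined)     // undefined, rather than a TypeError on win32

This matters for error matching in particular: fs errors raised on Windows carry backslash paths, so both the expected and the actual path are normalized before comparison.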
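
Note: test/update.js drops the shared fixtures directory and its reset() dance in favor of tap's t.testdir(), which materializes an object of name-to-contents pairs as a fresh directory scoped to the test and cleans it up automatically, sidestepping both cross-test interference and Windows rimraf races. A minimal sketch:

    const t = require('tap')
    const fs = require('fs')
    const { resolve } = require('path')

    t.test('fixtures via t.testdir', t => {
      // keys are entry names; string or Buffer values become file contents
      const dir = t.testdir({
        'hello.txt': 'hello',
        'empty.tar': Buffer.alloc(512),
      })
      t.equal(fs.readFileSync(resolve(dir, 'hello.txt'), 'utf8'), 'hello')
      t.equal(fs.statSync(resolve(dir, 'empty.tar')).size, 512)
      t.end()
    })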
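
Note: the size expectations in 'do not add older file' and 'do add newer file' fall out of tar framing. Each entry is a 512-byte header plus a body padded to a 512-byte boundary, and an append overwrites the old end-of-archive null blocks, writing fresh ones after the new entry. Appending the 1024-byte fixture therefore grows the archive by exactly 512 + 1024 bytes, while a skipped older file leaves the size untouched. As arithmetic:

    // Net growth when one file is appended: header plus padded body;
    // the two EOF null blocks simply move to the new end.
    const paddedBody = size => Math.ceil(size / 512) * 512
    const growth = size => 512 + paddedBody(size)
    growth(1024) // 1536, matching `.length + 512 + 1024` in the test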
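
Note: the new myuid test at the end pins down the uid fallback WriteEntry relies on: process.getuid exists only on POSIX, so on Windows the uid defaults to 0. The guarded call is, in essence:

    // process.getuid is undefined on Windows, so guard before calling;
    // essentially what WriteEntry does when computing this.myuid.
    const myuid = process.getuid && process.getuid() || 0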