From 114c7ac4c24441cbe13de4aa46a864d46676c6bc Mon Sep 17 00:00:00 2001 From: isaacs Date: Mon, 8 Apr 2024 15:34:14 -0700 Subject: [PATCH 01/12] first pass TS refactor --- .github/workflows/typedoc.yml | 50 ++ .gitignore | 2 +- README.md | 1 + lib/path-reservations.js | 39 +- package.json | 233 +++++-- src/create.ts | 160 +++++ src/cwd-error.ts | 15 + src/extract.ts | 165 +++++ src/get-write-flag.ts | 29 + src/header.ts | 397 ++++++++++++ src/index.ts | 21 + src/large-numbers.ts | 98 +++ src/list.ts | 185 ++++++ src/mkdir.ts | 292 +++++++++ src/mode-fix.ts | 26 + src/normalize-unicode.ts | 12 + src/normalize-windows-path.ts | 12 + src/options.ts | 473 ++++++++++++++ src/pack.ts | 500 +++++++++++++++ src/parse.ts | 629 +++++++++++++++++++ src/path-reservations.ts | 192 ++++++ src/pax.ts | 181 ++++++ src/read-entry.ts | 151 +++++ src/replace.ts | 317 ++++++++++ src/strip-absolute-path.ts | 27 + src/strip-trailing-slashes.ts | 13 + src/symlink-error.ts | 14 + src/types.ts | 97 +++ src/unpack.ts | 1101 +++++++++++++++++++++++++++++++++ src/update.ts | 62 ++ src/warn-method.ts | 59 ++ src/winchars.ts | 16 + src/write-entry.ts | 733 ++++++++++++++++++++++ 33 files changed, 6248 insertions(+), 54 deletions(-) create mode 100644 .github/workflows/typedoc.yml create mode 100644 src/create.ts create mode 100644 src/cwd-error.ts create mode 100644 src/extract.ts create mode 100644 src/get-write-flag.ts create mode 100644 src/header.ts create mode 100644 src/index.ts create mode 100644 src/large-numbers.ts create mode 100644 src/list.ts create mode 100644 src/mkdir.ts create mode 100644 src/mode-fix.ts create mode 100644 src/normalize-unicode.ts create mode 100644 src/normalize-windows-path.ts create mode 100644 src/options.ts create mode 100644 src/pack.ts create mode 100644 src/parse.ts create mode 100644 src/path-reservations.ts create mode 100644 src/pax.ts create mode 100644 src/read-entry.ts create mode 100644 src/replace.ts create mode 100644 src/strip-absolute-path.ts create mode 100644 src/strip-trailing-slashes.ts create mode 100644 src/symlink-error.ts create mode 100644 src/types.ts create mode 100644 src/unpack.ts create mode 100644 src/update.ts create mode 100644 src/warn-method.ts create mode 100644 src/winchars.ts create mode 100644 src/write-entry.ts diff --git a/.github/workflows/typedoc.yml b/.github/workflows/typedoc.yml new file mode 100644 index 00000000..e5bc0ef8 --- /dev/null +++ b/.github/workflows/typedoc.yml @@ -0,0 +1,50 @@ +# Simple workflow for deploying static content to GitHub Pages +name: Deploy static content to Pages + +on: + # Runs on pushes targeting the default branch + push: + branches: ["main"] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages +permissions: + contents: read + pages: write + id-token: write + +# Allow one concurrent deployment +concurrency: + group: "pages" + cancel-in-progress: true + +jobs: + # Single deploy job since we're just deploying + deploy: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Use Nodejs ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: 18.x + - name: Install dependencies + run: npm install + - name: Generate typedocs + run: npm run typedoc + + - name: Setup Pages + uses: actions/configure-pages@v3 + - name: Upload artifact + uses: 
actions/upload-pages-artifact@v1 + with: + path: './docs' + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v1 diff --git a/.gitignore b/.gitignore index effd9b9a..44be827b 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,7 @@ # keep these !**/.gitignore +!/src !/.commitlintrc.js !/.eslintrc.js !/.eslintrc.local.* @@ -15,7 +16,6 @@ !/bin/ !/CHANGELOG* !/CODE_OF_CONDUCT.md -!/docs/ !/index.js !/lib/ !/LICENSE* diff --git a/README.md b/README.md index f620568e..296229c5 100644 --- a/README.md +++ b/README.md @@ -630,6 +630,7 @@ The following options are supported: default" for most unix systems, based on a `umask` value of `0o22`. - `preservePaths` Allow absolute paths. By default, `/` is stripped from absolute paths. + - `linkCache` A Map object containing the device and inode value for any file whose nlink is > 1, to identify hard links. - `statCache` A Map object that caches calls `lstat`. diff --git a/lib/path-reservations.js b/lib/path-reservations.js index 8d349d58..62890060 100644 --- a/lib/path-reservations.js +++ b/lib/path-reservations.js @@ -11,7 +11,8 @@ const normalize = require('./normalize-unicode.js') const stripSlashes = require('./strip-trailing-slashes.js') const { join } = require('path') -const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform +const platform = + process.env.TESTING_TAR_FAKE_PLATFORM || process.platform const isWindows = platform === 'win32' module.exports = () => { @@ -26,13 +27,16 @@ module.exports = () => { // return a set of parent dirs for a given path // '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d'] const getDirs = path => { - const dirs = path.split('/').slice(0, -1).reduce((set, path) => { - if (set.length) { - path = join(set[set.length - 1], path) - } - set.push(path || '/') - return set - }, []) + const dirs = path + .split('/') + .slice(0, -1) + .reduce((set, path) => { + if (set.length) { + path = join(set[set.length - 1], path) + } + set.push(path || '/') + return set + }, []) return dirs } @@ -57,8 +61,10 @@ module.exports = () => { // included in the first set for all its dir queues const check = fn => { const { paths, dirs } = getQueues(fn) - return paths.every(q => q[0] === fn) && - dirs.every(q => q[0] instanceof Set && q[0].has(fn)) + return ( + paths.every(q => q && q[0] === fn) && + dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)) + ) } // run the function if it's first in line and not already running @@ -102,7 +108,6 @@ module.exports = () => { } else if (q[0].size === 1) { q.shift() - // must be a function or else the Set would've been reused next.add(q[0]) } else { q[0].delete(fn) @@ -121,13 +126,15 @@ module.exports = () => { // disk, without asking the kernel for a shortname. // So, we just pretend that every path matches every other path here, // effectively removing all parallelization on windows. - paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => { - // don't need normPath, because we skip this entirely for windows - return stripSlashes(join(normalize(p))).toLowerCase() - }) + paths = isWindows + ? 
['win32 parallelization disabled'] + : paths.map(p => { + // don't need normPath, because we skip this entirely for windows + return stripSlashes(join(normalize(p))).toLowerCase() + }) const dirs = new Set( - paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)) + paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)), ) reservations.set(fn, { dirs, paths }) paths.forEach(path => { diff --git a/package.json b/package.json index f84a41cc..7cfcb4f5 100644 --- a/package.json +++ b/package.json @@ -10,61 +10,220 @@ "scripts": { "genparse": "node scripts/generate-parse-fixtures.js", "snap": "tap", - "test": "tap" + "test": "tap", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "tshy", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" }, "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" + "minizlib": "^3.0.1", + "mkdirp": "^3.0.1" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.0", "chmodr": "^1.2.0", "end-of-stream": "^1.4.3", "events-to-array": "^2.0.3", "mutate-fs": "^2.1.1", "nock": "^13.2.9", + "prettier": "^3.2.5", "rimraf": "^3.0.2", - "tap": "^16.0.1" + "tap": "^16.0.1", + "tshy": "^1.13.1", + "typedoc": "^0.25.13" }, "license": "ISC", "engines": { - "node": ">=10" + "node": ">=18" }, "files": [ - "bin/", - "lib/", - "index.js" + "dist" ], "tap": { "coverage-map": "map.js", - "timeout": 0, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] + "timeout": 0 }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.11.0", - "content": "scripts/template-oss", - "engines": ">=10", - "distPaths": [ - "index.js" - ], - "allowPaths": [ - "/index.js" - ], - "ciVersions": [ - "10.x", - "12.x", - "14.x", - "16.x", - "18.x" - ] - } + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts", + "./create": "./src/create.ts", + "./replace": "./src/create.ts", + "./list": "./src/list.ts", + "./update": "./src/update.ts", + "./extract": "./src/extract.ts", + "./pack": "./src/pack.ts", + "./unpack": "./src/unpack.ts", + "./parse": "./src/parse.ts", + "./read-entry": "./src/read-entry.ts", + "./write-entry": "./src/write-entry.ts", + "./header": "./src/header.ts", + "./pax": "./src/pax.ts", + "./types": "./src/types.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + }, + "./create": { + "import": { + "types": "./dist/esm/create.d.ts", + "default": "./dist/esm/create.js" + }, + "require": { + "types": "./dist/commonjs/create.d.ts", + "default": "./dist/commonjs/create.js" + } + }, + "./replace": { + "import": { + "types": "./dist/esm/create.d.ts", + "default": "./dist/esm/create.js" + }, + "require": { + "types": "./dist/commonjs/create.d.ts", + "default": "./dist/commonjs/create.js" + } + }, + "./list": { + "import": { + "types": "./dist/esm/list.d.ts", + "default": "./dist/esm/list.js" + }, + "require": { + "types": "./dist/commonjs/list.d.ts", + "default": "./dist/commonjs/list.js" + } + }, + "./update": { + "import": { + "types": "./dist/esm/update.d.ts", + "default": "./dist/esm/update.js" + }, + "require": { + "types": "./dist/commonjs/update.d.ts", + "default": "./dist/commonjs/update.js" + } + }, + "./extract": { + "import": { + "types": "./dist/esm/extract.d.ts", + "default": "./dist/esm/extract.js" + }, + "require": { + "types": "./dist/commonjs/extract.d.ts", + "default": "./dist/commonjs/extract.js" + } + }, + "./pack": { + "import": { + "types": "./dist/esm/pack.d.ts", + "default": "./dist/esm/pack.js" + }, + "require": { + "types": "./dist/commonjs/pack.d.ts", + "default": "./dist/commonjs/pack.js" + } + }, + "./unpack": { + "import": { + "types": "./dist/esm/unpack.d.ts", + "default": "./dist/esm/unpack.js" + }, + "require": { + "types": "./dist/commonjs/unpack.d.ts", + "default": "./dist/commonjs/unpack.js" + } + }, + "./parse": { + "import": { + "types": "./dist/esm/parse.d.ts", + "default": "./dist/esm/parse.js" + }, + "require": { + "types": "./dist/commonjs/parse.d.ts", + "default": "./dist/commonjs/parse.js" + } + }, + "./read-entry": { + "import": { + "types": "./dist/esm/read-entry.d.ts", + "default": "./dist/esm/read-entry.js" + }, + "require": { + "types": "./dist/commonjs/read-entry.d.ts", + "default": "./dist/commonjs/read-entry.js" + } + }, + "./write-entry": { + "import": { + "types": "./dist/esm/write-entry.d.ts", + "default": "./dist/esm/write-entry.js" + }, + "require": { + "types": "./dist/commonjs/write-entry.d.ts", + "default": "./dist/commonjs/write-entry.js" + } + }, + "./header": { + "import": { + "types": "./dist/esm/header.d.ts", + "default": "./dist/esm/header.js" + }, + 
"require": { + "types": "./dist/commonjs/header.d.ts", + "default": "./dist/commonjs/header.js" + } + }, + "./pax": { + "import": { + "types": "./dist/esm/pax.d.ts", + "default": "./dist/esm/pax.js" + }, + "require": { + "types": "./dist/commonjs/pax.d.ts", + "default": "./dist/commonjs/pax.js" + } + }, + "./types": { + "import": { + "types": "./dist/esm/types.d.ts", + "default": "./dist/esm/types.js" + }, + "require": { + "types": "./dist/commonjs/types.d.ts", + "default": "./dist/commonjs/types.js" + } + } + }, + "type": "module", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts" } diff --git a/src/create.ts b/src/create.ts new file mode 100644 index 00000000..b97eb2e8 --- /dev/null +++ b/src/create.ts @@ -0,0 +1,160 @@ +import { + dealias, + isFile, + isSync, + isSyncFile, + TarOptions, + TarOptionsFile, + TarOptionsSync, + TarOptionsSyncFile, + TarOptionsWithAliases, + TarOptionsWithAliasesFile, + TarOptionsWithAliasesSync, + TarOptionsWithAliasesSyncFile, +} from './options.js' + +import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass' +import { Minipass } from 'minipass' +import path from 'node:path' +import { list } from './list.js' +import { Pack, PackSync } from './pack.js' + +export function create( + opt: TarOptionsWithAliasesSyncFile, + files?: string[], +): void +export function create( + opt: TarOptionsWithAliasesSync, + files?: string[], +): void +export function create( + opt: TarOptionsWithAliasesFile, + files?: string[], + cb?: () => any, +): Promise +export function create( + opt: TarOptionsWithAliasesFile, + cb: () => any, +): Promise +export function create( + opt: TarOptionsWithAliases, + files?: string[], +): Pack +export function create( + opt_: TarOptionsWithAliases, + files?: string[] | (() => any), + cb?: () => any, +): void | Promise | Pack { + if (typeof files === 'function') { + cb = files + } + + if (Array.isArray(opt_)) { + ;(files = opt_), (opt_ = {}) + } + + if (!files || !Array.isArray(files) || !files.length) { + throw new TypeError('no files or directories specified') + } + + files = Array.from(files) + + const opt = dealias(opt_) + + if (opt.sync && typeof cb === 'function') { + throw new TypeError( + 'callback not supported for sync tar functions', + ) + } + + if (!opt.file && typeof cb === 'function') { + throw new TypeError('callback only supported with file option') + } + + return isSyncFile(opt) + ? createFileSync(opt, files) + : isFile(opt) + ? createFile(opt, files, cb) + : isSync(opt) + ? createSync(opt, files) + : create_(opt, files) +} + +const createFileSync = (opt: TarOptionsSyncFile, files: string[]) => { + const p = new PackSync(opt) + const stream = new WriteStreamSync(opt.file, { + mode: opt.mode || 0o666, + }) + p.pipe(stream as unknown as Minipass.Writable) + addFilesSync(p, files) +} + +const createFile = ( + opt: TarOptionsFile, + files: string[], + cb?: () => any, +) => { + const p = new Pack(opt) + const stream = new WriteStream(opt.file, { + mode: opt.mode || 0o666, + }) + p.pipe(stream as unknown as Minipass.Writable) + + const promise = new Promise((res, rej) => { + stream.on('error', rej) + stream.on('close', res) + p.on('error', rej) + }) + + addFilesAsync(p, files) + + return cb ? 
promise.then(cb, cb) : promise +} + +const addFilesSync = (p: PackSync, files: string[]) => { + files.forEach(file => { + if (file.charAt(0) === '@') { + list({ + file: path.resolve(p.cwd, file.slice(1)), + sync: true, + noResume: true, + onentry: entry => p.add(entry), + }) + } else { + p.add(file) + } + }) + p.end() +} + +const addFilesAsync = async ( + p: Pack, + files: string[], + i = 0, +): Promise => { + for (; i < files.length; i++) { + const file = String(files[i]) + if (file.charAt(0) === '@') { + return list({ + file: path.resolve(String(p.cwd), file.slice(1)), + noResume: true, + onentry: entry => p.add(entry), + }).then(_ => addFilesAsync(p, files)) + } else { + p.add(file) + } + } + p.end() +} + +const createSync = (opt: TarOptionsSync, files: string[]) => { + const p = new PackSync(opt) + addFilesSync(p, files) + return p +} + +const create_ = (opt: TarOptions, files: string[]) => { + const p = new Pack(opt) + addFilesAsync(p, files) + return p +} diff --git a/src/cwd-error.ts b/src/cwd-error.ts new file mode 100644 index 00000000..7a708ed4 --- /dev/null +++ b/src/cwd-error.ts @@ -0,0 +1,15 @@ +export class CwdError extends Error { + path: string + code: string + syscall: 'chdir' = 'chdir' + + constructor(path: string, code: string) { + super(code + ": Cannot cd into '" + path + "'") + this.path = path + this.code = code + } + + get name() { + return 'CwdError' + } +} diff --git a/src/extract.ts b/src/extract.ts new file mode 100644 index 00000000..f79f9408 --- /dev/null +++ b/src/extract.ts @@ -0,0 +1,165 @@ +// tar -x +import * as fsm from '@isaacs/fs-minipass' +import fs from 'node:fs' +import { dirname, parse } from 'node:path' +import { + dealias, + isFile, + isSync, + isSyncFile, + TarOptions, + TarOptionsFile, + TarOptionsSync, + TarOptionsSyncFile, + TarOptionsWithAliases, + TarOptionsWithAliasesFile, + TarOptionsWithAliasesSync, + TarOptionsWithAliasesSyncFile, +} from './options.js' +import { stripTrailingSlashes } from './strip-trailing-slashes.js' +import { Unpack, UnpackSync } from './unpack.js' + +export function extract( + opt: TarOptionsWithAliasesSyncFile, + files?: string[], +): void +export function extract( + opt: TarOptionsWithAliasesSync, + files?: string[], +): void +export function extract( + opt: TarOptionsWithAliasesFile, + files?: string[], + cb?: () => any, +): Promise +export function extract( + opt: TarOptionsWithAliasesFile, + cb: () => any, +): Promise +export function extract( + opt: TarOptionsWithAliases, + files?: string[], +): Unpack +export function extract( + opt_: TarOptionsWithAliases, + files?: string[] | (() => any), + cb?: () => any, +): void | Promise | Unpack { + if (typeof opt_ === 'function') { + ;(cb = opt_), (files = undefined), (opt_ = {}) + } else if (Array.isArray(opt_)) { + ;(files = opt_), (opt_ = {}) + } + + if (typeof files === 'function') { + ;(cb = files), (files = undefined) + } + + if (!files) { + files = [] + } else { + files = Array.from(files) + } + + const opt = dealias(opt_) + + if (opt.sync && typeof cb === 'function') { + throw new TypeError( + 'callback not supported for sync tar functions', + ) + } + + if (!opt.file && typeof cb === 'function') { + throw new TypeError('callback only supported with file option') + } + + if (files.length) { + filesFilter(opt, files) + } + + return isSyncFile(opt) + ? extractFileSync(opt) + : isFile(opt) + ? extractFile(opt, cb) + : isSync(opt) + ? 
extractSync(opt) + : extract_(opt) +} + +// construct a filter that limits the file entries listed +// include child entries if a dir is included +const filesFilter = (opt: TarOptions, files: string[]) => { + const map = new Map(files.map(f => [stripTrailingSlashes(f), true])) + const filter = opt.filter + + const mapHas = (file: string, r: string = ''): boolean => { + const root = r || parse(file).root || '.' + let ret: boolean + if (file === root) ret = false + else { + const m = map.get(file) + if (m !== undefined) { + ret = m + } else { + ret = mapHas(dirname(file), root) + } + } + + map.set(file, ret) + return ret + } + + opt.filter = filter + ? (file, entry) => + filter(file, entry) && mapHas(stripTrailingSlashes(file)) + : file => mapHas(stripTrailingSlashes(file)) +} + +const extractFileSync = (opt: TarOptionsSyncFile) => { + const u = new UnpackSync(opt) + + const file = opt.file + const stat = fs.statSync(file) + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + const readSize = opt.maxReadSize || 16 * 1024 * 1024 + const stream = new fsm.ReadStreamSync(file, { + readSize: readSize, + size: stat.size, + }) + stream.pipe(u) +} + +const extractFile = ( + opt: TarOptionsFile, + cb: () => void = () => {}, +) => { + const u = new Unpack(opt) + const readSize = opt.maxReadSize || 16 * 1024 * 1024 + + const file = opt.file + const p = new Promise((resolve, reject) => { + u.on('error', reject) + u.on('close', resolve) + + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + fs.stat(file, (er, stat) => { + if (er) { + reject(er) + } else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size, + }) + stream.on('error', reject) + stream.pipe(u) + } + }) + }) + return cb ? p.then(cb, cb) : p +} + +const extractSync = (opt: TarOptionsSync) => new UnpackSync(opt) + +const extract_ = (opt: TarOptions) => new Unpack(opt) diff --git a/src/get-write-flag.ts b/src/get-write-flag.ts new file mode 100644 index 00000000..db358591 --- /dev/null +++ b/src/get-write-flag.ts @@ -0,0 +1,29 @@ +// Get the appropriate flag to use for creating files +// We use fmap on Windows platforms for files less than +// 512kb. This is a fairly low limit, but avoids making +// things slower in some cases. Since most of what this +// library is used for is extracting tarballs of many +// relatively small files in npm packages and the like, +// it can be a big boost on Windows platforms. +// Only supported in Node v12.9.0 and above. +const platform = process.env.__FAKE_PLATFORM__ || process.platform +const isWindows = platform === 'win32' +const g = globalThis as typeof globalThis & { + __FAKE_TESTING_FS__: typeof import('fs') +} +const fs = g.__FAKE_TESTING_FS__ || require('fs') + +/* istanbul ignore next */ +const { + O_CREAT, + O_TRUNC, + O_WRONLY, + UV_FS_O_FILEMAP = 0, +} = fs.constants + +const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP +const fMapLimit = 512 * 1024 +const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY +export const getWriteFlag = !fMapEnabled + ? () => 'w' + : (size: number) => (size < fMapLimit ? 
fMapFlag : 'w') diff --git a/src/header.ts b/src/header.ts new file mode 100644 index 00000000..b9efbda6 --- /dev/null +++ b/src/header.ts @@ -0,0 +1,397 @@ +// parse a 512-byte header block to a data object, or vice-versa +// encode returns `true` if a pax extended header is needed, because +// the data could not be faithfully encoded in a simple header. +// (Also, check header.needPax to see if it needs a pax header.) + +import { posix as pathModule } from 'node:path' +import * as large from './large-numbers.js' +import type { EntryTypeCode, EntryTypeName } from './types.js' +import * as types from './types.js' + +export type HeaderData = { + path?: string + mode?: number + uid?: number + gid?: number + size?: number + cksum?: number + type?: EntryTypeCode | EntryTypeName + linkpath?: string + uname?: string + gname?: string + devmaj?: number + devmin?: number + atime?: Date + ctime?: Date + mtime?: Date + + // fields that are common in extended PAX headers, but not in the + // "standard" tar header block + charset?: string + comment?: string + dev?: number + ino?: number + nlink?: number +} + +export class Header implements HeaderData { + cksumValid: boolean = false + needPax: boolean = false + nullBlock: boolean = false + + block?: Buffer + path?: string + mode?: number + uid?: number + gid?: number + size?: number + cksum?: number + #type: EntryTypeCode = '0' + linkpath?: string + uname?: string + gname?: string + devmaj: number = 0 + devmin: number = 0 + atime?: Date + ctime?: Date + mtime?: Date + + charset?: string + comment?: string + + constructor( + data?: Buffer | HeaderData, + off: number = 0, + ex?: HeaderData, + gex?: HeaderData, + ) { + if (Buffer.isBuffer(data)) { + this.decode(data, off || 0, ex, gex) + } else if (data) { + this.#slurp(data) + } + } + + decode( + buf: Buffer, + off: number, + ex?: HeaderData, + gex?: HeaderData, + ) { + if (!off) { + off = 0 + } + + if (!buf || !(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header') + } + + this.path = decString(buf, off, 100) + this.mode = decNumber(buf, off + 100, 8) + this.uid = decNumber(buf, off + 108, 8) + this.gid = decNumber(buf, off + 116, 8) + this.size = decNumber(buf, off + 124, 12) + this.mtime = decDate(buf, off + 136, 12) + this.cksum = decNumber(buf, off + 148, 12) + + // if we have extended or global extended headers, apply them now + // See https://github.com/npm/node-tar/pull/187 + if (ex) this.#slurp(ex) + if (gex) this.#slurp(gex, true) + + // old tar versions marked dirs as a file with a trailing / + const t = decString(buf, off + 156, 1) + if (types.isCode(t)) this.#type = t + else this.#type = '0' + if (this.#type === '') { + this.#type = '0' + } + if (this.#type === '0' && this.path.slice(-1) === '/') { + this.#type = '5' + } + + // tar implementations sometimes incorrectly put the stat(dir).size + // as the size in the tarball, even though Directory entries are + // not able to have any body at all. In the very rare chance that + // it actually DOES have a body, we weren't going to do anything with + // it anyway, and it'll just be a warning about an invalid header. + if (this.#type === '5') { + this.size = 0 + } + + this.linkpath = decString(buf, off + 157, 100) + if ( + buf.subarray(off + 257, off + 265).toString() === + 'ustar\u000000' + ) { + this.uname = decString(buf, off + 265, 32) + this.gname = decString(buf, off + 297, 32) + this.devmaj = decNumber(buf, off + 329, 8) ?? 0 + this.devmin = decNumber(buf, off + 337, 8) ?? 
0 + if (buf[off + 475] !== 0) { + // definitely a prefix, definitely >130 chars. + const prefix = decString(buf, off + 345, 155) + this.path = prefix + '/' + this.path + } else { + const prefix = decString(buf, off + 345, 130) + if (prefix) { + this.path = prefix + '/' + this.path + } + this.atime = decDate(buf, off + 476, 12) + this.ctime = decDate(buf, off + 488, 12) + } + } + + let sum = 8 * 0x20 + for (let i = off; i < off + 148; i++) { + sum += buf[i] as number + } + + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i] as number + } + + this.cksumValid = sum === this.cksum + if (this.cksum === null && sum === 8 * 0x20) { + this.nullBlock = true + } + } + + #slurp(ex: HeaderData, gex: boolean = false) { + Object.assign( + this, + Object.fromEntries( + Object.entries(ex).filter(([k, v]) => { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. Also, any + // null/undefined values are ignored. + return !( + v === null || + v === undefined || + (k === 'path' && gex) + ) + }), + ), + ) + } + + encode(buf?: Buffer, off: number = 0) { + if (!buf) { + buf = this.block = Buffer.alloc(512) + } + + if (!(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header') + } + + const prefixSize = this.ctime || this.atime ? 130 : 155 + const split = splitPrefix(this.path || '', prefixSize) + const path = split[0] + const prefix = split[1] + this.needPax = !!split[2] + + this.needPax = encString(buf, off, 100, path) || this.needPax + this.needPax = + encNumber(buf, off + 100, 8, this.mode) || this.needPax + this.needPax = + encNumber(buf, off + 108, 8, this.uid) || this.needPax + this.needPax = + encNumber(buf, off + 116, 8, this.gid) || this.needPax + this.needPax = + encNumber(buf, off + 124, 12, this.size) || this.needPax + this.needPax = + encDate(buf, off + 136, 12, this.mtime) || this.needPax + buf[off + 156] = this.#type.charCodeAt(0) + this.needPax = + encString(buf, off + 157, 100, this.linkpath) || this.needPax + buf.write('ustar\u000000', off + 257, 8) + this.needPax = + encString(buf, off + 265, 32, this.uname) || this.needPax + this.needPax = + encString(buf, off + 297, 32, this.gname) || this.needPax + this.needPax = + encNumber(buf, off + 329, 8, this.devmaj) || this.needPax + this.needPax = + encNumber(buf, off + 337, 8, this.devmin) || this.needPax + this.needPax = + encString(buf, off + 345, prefixSize, prefix) || this.needPax + if (buf[off + 475] !== 0) { + this.needPax = + encString(buf, off + 345, 155, prefix) || this.needPax + } else { + this.needPax = + encString(buf, off + 345, 130, prefix) || this.needPax + this.needPax = + encDate(buf, off + 476, 12, this.atime) || this.needPax + this.needPax = + encDate(buf, off + 488, 12, this.ctime) || this.needPax + } + + let sum = 8 * 0x20 + for (let i = off; i < off + 148; i++) { + sum += buf[i] as number + } + + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i] as number + } + + this.cksum = sum + encNumber(buf, off + 148, 8, this.cksum) + this.cksumValid = true + + return this.needPax + } + + get type(): EntryTypeName { + return types.name.get(this.#type) as EntryTypeName + } + + get typeKey(): EntryTypeCode { + return this.#type + } + + set type(type: EntryTypeCode | EntryTypeName) { + const c = String(types.code.get(type as EntryTypeName)) + if (types.isCode(c)) { + this.#type = c + } else if (types.isCode(type)) { + this.#type = type + } else { + throw new TypeError('invalid entry type: ' + type) + } + } +} + +const splitPrefix = ( + p: 
string, + prefixSize: number, +): [string, string, boolean] => { + const pathSize = 100 + let pp = p + let prefix = '' + let ret: undefined | [string, string, boolean] = undefined + const root = pathModule.parse(p).root || '.' + + if (Buffer.byteLength(pp) < pathSize) { + ret = [pp, prefix, false] + } else { + // first set prefix to the dir, and path to the base + prefix = pathModule.dirname(pp) + pp = pathModule.basename(pp) + + do { + if ( + Buffer.byteLength(pp) <= pathSize && + Buffer.byteLength(prefix) <= prefixSize + ) { + // both fit! + ret = [pp, prefix, false] + } else if ( + Buffer.byteLength(pp) > pathSize && + Buffer.byteLength(prefix) <= prefixSize + ) { + // prefix fits in prefix, but path doesn't fit in path + ret = [pp.slice(0, pathSize - 1), prefix, true] + } else { + // make path take a bit from prefix + pp = pathModule.join(pathModule.basename(prefix), pp) + prefix = pathModule.dirname(prefix) + } + } while (prefix !== root && ret === undefined) + + // at this point, found no resolution, just truncate + if (!ret) { + ret = [p.slice(0, pathSize - 1), '', true] + } + } + return ret +} + +const decString = (buf: Buffer, off: number, size: number) => + buf + .subarray(off, off + size) + .toString('utf8') + .replace(/\0.*/, '') + +const decDate = (buf: Buffer, off: number, size: number) => + numToDate(decNumber(buf, off, size)) + +const numToDate = (num?: number) => + num === undefined ? undefined : new Date(num * 1000) + +const decNumber = (buf: Buffer, off: number, size: number) => + Number(buf[off]) & 0x80 + ? large.parse(buf.subarray(off, off + size)) + : decSmallNumber(buf, off, size) + +const nanUndef = (value: number) => (isNaN(value) ? undefined : value) + +const decSmallNumber = (buf: Buffer, off: number, size: number) => + nanUndef( + parseInt( + buf + .subarray(off, off + size) + .toString('utf8') + .replace(/\0.*$/, '') + .trim(), + 8, + ), + ) + +// the maximum encodable as a null-terminated octal, by field size +const MAXNUM = { + 12: 0o77777777777, + 8: 0o7777777, +} + +const encNumber = ( + buf: Buffer, + off: number, + size: 12 | 8, + num?: number, +) => + num === undefined + ? false + : num > MAXNUM[size] || num < 0 + ? (large.encode(num, buf.subarray(off, off + size)), true) + : (encSmallNumber(buf, off, size, num), false) + +const encSmallNumber = ( + buf: Buffer, + off: number, + size: number, + num: number, +) => buf.write(octalString(num, size), off, size, 'ascii') + +const octalString = (num: number, size: number) => + padOctal(Math.floor(num).toString(8), size) + +const padOctal = (str: string, size: number) => + (str.length === size - 1 + ? str + : new Array(size - str.length - 1).join('0') + str + ' ') + '\0' + +const encDate = ( + buf: Buffer, + off: number, + size: 8 | 12, + date?: Date, +) => + date === undefined + ? false + : encNumber(buf, off, size, date.getTime() / 1000) + +// enough to fill the longest string we've got +const NULLS = new Array(156).join('\0') +// pad with nulls, return true if it's longer or non-ascii +const encString = ( + buf: Buffer, + off: number, + size: number, + str?: string, +) => + str === undefined + ? 
false + : (buf.write(str + NULLS, off, size, 'utf8'), + str.length !== Buffer.byteLength(str) || str.length > size) diff --git a/src/index.ts b/src/index.ts new file mode 100644 index 00000000..bc2c67fe --- /dev/null +++ b/src/index.ts @@ -0,0 +1,21 @@ +export * from './create.js' +export * from './replace.js' +export * from './list.js' +export * from './update.js' +export * from './extract.js' + +export { create as c } from './create.js' +export { replace as r } from './replace.js' +export { list as t } from './list.js' +export { update as u } from './update.js' +export { extract as x } from './extract.js' + +// classes +export * from './pack.js' +export * from './unpack.js' +export * from './parse.js' +export * from './read-entry.js' +export * from './write-entry.js' +export * from './header.js' +export * from './pax.js' +export * as types from './types.js' diff --git a/src/large-numbers.ts b/src/large-numbers.ts new file mode 100644 index 00000000..cd62726d --- /dev/null +++ b/src/large-numbers.ts @@ -0,0 +1,98 @@ +// Tar can encode large and negative numbers using a leading byte of +// 0xff for negative, and 0x80 for positive. + +export const encode = (num: number, buf: Buffer) => { + if (!Number.isSafeInteger(num)) { + // The number is so large that javascript cannot represent it with integer + // precision. + throw Error('cannot encode number outside of javascript safe integer range') + } else if (num < 0) { + encodeNegative(num, buf) + } else { + encodePositive(num, buf) + } + return buf +} + +const encodePositive = (num: number, buf: Buffer) => { + buf[0] = 0x80 + + for (var i = buf.length; i > 1; i--) { + buf[i - 1] = num & 0xff + num = Math.floor(num / 0x100) + } +} + +const encodeNegative = (num: number, buf: Buffer) => { + buf[0] = 0xff + var flipped = false + num = num * -1 + for (var i = buf.length; i > 1; i--) { + var byte = num & 0xff + num = Math.floor(num / 0x100) + if (flipped) { + buf[i - 1] = onesComp(byte) + } else if (byte === 0) { + buf[i - 1] = 0 + } else { + flipped = true + buf[i - 1] = twosComp(byte) + } + } +} + +export const parse = (buf: Buffer) => { + const pre = buf[0] + const value = pre === 0x80 ? pos(buf.subarray(1, buf.length)) + : pre === 0xff ? twos(buf) + : null + if (value === null) { + throw Error('invalid base256 encoding') + } + + if (!Number.isSafeInteger(value)) { + // The number is so large that javascript cannot represent it with integer + // precision. 
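    // (a 12-byte base-256 field can hold values up to 2^88 - 1, far
    // beyond Number.MAX_SAFE_INTEGER of 2^53 - 1, so this branch is
    // reachable with well-formed but extreme headers)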
+ throw Error('parsed number outside of javascript safe integer range') + } + + return value +} + +const twos = (buf: Buffer) => { + var len = buf.length + var sum = 0 + var flipped = false + for (var i = len - 1; i > -1; i--) { + var byte = Number(buf[i]) + var f + if (flipped) { + f = onesComp(byte) + } else if (byte === 0) { + f = byte + } else { + flipped = true + f = twosComp(byte) + } + if (f !== 0) { + sum -= f * Math.pow(256, len - i - 1) + } + } + return sum +} + +const pos = (buf: Buffer) => { + var len = buf.length + var sum = 0 + for (var i = len - 1; i > -1; i--) { + var byte = Number(buf[i]) + if (byte !== 0) { + sum += byte * Math.pow(256, len - i - 1) + } + } + return sum +} + +const onesComp = (byte: number) => (0xff ^ byte) & 0xff + +const twosComp = (byte: number) => ((0xff ^ byte) + 1) & 0xff diff --git a/src/list.ts b/src/list.ts new file mode 100644 index 00000000..a31a4397 --- /dev/null +++ b/src/list.ts @@ -0,0 +1,185 @@ +// tar -t +import * as fsm from '@isaacs/fs-minipass' +import fs from 'node:fs' +import { dirname, parse } from 'path' +import { + dealias, + isFile, + isSyncFile, + TarOptions, + TarOptionsFile, + TarOptionsWithAliases, + TarOptionsWithAliasesFile, + TarOptionsWithAliasesSync, + TarOptionsWithAliasesSyncFile, +} from './options.js' +import { Parser } from './parse.js' +import { stripTrailingSlashes } from './strip-trailing-slashes.js' + +export function list( + opt: TarOptionsWithAliasesSyncFile, + files?: string[], +): void +export function list( + opt: TarOptionsWithAliasesSync, + files?: string[], +): void +export function list( + opt: TarOptionsWithAliasesFile, + files?: string[], + cb?: () => any, +): Promise +export function list( + opt: TarOptionsWithAliasesFile, + cb: () => any, +): Promise +export function list( + opt: TarOptionsWithAliases, + files?: string[], +): Parser +export function list( + opt_: TarOptionsWithAliases, + files?: string[] | (() => any), + cb?: () => any, +): void | Promise | Parser { + if (typeof opt_ === 'function') { + ;(cb = opt_), (files = undefined), (opt_ = {}) + } else if (Array.isArray(opt_)) { + ;(files = opt_), (opt_ = {}) + } + + if (typeof files === 'function') { + ;(cb = files), (files = undefined) + } + + if (!files) { + files = [] + } else { + files = Array.from(files) + } + + const opt = dealias(opt_) + + if (opt.sync && typeof cb === 'function') { + throw new TypeError( + 'callback not supported for sync tar functions', + ) + } + + if (!opt.file && typeof cb === 'function') { + throw new TypeError('callback only supported with file option') + } + + if (files.length) { + filesFilter(opt, files) + } + + if (!opt.noResume) { + onentryFunction(opt) + } + + return isSyncFile(opt) + ? listFileSync(opt) + : isFile(opt) + ? listFile(opt, cb) + : list_(opt) +} + +const onentryFunction = (opt: TarOptions) => { + const onentry = opt.onentry + opt.onentry = onentry + ? e => { + onentry(e) + e.resume() + } + : e => e.resume() +} + +// construct a filter that limits the file entries listed +// include child entries if a dir is included +const filesFilter = (opt: TarOptions, files: string[]) => { + const map = new Map( + files.map(f => [stripTrailingSlashes(f), true]), + ) + const filter = opt.filter + + const mapHas = (file: string, r: string = ''): boolean => { + const root = r || parse(file).root || '.' 
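    // a path is included if it, or any parent directory of it, was
    // named explicitly: e.g. requesting 'a/b' also lists 'a/b/c/d.txt'.
    // Results are memoized in `map` so each prefix is walked only once.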
    let ret: boolean
    if (file === root) ret = false
    else {
      const m = map.get(file)
      if (m !== undefined) {
        ret = m
      } else {
        ret = mapHas(dirname(file), root)
      }
    }

    map.set(file, ret)
    return ret
  }

  opt.filter = filter
    ? (file, entry) =>
        filter(file, entry) && mapHas(stripTrailingSlashes(file))
    : file => mapHas(stripTrailingSlashes(file))
}

const listFileSync = (opt: TarOptionsWithAliasesSyncFile) => {
  const p = list_(opt)
  const file = opt.file
  let threw = true
  let fd
  try {
    const stat = fs.statSync(file)
    const readSize = opt.maxReadSize || 16 * 1024 * 1024
    if (stat.size < readSize) {
      p.end(fs.readFileSync(file))
    } else {
      let pos = 0
      const buf = Buffer.allocUnsafe(readSize)
      fd = fs.openSync(file, 'r')
      while (pos < stat.size) {
        const bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
        pos += bytesRead
        p.write(buf.subarray(0, bytesRead))
      }
      p.end()
    }
    threw = false
  } finally {
    if (threw && fd) {
      try {
        fs.closeSync(fd)
      } catch (er) {}
    }
  }
}

const listFile = (opt: TarOptionsFile, cb?: () => void) => {
  const parse = new Parser(opt)
  const readSize = opt.maxReadSize || 16 * 1024 * 1024

  const file = opt.file
  const p = new Promise<void>((resolve, reject) => {
    parse.on('error', reject)
    parse.on('end', resolve)

    fs.stat(file, (er, stat) => {
      if (er) {
        reject(er)
      } else {
        const stream = new fsm.ReadStream(file, {
          readSize: readSize,
          size: stat.size,
        })
        stream.on('error', reject)
        stream.pipe(parse)
      }
    })
  })
  return cb ? p.then(cb, cb) : p
}

const list_ = (opt: TarOptions) => new Parser(opt)
diff --git a/src/mkdir.ts b/src/mkdir.ts
new file mode 100644
index 00000000..6c84654b
--- /dev/null
+++ b/src/mkdir.ts
@@ -0,0 +1,292 @@
import { chownr, chownrSync } from 'chownr'
import fs from 'fs'
import { mkdirp, mkdirpSync } from 'mkdirp'
import path from 'node:path'
import { CwdError } from './cwd-error.js'
import { normalizeWindowsPath } from './normalize-windows-path.js'
import { SymlinkError } from './symlink-error.js'

export type MkdirOptions = {
  uid?: number
  gid?: number
  processUid?: number
  processGid?: number
  umask?: number
  preserve: boolean
  unlink: boolean
  cache: Map<string, boolean>
  cwd: string
  mode: number
  noChmod: boolean
}

export type MkdirError =
  | NodeJS.ErrnoException
  | CwdError
  | SymlinkError

const cGet = (cache: Map<string, boolean>, key: string) =>
  cache.get(normalizeWindowsPath(key))
const cSet = (
  cache: Map<string, boolean>,
  key: string,
  val: boolean,
) => cache.set(normalizeWindowsPath(key), val)

const checkCwd = (
  dir: string,
  cb: (er?: null | MkdirError) => any,
) => {
  fs.stat(dir, (er, st) => {
    if (er || !st.isDirectory()) {
      er = new CwdError(
        dir,
        (er as NodeJS.ErrnoException)?.code || 'ENOTDIR',
      )
    }
    cb(er)
  })
}

/**
 * Wrapper around mkdirp for tar's needs.
 *
 * The main purpose is to avoid creating directories if we know that
 * they already exist (and track which ones exist for this purpose),
 * and prevent entries from being extracted into symlinked folders,
 * if `preservePaths` is not set.
 */
export const mkdir = (
  dir: string,
  opt: MkdirOptions,
  cb: (er?: null | MkdirError, made?: string) => void,
) => {
  dir = normalizeWindowsPath(dir)

  // if there's any overlap between mask and mode,
  // then we'll need an explicit chmod
  const umask = opt.umask ?? 0o22
  const mode = opt.mode | 0o0700
  const needChmod = (mode & umask) !== 0
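  // (e.g. with the default umask 0o22 and a tar entry mode of 0o775,
  // needChmod is true: mkdir itself would mask off the group write
  // bit, so an explicit chmod must follow.)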
  const uid = opt.uid
  const gid = opt.gid
  const doChown =
    typeof uid === 'number' &&
    typeof gid === 'number' &&
    (uid !== opt.processUid || gid !== opt.processGid)

  const preserve = opt.preserve
  const unlink = opt.unlink
  const cache = opt.cache
  const cwd = normalizeWindowsPath(opt.cwd)

  const done = (er?: null | MkdirError, created?: string) => {
    if (er) {
      cb(er)
    } else {
      cSet(cache, dir, true)
      if (created && doChown) {
        chownr(created, uid, gid, er =>
          done(er as NodeJS.ErrnoException),
        )
      } else if (needChmod) {
        fs.chmod(dir, mode, cb)
      } else {
        cb()
      }
    }
  }

  if (cache && cGet(cache, dir) === true) {
    return done()
  }

  if (dir === cwd) {
    return checkCwd(dir, done)
  }

  if (preserve) {
    return mkdirp(dir, { mode }).then(
      made => done(null, made ?? undefined), // oh, ts
      done,
    )
  }

  const sub = normalizeWindowsPath(path.relative(cwd, dir))
  const parts = sub.split('/')
  mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done)
}

const mkdir_ = (
  base: string,
  parts: string[],
  mode: number,
  cache: Map<string, boolean>,
  unlink: boolean,
  cwd: string,
  created: string | undefined,
  cb: (er?: null | MkdirError, made?: string) => void,
): void => {
  if (!parts.length) {
    return cb(null, created)
  }
  const p = parts.shift()
  const part = normalizeWindowsPath(path.resolve(base + '/' + p))
  if (cGet(cache, part)) {
    return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
  }
  fs.mkdir(
    part,
    mode,
    onmkdir(part, parts, mode, cache, unlink, cwd, created, cb),
  )
}

const onmkdir =
  (
    part: string,
    parts: string[],
    mode: number,
    cache: Map<string, boolean>,
    unlink: boolean,
    cwd: string,
    created: string | undefined,
    cb: (er?: null | MkdirError, made?: string) => void,
  ) =>
  (er?: null | NodeJS.ErrnoException) => {
    if (er) {
      fs.lstat(part, (statEr, st) => {
        if (statEr) {
          statEr.path =
            statEr.path && normalizeWindowsPath(statEr.path)
          cb(statEr)
        } else if (st.isDirectory()) {
          mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
        } else if (unlink) {
          fs.unlink(part, er => {
            if (er) {
              return cb(er)
            }
            fs.mkdir(
              part,
              mode,
              onmkdir(
                part,
                parts,
                mode,
                cache,
                unlink,
                cwd,
                created,
                cb,
              ),
            )
          })
        } else if (st.isSymbolicLink()) {
          return cb(
            new SymlinkError(part, part + '/' + parts.join('/')),
          )
        } else {
          cb(er)
        }
      })
    } else {
      created = created || part
      mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
    }
  }

const checkCwdSync = (dir: string) => {
  let ok = false
  let code: string | undefined = undefined
  try {
    ok = fs.statSync(dir).isDirectory()
  } catch (er) {
    code = (er as NodeJS.ErrnoException)?.code
  } finally {
    if (!ok) {
      throw new CwdError(dir, code ?? 'ENOTDIR')
    }
  }
}

export const mkdirSync = (dir: string, opt: MkdirOptions) => {
  dir = normalizeWindowsPath(dir)
  // if there's any overlap between mask and mode,
  // then we'll need an explicit chmod
  const umask = opt.umask ?? 
0o22 + const mode = opt.mode | 0o700 + const needChmod = (mode & umask) !== 0 + + const uid = opt.uid + const gid = opt.gid + const doChown = + typeof uid === 'number' && + typeof gid === 'number' && + (uid !== opt.processUid || gid !== opt.processGid) + + const preserve = opt.preserve + const unlink = opt.unlink + const cache = opt.cache + const cwd = normalizeWindowsPath(opt.cwd) + + const done = (created?: string | undefined) => { + cSet(cache, dir, true) + if (created && doChown) { + chownrSync(created, uid, gid) + } + if (needChmod) { + fs.chmodSync(dir, mode) + } + } + + if (cache && cGet(cache, dir) === true) { + return done() + } + + if (dir === cwd) { + checkCwdSync(cwd) + return done() + } + + if (preserve) { + return done(mkdirpSync(dir, mode) ?? undefined) + } + + const sub = normalizeWindowsPath(path.relative(cwd, dir)) + const parts = sub.split('/') + let created: string | undefined = undefined + for ( + let p = parts.shift(), part = cwd; + p && (part += '/' + p); + p = parts.shift() + ) { + part = normalizeWindowsPath(path.resolve(part)) + if (cGet(cache, part)) { + continue + } + + try { + fs.mkdirSync(part, mode) + created = created || part + cSet(cache, part, true) + } catch (er) { + const st = fs.lstatSync(part) + if (st.isDirectory()) { + cSet(cache, part, true) + continue + } else if (unlink) { + fs.unlinkSync(part) + fs.mkdirSync(part, mode) + created = created || part + cSet(cache, part, true) + continue + } else if (st.isSymbolicLink()) { + return new SymlinkError(part, part + '/' + parts.join('/')) + } + } + } + + return done(created) +} diff --git a/src/mode-fix.ts b/src/mode-fix.ts new file mode 100644 index 00000000..57bf9c98 --- /dev/null +++ b/src/mode-fix.ts @@ -0,0 +1,26 @@ +export const modeFix = (mode: number, isDir: boolean, portable: boolean) => { + mode &= 0o7777 + + // in portable mode, use the minimum reasonable umask + // if this system creates files with 0o664 by default + // (as some linux distros do), then we'll write the + // archive with 0o644 instead. Also, don't ever create + // a file that is not readable/writable by the owner. + if (portable) { + mode = (mode | 0o600) & ~0o22 + } + + // if dirs are readable, then they should be listable + if (isDir) { + if (mode & 0o400) { + mode |= 0o100 + } + if (mode & 0o40) { + mode |= 0o10 + } + if (mode & 0o4) { + mode |= 0o1 + } + } + return mode +} diff --git a/src/normalize-unicode.ts b/src/normalize-unicode.ts new file mode 100644 index 00000000..61dacf06 --- /dev/null +++ b/src/normalize-unicode.ts @@ -0,0 +1,12 @@ +// warning: extremely hot code path. +// This has been meticulously optimized for use +// within npm install on large package trees. +// Do not edit without careful benchmarking. +const normalizeCache = Object.create(null) +const { hasOwnProperty } = Object.prototype +export const normalizeUnicode = (s: string) => { + if (!hasOwnProperty.call(normalizeCache, s)) { + normalizeCache[s] = s.normalize('NFD') + } + return normalizeCache[s] +} diff --git a/src/normalize-windows-path.ts b/src/normalize-windows-path.ts new file mode 100644 index 00000000..e452009b --- /dev/null +++ b/src/normalize-windows-path.ts @@ -0,0 +1,12 @@ +// on windows, either \ or / are valid directory separators. +// on unix, \ is a valid character in filenames. +// so, on windows, and only on windows, we replace all \ chars with /, +// so that we can use / as our one and only directory separator char. 
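// (e.g. on win32, 'C:\x\y' and 'C:/x/y' both normalize to 'C:/x/y';
// on other platforms the string is returned unchanged)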
+ +const platform = + process.env.TESTING_TAR_FAKE_PLATFORM || process.platform + +export const normalizeWindowsPath = + platform !== 'win32' + ? (p: string) => p + : (p: string) => p && p.replace(/\\/g, '/') diff --git a/src/options.ts b/src/options.ts new file mode 100644 index 00000000..4449b718 --- /dev/null +++ b/src/options.ts @@ -0,0 +1,473 @@ +// turn tar(1) style args like `C` into the more verbose things like `cwd` + +import { type GzipOptions, type ZlibOptions } from 'minizlib' +import { type Stats } from 'node:fs' +import { type ReadEntry } from './read-entry.js' +import { type WarnData } from './warn-method.js' + +const argmap = new Map( + [ + ['C', 'cwd'], + ['f', 'file'], + ['z', 'gzip'], + ['P', 'preservePaths'], + ['U', 'unlink'], + ['strip-components', 'strip'], + ['stripComponents', 'strip'], + ['keep-newer', 'newer'], + ['keepNewer', 'newer'], + ['keep-newer-files', 'newer'], + ['keepNewerFiles', 'newer'], + ['k', 'keep'], + ['keep-existing', 'keep'], + ['keepExisting', 'keep'], + ['m', 'noMtime'], + ['no-mtime', 'noMtime'], + ['p', 'preserveOwner'], + ['L', 'follow'], + ['h', 'follow'], + ], +) + +/** + * The options that can be provided to tar commands. + * + * Note that some of these are only relevant for certain commands, since + * they are specific to reading or writing. + * + * Aliases are provided in the {@link TarOptionsWithAliases} type. + */ +export interface TarOptions { + /** + * Perform all I/O operations synchronously. If the stream is ended + * immediately, then it will be processed entirely synchronously. + */ + sync?: boolean + + /** + * The tar file to be read and/or written. When this is set, a stream + * is not returned. Asynchronous commands will return a promise indicating + * when the operation is completed, and synchronous commands will return + * immediately. + */ + file?: string + + /** + * Treat warnings as crash-worthy errors. Defaults false. + */ + strict?: boolean + + /** + * The effective current working directory for this tar command + */ + cwd?: string + + /** + * When creating a tar archive, this can be used to compress it as well. + * Set to `true` to use the default gzip options, or customize them as + * needed. + * + * When reading, if this is unset, then the compression status will be + * inferred from the archive data. This is generally best, unless you are + * sure of the compression settings in use to create the archive, and want to + * fail if the archive doesn't match expectations. + */ + gzip?: boolean | GzipOptions + + /** + * When creating archives, preserve absolute and `..` paths in the archive, + * rather than sanitizing them under the cwd. + * + * When extracting, allow absolute paths, paths containing `..`, and + * extracting through symbolic links. By default, the root `/` is stripped + * from absolute paths (eg, turning `/x/y/z` into `x/y/z`), paths containing + * `..` are not extracted, and any file whose location would be modified by a + * symbolic link is not extracted. + * + * **WARNING** This is almost always unsafe, and must NEVER be used on + * archives from untrusted sources, such as user input, and every entry must + * be validated to ensure it is safe to write. Even if the input is not + * malicious, mistakes can cause a lot of damage! + */ + preservePaths?: boolean + + /** + * When extracting, unlink files before creating them. Without this option, + * tar overwrites existing files, which preserves existing hardlinks. 
With + * this option, existing hardlinks will be broken, as will any symlink that + * would affect the location of an extracted file. + */ + unlink?: boolean + + /** + * When extracting, strip the specified number of path portions from the + * entry path. For example, with `{strip: 2}`, the entry `a/b/c/d` would be + * extracted to `{cwd}/c/d`. + */ + strip?: number + + /** + * When extracting, keep the existing file on disk if it's newer than the + * file in the archive. + */ + newer?: boolean + + /** + * When extracting, do not overwrite existing files at all. + */ + keep?: boolean + + /** + * When extracting, do not set the `mtime` value for extracted entries to + * match the `mtime` in the archive. + * + * When creating archives, do not store the `mtime` value in the entry. Note + * that this prevents properly using other mtime-based features (such as + * `tar.update` or the `newer` option) with the resulting archive. + */ + noMtime?: boolean + + /** + * Set the `uid` and `gid` of extracted entries to the `uid` and `gid` fields + * in the archive. Defaults to true when run as root, and false otherwise. + * + * If false, then files and directories will be set with the owner and group + * of the user running the process. This is similar to `-p` in `tar(1)`, but + * ACLs and other system-specific data is never unpacked in this + * implementation, and modes are set by default already. + */ + preserveOwner?: boolean + + /** + * Pack the targets of symbolic links rather than the link itself. + */ + follow?: boolean + + /** + * Set to `true` or an object with settings for `zlib.BrotliCompress()` to + * create a brotli-compressed archive + */ + brotli?: boolean | ZlibOptions + + /** + * A function that is called with `(path, stat)` when creating an archive, or + * `(path, entry)` when unpacking. Return true to process the file/entry, or + * false to exclude it. + */ + filter?: (path: string, entry: Stats | ReadEntry) => boolean + + /** + * A function that gets called for any warning encountered. + * + * Note: if `strict` is set, then the warning will throw, and this method + * will not be called. + */ + onwarn?: (code: string, message: string, data: WarnData) => any + + /** + * When unpacking, force all created files and directories, and all + * implicitly created directories, to be owned by the specified user id, + * regardless of the `uid` field in the archive. + * + * Cannot be used along with `preserveOwner`. Requires also setting the `gid` + * option. + */ + uid?: number + + /** + * When unpacking, force all created files and directories, and all + * implicitly created directories, to be owned by the specified group id, + * regardless of the `gid` field in the archive. + * + * Cannot be used along with `preserveOwner`. Requires also setting the `uid` + * option. + */ + gid?: number + + /** + * When extracting, provide a function that takes an `entry` object, and + * returns a stream, or any falsey value. If a stream is provided, then that + * stream's data will be written instead of the contents of the archive + * entry. If a falsey value is provided, then the entry is written to disk as + * normal. + * + * To exclude items from extraction, use the `filter` option. + * + * Note that using an asynchronous stream type with the `transform` option + * will cause undefined behavior in synchronous extractions. + * [MiniPass](http://npm.im/minipass)-based streams are designed for this use + * case. 
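   *
   * A hypothetical example: a transform that scrubs sensitive strings
   * from matching entries could return a Minipass stream that emits
   * the redacted chunks, and return a falsey value for every other
   * entry so those are written out unchanged.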
+ */ + transform?: (entry: ReadEntry) => any + + /** + * The maximum depth of subfolders to extract into. This defaults to 1024. + * Anything deeper than the limit will raise a warning and skip the entry. + * Set to `Infinity` to remove the limitation. + */ + maxDepth?: number + + /** + * Do not call `chmod()` to ensure that extracted files match the entry's + * mode field. This also suppresses the call to `process.umask()` to + * determine the default umask value, since tar will extract with whatever + * mode is provided, and let the process `umask` apply normally. + */ + noChmod?: boolean + + /** + * When parsing/listing archives, `entry` streams are by default resumed + * (set into "flowing" mode) immediately after the call to `onentry()`. + * Set to suppress this behavior. + * + * Note that when this is set, the stream will never complete until the + * data is consumed somehow. + */ + noResume?: boolean + + /** + * When extracting or listing archives, this method will be called with + * each entry that is not excluded by a `filter`. + * + * Important when listing archives synchronously from a file, because there + * is otherwise no way to interact with the data! + */ + onentry?: (entry: ReadEntry) => any + + /** + * When creating archives, omit any metadata that is system-specific: + * `ctime`, `atime`, `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and + * `nlink`. Note that `mtime` is still included, because this is necessary + * for other time-based operations such as `tar.update`. Additionally, `mode` + * is set to a "reasonable default" for mose unix systems, based on an + * effective `umask` of `0o22`. + * + * This also defaults the `portable` option in the gzip configs when creating + * a compressed archive, in order to produce deterministic archives that are + * not operating-system specific. + */ + portable?: boolean + + /** + * When creating archives, do not recursively archive the contents of + * directories. By default, archiving a directory archives all of its + * contents as well. + */ + noDirRecurse?: boolean + + /** + * Suppress Pax extended headers. Note that this means long paths and + * linkpaths will be truncated, and large or negative numeric values may be + * interpreted incorrectly. + */ + noPax?: boolean + + /** + * Set to a `Date` object to force a specific `mtime` value for everything + * written to an archive. + * + * Overridden by `noMtime`. + */ + mtime?: Date + + /** + * A path portion to prefix onto the entries added to an archive. + */ + prefix?: string + + /** + * The mode to set on any created file archive, defaults to 0o666 + * masked by the process umask, often resulting in 0o644. + */ + mode?: number + + ////////////////////////// + // internal options + + /** + * A cache of mtime values, to avoid having to stat the same file repeatedly. + * @internal + */ + mtimeCache?: Map + + /** + * maximum buffer size for `fs.read()` operations. + * + * @internal + */ + maxReadSize?: number + + /** + * Filter modes of entries being unpacked, like `process.umask()` + * + * @internal + */ + umask?: number + + /** + * default mode for directories + * + * @internal + */ + dmode?: number + + /** + * default mode for files + * + * @internal + */ + fmode?: number + + /** + * Map that tracks which directories already exist, for extraction + * + * @internal + */ + dirCache?: Map + /** + * maximum supported size of meta entries. 
Defaults to 1MB + * + * @internal + */ + maxMetaEntrySize?: number + + /** + * A Map object containing the device and inode value for any file whose + * `nlink` value is greater than 1, to identify hard links when creating + * archives. + * + * @internal + */ + linkCache?: Map + + /** + * A map object containing the results of `fs.readdir()` calls. + * + * @internal + */ + readdirCache?: Map + + /** + * A cache of all `lstat` results, for use in creating archives. + * + * @internal + */ + statCache?: Map + + /** + * Number of concurrent jobs to run when creating archives. + * + * Defaults to 4. + * + * @internal + */ + jobs?: number + + /** + * Automatically set to true on Windows systems. + * + * When unpacking, causes behavior where filenames containing `<|>?:` + * characters are converted to windows-compatible escape sequences in the + * created filesystem entries. + * + * When packing, causes behavior where paths replace `\` with `/`, and + * filenames containing the windows-compatible escaped forms of `<|>?:` are + * converted to actual `<|>?:` characters in the archive. + * + * @internal + */ + win32?: boolean + + /** + * For `WriteEntry` objects, the absolute path to the entry on the + * filesystem. By default, this is `resolve(cwd, entry.path)`, but it can be + * overridden explicitly. + * + * @internal + */ + absolute?: string + + /** + * Used with Parser stream interface, to attach and take over when the + * stream is completely parsed. If this is set, then the prefinish, + * finish, and end events will not fire, and are the responsibility of + * the ondone method to emit properly. + * + * @internal + */ + ondone?: () => void + + /** + * Mostly for testing, but potentially useful in some cases. + * Forcibly trigger a chown on every entry, no matter what. 
+   */
+  forceChown?: boolean
+}
+
+export type TarOptionsSync = TarOptions & { sync: true }
+export type TarOptionsFile = TarOptions & { file: string }
+export type TarOptionsSyncFile = TarOptionsSync & TarOptionsFile
+
+export type LinkCacheKey = `${number}:${number}`
+
+export interface TarOptionsWithAliases extends TarOptions {
+  C?: TarOptions['cwd']
+  f?: TarOptions['file']
+  z?: TarOptions['gzip']
+  P?: TarOptions['preservePaths']
+  U?: TarOptions['unlink']
+  'strip-components'?: TarOptions['strip']
+  stripComponents?: TarOptions['strip']
+  'keep-newer'?: TarOptions['newer']
+  keepNewer?: TarOptions['newer']
+  'keep-newer-files'?: TarOptions['newer']
+  keepNewerFiles?: TarOptions['newer']
+  k?: TarOptions['keep']
+  'keep-existing'?: TarOptions['keep']
+  keepExisting?: TarOptions['keep']
+  m?: TarOptions['noMtime']
+  'no-mtime'?: TarOptions['noMtime']
+  p?: TarOptions['preserveOwner']
+  L?: TarOptions['follow']
+  h?: TarOptions['follow']
+}
+
+export type TarOptionsWithAliasesSync = TarOptionsWithAliases & {
+  sync: true
+}
+export type TarOptionsWithAliasesFile = TarOptionsWithAliases & {
+  file: string
+}
+export type TarOptionsWithAliasesSyncFile =
+  TarOptionsWithAliasesSync & TarOptionsWithAliasesFile
+
+export const isSyncFile = (o: TarOptions): o is TarOptionsSyncFile =>
+  !!o.sync && !!o.file
+export const isSync = (o: TarOptions): o is TarOptionsSync =>
+  !!o.sync
+export const isFile = (o: TarOptions): o is TarOptionsFile =>
+  !!o.file
+
+const dealiasKey = (
+  k: keyof TarOptionsWithAliases,
+): keyof TarOptions => {
+  const d = argmap.get(k)
+  if (d) return d
+  return k as keyof TarOptions
+}
+
+export const dealias = (
+  opt: TarOptionsWithAliases = {},
+): TarOptions => {
+  if (!opt) return {}
+  const result: Record<string, any> = {}
+  for (const [key, v] of Object.entries(opt) as [
+    keyof TarOptionsWithAliases,
+    any,
+  ][]) {
+    // TS doesn't know that aliases are going to always be the same type
+    const k = dealiasKey(key)
+    result[k] = v
+  }
+  return result as TarOptions
+}
diff --git a/src/pack.ts b/src/pack.ts
new file mode 100644
index 00000000..4dc7e3b5
--- /dev/null
+++ b/src/pack.ts
@@ -0,0 +1,500 @@
+// A readable tar stream creator
+// Technically, this is a transform stream that you write paths into,
+// and tar format comes out of.
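+//
+// For example, a minimal usage sketch (illustrative only; assumes a
+// `some-dir` folder on disk and an `out.tar` destination):
+//
+//   import fs from 'fs'
+//   new Pack({ cwd: process.cwd() })
+//     .add('some-dir')
+//     .end()
+//     .pipe(fs.createWriteStream('out.tar'))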
+// The `add()` method is like `write()` but returns this, +// and end() return `this` as well, so you can +// do `new Pack(opt).add('files').add('dir').end().pipe(output) +// You could also do something like: +// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar')) + +import fs, { type Stats } from 'fs' +import { Minipass } from 'minipass' +import { BrotliCompress, Gzip } from 'minizlib' +import path from 'path' +import { normalizeWindowsPath } from './normalize-windows-path.js' +import { dealias, LinkCacheKey, TarOptions } from './options.js' +import { ReadEntry } from './read-entry.js' +import { + warnMethod, + type WarnData, + type Warner, +} from './warn-method.js' +import { + WriteEntry, + WriteEntrySync, + WriteEntryTar, +} from './write-entry.js' + +export class PackJob { + path: string + absolute: string + entry?: WriteEntry | WriteEntryTar + stat?: Stats + readdir?: string[] + pending: boolean = false + ignore: boolean = false + piped: boolean = false + + constructor(path: string, absolute: string) { + this.path = path || './' + this.absolute = absolute + } +} + +const EOF = Buffer.alloc(1024) +const ONSTAT = Symbol('onStat') +const ENDED = Symbol('ended') +const QUEUE = Symbol('queue') +const CURRENT = Symbol('current') +const PROCESS = Symbol('process') +const PROCESSING = Symbol('processing') +const PROCESSJOB = Symbol('processJob') +const JOBS = Symbol('jobs') +const JOBDONE = Symbol('jobDone') +const ADDFSENTRY = Symbol('addFSEntry') +const ADDTARENTRY = Symbol('addTarEntry') +const STAT = Symbol('stat') +const READDIR = Symbol('readdir') +const ONREADDIR = Symbol('onreaddir') +const PIPE = Symbol('pipe') +const ENTRY = Symbol('entry') +const ENTRYOPT = Symbol('entryOpt') +const WRITEENTRYCLASS = Symbol('writeEntryClass') +const WRITE = Symbol('write') +const ONDRAIN = Symbol('ondrain') + +export class Pack extends Minipass implements Warner { + opt: TarOptions + file: string + cwd: string + maxReadSize?: number + preservePaths: boolean + strict: boolean + noPax: boolean + prefix: string + linkCache: Map + statCache: Map + readdirCache: Map + portable: boolean + zip?: Gzip | BrotliCompress + noDirRecurse: boolean + follow: boolean + noMtime: boolean + mtime?: Date + filter: Exclude + jobs: number; + + [WRITEENTRYCLASS]: + | typeof WriteEntry + | typeof WriteEntrySync + [QUEUE]: PackJob[] = []; + [JOBS]: number = 0; + [PROCESSING]: boolean = false; + [ENDED]: boolean = false + + constructor(opt_: TarOptions = {}) { + super() + const opt = dealias(opt_) + this.opt = opt + this.file = opt.file || '' + this.cwd = opt.cwd || process.cwd() + this.maxReadSize = opt.maxReadSize + this.preservePaths = !!opt.preservePaths + this.strict = !!opt.strict + this.noPax = !!opt.noPax + this.prefix = normalizeWindowsPath(opt.prefix || '') + this.linkCache = opt.linkCache || new Map() + this.statCache = opt.statCache || new Map() + this.readdirCache = opt.readdirCache || new Map() + + this[WRITEENTRYCLASS] = WriteEntry + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn) + } + + this.portable = !!opt.portable + + if (opt.gzip || opt.brotli) { + if (opt.gzip && opt.brotli) { + throw new TypeError('gzip and brotli are mutually exclusive') + } + if (opt.gzip) { + if (typeof opt.gzip !== 'object') { + opt.gzip = {} + } + if (this.portable) { + opt.gzip.portable = true + } + this.zip = new Gzip(opt.gzip) + } + if (opt.brotli) { + if (typeof opt.brotli !== 'object') { + opt.brotli = {} + } + this.zip = new BrotliCompress(opt.brotli) + } + const zip = this.zip as 
Gzip | BrotliCompress + zip.on('data', chunk => super.write(chunk)) + zip.on('end', () => super.end()) + zip.on('drain', () => this[ONDRAIN]()) + this.on('resume', () => zip.resume()) + } else { + this.on('drain', this[ONDRAIN]) + } + + this.noDirRecurse = !!opt.noDirRecurse + this.follow = !!opt.follow + this.noMtime = !!opt.noMtime + this.mtime = opt.mtime + + this.filter = + typeof opt.filter === 'function' ? opt.filter : () => true + + this[JOBS] = 0 + this.jobs = Number(opt.jobs) || 4 + this[PROCESSING] = false + this[ENDED] = false + } + + warn(code: string, message: string | Error, data: WarnData = {}) { + return warnMethod(this, code, message, data) + } + + [WRITE](chunk: Buffer) { + return super.write(chunk) + } + + add(path: string | ReadEntry) { + this.write(path) + return this + } + + end(cb?: () => void): this + end(path: string, cb?: () => void): this + end( + path: string, + encoding?: Minipass.Encoding | undefined, + cb?: () => void, + ): this + end( + path?: string | (() => void), + _encoding?: Minipass.Encoding | (() => void), + _cb?: () => void, + ) { + if (typeof path === 'string') { + this.write(path) + } + this[ENDED] = true + this[PROCESS]() + return this + } + + //@ts-ignore + write(path: string | ReadEntry) { + if (this[ENDED]) { + throw new Error('write after end') + } + + if (path instanceof ReadEntry) { + this[ADDTARENTRY](path) + } else { + this[ADDFSENTRY](path) + } + return this.flowing + } + + [ADDTARENTRY](p: ReadEntry) { + const absolute = normalizeWindowsPath( + path.resolve(this.cwd, p.path), + ) + // in this case, we don't have to wait for the stat + if (!this.filter(p.path, p)) { + p.resume() + } else { + const job = new PackJob(p.path, absolute) + job.entry = new WriteEntryTar(p, this[ENTRYOPT](job)) + job.entry.on('end', _ => this[JOBDONE](job)) + this[JOBS] += 1 + this[QUEUE].push(job) + } + + this[PROCESS]() + } + + [ADDFSENTRY](p: string) { + const absolute = normalizeWindowsPath(path.resolve(this.cwd, p)) + this[QUEUE].push(new PackJob(p, absolute)) + this[PROCESS]() + } + + [STAT](job: PackJob) { + job.pending = true + this[JOBS] += 1 + const stat = this.follow ? 'stat' : 'lstat' + fs[stat](job.absolute, (er, stat) => { + job.pending = false + this[JOBS] -= 1 + if (er) { + this.emit('error', er) + } else { + this[ONSTAT](job, stat) + } + }) + } + + [ONSTAT](job: PackJob, stat: Stats) { + this.statCache.set(job.absolute, stat) + job.stat = stat + + // now we have the stat, we can filter it. 
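+    // (when creating archives, the filter receives the entry path and its
+    // Stats object, matching the `filter` option documented in options.ts)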
+ if (!this.filter(job.path, stat)) { + job.ignore = true + } + + this[PROCESS]() + } + + [READDIR](job: PackJob) { + job.pending = true + this[JOBS] += 1 + fs.readdir(job.absolute, (er, entries) => { + job.pending = false + this[JOBS] -= 1 + if (er) { + return this.emit('error', er) + } + this[ONREADDIR](job, entries) + }) + } + + [ONREADDIR](job: PackJob, entries: string[]) { + this.readdirCache.set(job.absolute, entries) + job.readdir = entries + this[PROCESS]() + } + + [PROCESS]() { + if (this[PROCESSING]) { + return + } + + this[PROCESSING] = true + for ( + let j: PackJob | undefined, w = 0; + (j = this[QUEUE][w]) && this[JOBS] < this.jobs; + w ++ + ) { + this[PROCESSJOB](j) + if (j.ignore) { + this[QUEUE].splice(w, 1) + } + } + + this[PROCESSING] = false + + if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) { + if (this.zip) { + this.zip.end(EOF) + } else { + super.write(EOF) + super.end() + } + } + } + + get [CURRENT]() { + return this[QUEUE] && this[QUEUE][0] + } + + [JOBDONE](_job: PackJob) { + this[QUEUE].shift() + this[JOBS] -= 1 + this[PROCESS]() + } + + [PROCESSJOB](job: PackJob) { + if (job.pending) { + return + } + + if (job.entry) { + if (job === this[CURRENT] && !job.piped) { + this[PIPE](job) + } + return + } + + if (!job.stat) { + const sc = this.statCache.get(job.absolute) + if (sc) { + this[ONSTAT](job, sc) + } else { + this[STAT](job) + } + } + if (!job.stat) { + return + } + + // filtered out! + if (job.ignore) { + return + } + + if ( + !this.noDirRecurse && + job.stat.isDirectory() && + !job.readdir + ) { + + const rc = this.readdirCache.get(job.absolute) + if ( rc) { + this[ONREADDIR](job, rc) + } else { + this[READDIR](job) + } + if (!job.readdir) { + return + } + } + + // we know it doesn't have an entry, because that got checked above + job.entry = this[ENTRY](job) + if (!job.entry) { + job.ignore = true + return + } + + if (job === this[CURRENT] && !job.piped) { + this[PIPE](job) + } + } + + [ENTRYOPT](job: PackJob): TarOptions { + return { + onwarn: (code, msg, data) => this.warn(code, msg, data), + noPax: this.noPax, + cwd: this.cwd, + absolute: job.absolute, + preservePaths: this.preservePaths, + maxReadSize: this.maxReadSize, + strict: this.strict, + portable: this.portable, + linkCache: this.linkCache, + statCache: this.statCache, + noMtime: this.noMtime, + mtime: this.mtime, + prefix: this.prefix, + } + } + + [ENTRY](job: PackJob) { + this[JOBS] += 1 + try { + const Cls = this[WRITEENTRYCLASS] + return new Cls(job.path, this[ENTRYOPT](job)) + .on('end', () => this[JOBDONE](job)) + .on('error', er => this.emit('error', er)) + } catch (er) { + this.emit('error', er) + } + } + + [ONDRAIN]() { + if (this[CURRENT] && this[CURRENT].entry) { + this[CURRENT].entry.resume() + } + } + + // like .pipe() but using super, because our write() is special + [PIPE](job: PackJob) { + job.piped = true + + if (job.readdir) { + job.readdir.forEach(entry => { + const p = job.path + const base = p === './' ? 
'' : p.replace(/\/*$/, '/') + this[ADDFSENTRY](base + entry) + }) + } + + const source = job.entry + const zip = this.zip + + /* c8 ignore start */ + if (!source) { + throw new Error('must have source before piping') + } + /* c8 ignore stop */ + + if (zip) { + source.on('data', chunk => { + if (!zip.write(chunk)) { + source.pause() + } + }) + } else { + source.on('data', chunk => { + if (!super.write(chunk)) { + source.pause() + } + }) + } + } + + pause() { + if (this.zip) { + this.zip.pause() + } + return super.pause() + } +} + +export class PackSync extends Pack { + constructor(opt: TarOptions) { + super(opt) + this[WRITEENTRYCLASS] = WriteEntrySync + } + + // pause/resume are no-ops in sync streams. + pause() {} + resume() {} + + [STAT](job: PackJob) { + const stat = this.follow ? 'statSync' : 'lstatSync' + this[ONSTAT](job, fs[stat](job.absolute)) + } + + [READDIR](job: PackJob) { + this[ONREADDIR](job, fs.readdirSync(job.absolute)) + } + + // gotta get it all in this tick + [PIPE](job: PackJob) { + const source = job.entry + /* c8 ignore start */ + if (!source) { + throw new Error('job without source') + } + /* c8 ignore stop */ + const zip = this.zip + + if (job.readdir) { + job.readdir.forEach(entry => { + const p = job.path + const base = p === './' ? '' : p.replace(/\/*$/, '/') + this[ADDFSENTRY](base + entry) + }) + } + + if (zip) { + source.on('data', chunk => { + zip.write(chunk) + }) + } else { + source.on('data', chunk => { + super[WRITE](chunk) + }) + } + } +} diff --git a/src/parse.ts b/src/parse.ts new file mode 100644 index 00000000..7912fef0 --- /dev/null +++ b/src/parse.ts @@ -0,0 +1,629 @@ +// this[BUFFER] is the remainder of a chunk if we're waiting for +// the full 512 bytes of a header to come in. We will Buffer.concat() +// it to the next write(), which is a mem copy, but a small one. +// +// this[QUEUE] is a Yallist of entries that haven't been emitted +// yet this can only get filled up if the user keeps write()ing after +// a write() returns false, or does a write() with more than one entry +// +// We don't buffer chunks, we always parse them and either create an +// entry, or push it into the active entry. The ReadEntry class knows +// to throw data away if .ignore=true +// +// Shift entry off the buffer when it emits 'end', and emit 'entry' for +// the next one in the list. 
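+//
+// For example, a minimal listing sketch (illustrative only; assumes an
+// `archive.tar` file on disk):
+//
+//   import fs from 'fs'
+//   const p = new Parser({
+//     onentry: entry => {
+//       console.error(entry.path)
+//       entry.resume()
+//     },
+//   })
+//   fs.createReadStream('archive.tar').pipe(p)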
+// +// At any time, we're pushing body chunks into the entry at WRITEENTRY, +// and waiting for 'end' on the entry at READENTRY +// +// ignored entries get .resume() called on them straight away + +import { EventEmitter as EE } from 'events' +import { BrotliDecompress, Unzip } from 'minizlib' +import { Header } from './header.js' +import { TarOptions } from './options.js' +import { Pax } from './pax.js' +import { ReadEntry } from './read-entry.js' +import { + warnMethod, + type WarnData, + type Warner, +} from './warn-method.js' + +const maxMetaEntrySize = 1024 * 1024 +const gzipHeader = Buffer.from([0x1f, 0x8b]) + +const STATE = Symbol('state') +const WRITEENTRY = Symbol('writeEntry') +const READENTRY = Symbol('readEntry') +const NEXTENTRY = Symbol('nextEntry') +const PROCESSENTRY = Symbol('processEntry') +const EX = Symbol('extendedHeader') +const GEX = Symbol('globalExtendedHeader') +const META = Symbol('meta') +const EMITMETA = Symbol('emitMeta') +const BUFFER = Symbol('buffer') +const QUEUE = Symbol('queue') +const ENDED = Symbol('ended') +const EMITTEDEND = Symbol('emittedEnd') +const EMIT = Symbol('emit') +const UNZIP = Symbol('unzip') +const CONSUMECHUNK = Symbol('consumeChunk') +const CONSUMECHUNKSUB = Symbol('consumeChunkSub') +const CONSUMEBODY = Symbol('consumeBody') +const CONSUMEMETA = Symbol('consumeMeta') +const CONSUMEHEADER = Symbol('consumeHeader') +const CONSUMING = Symbol('consuming') +const BUFFERCONCAT = Symbol('bufferConcat') +const MAYBEEND = Symbol('maybeEnd') +const WRITING = Symbol('writing') +const ABORTED = Symbol('aborted') +const DONE = Symbol('onDone') +const SAW_VALID_ENTRY = Symbol('sawValidEntry') +const SAW_NULL_BLOCK = Symbol('sawNullBlock') +const SAW_EOF = Symbol('sawEOF') +const CLOSESTREAM = Symbol('closeStream') + +const noop = () => true + +export type State = 'begin' | 'header' | 'ignore' | 'meta' | 'body' + +export class Parser extends EE implements Warner { + file: string + strict: boolean + maxMetaEntrySize: number + filter: Exclude + brotli?: TarOptions['brotli'] + + writable: true = true + readable: false = false; + + [QUEUE]: (ReadEntry | [string | symbol, any, any])[] = []; + [BUFFER]?: Buffer; + [READENTRY]?: ReadEntry; + [WRITEENTRY]?: ReadEntry; + [STATE]: State = 'begin'; + [META]: string = ''; + [EX]?: Pax; + [GEX]?: Pax; + [ENDED]: boolean = false; + [UNZIP]?: false | Unzip | BrotliDecompress; + [ABORTED]: boolean = false; + [SAW_VALID_ENTRY]?: boolean; + [SAW_NULL_BLOCK]: boolean = false; + [SAW_EOF]: boolean = false; + [WRITING]: boolean = false; + [CONSUMING]: boolean = false; + [EMITTEDEND]: boolean = false + + constructor(opt: TarOptions = {}) { + super() + + this.file = opt.file || '' + + // these BADARCHIVE errors can't be detected early. listen on DONE. + this.on(DONE, () => { + if ( + this[STATE] === 'begin' || + this[SAW_VALID_ENTRY] === false + ) { + // either less than 1 block of data, or all entries were invalid. + // Either way, probably not even a tarball. + this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format') + } + }) + + if (opt.ondone) { + this.on(DONE, opt.ondone) + } else { + this.on(DONE, () => { + this.emit('prefinish') + this.emit('finish') + this.emit('end') + }) + } + + this.strict = !!opt.strict + this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize + this.filter = typeof opt.filter === 'function' ? 
opt.filter : noop
+    // Unlike gzip, brotli doesn't have any magic bytes to identify it.
+    // Users need to explicitly tell us they're extracting a brotli file,
+    // or we infer it from the file extension.
+    const isTBR =
+      opt.file &&
+      (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'))
+    // if it's a tbr file it MIGHT be brotli, but we don't know until
+    // we look at it and verify it's not a valid tar file.
+    this.brotli =
+      !opt.gzip && opt.brotli !== undefined
+        ? opt.brotli
+        : isTBR
+          ? undefined
+          : false
+
+    // have to set this so that streams are ok piping into it
+    this.on('end', () => this[CLOSESTREAM]())
+
+    if (typeof opt.onwarn === 'function') {
+      this.on('warn', opt.onwarn)
+    }
+    if (typeof opt.onentry === 'function') {
+      this.on('entry', opt.onentry)
+    }
+  }
+
+  warn(
+    code: string,
+    message: string | Error,
+    data: WarnData = {},
+  ): void {
+    warnMethod(this, code, message, data)
+  }
+
+  [CONSUMEHEADER](chunk: Buffer, position: number) {
+    if (this[SAW_VALID_ENTRY] === undefined) {
+      this[SAW_VALID_ENTRY] = false
+    }
+    let header
+    try {
+      header = new Header(chunk, position, this[EX], this[GEX])
+    } catch (er) {
+      return this.warn('TAR_ENTRY_INVALID', er as Error)
+    }
+
+    if (header.nullBlock) {
+      if (this[SAW_NULL_BLOCK]) {
+        this[SAW_EOF] = true
+        // ending an archive with no entries. pointless, but legal.
+        if (this[STATE] === 'begin') {
+          this[STATE] = 'header'
+        }
+        this[EMIT]('eof')
+      } else {
+        this[SAW_NULL_BLOCK] = true
+        this[EMIT]('nullBlock')
+      }
+    } else {
+      this[SAW_NULL_BLOCK] = false
+      if (!header.cksumValid) {
+        this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header })
+      } else if (!header.path) {
+        this.warn('TAR_ENTRY_INVALID', 'path is required', { header })
+      } else {
+        const type = header.type
+        if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
+          this.warn('TAR_ENTRY_INVALID', 'linkpath required', {
+            header,
+          })
+        } else if (
+          !/^(Symbolic)?Link$/.test(type) &&
+          header.linkpath
+        ) {
+          this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {
+            header,
+          })
+        } else {
+          const entry = (this[WRITEENTRY] = new ReadEntry(
+            header,
+            this[EX],
+            this[GEX],
+          ))
+
+          // we do this for meta & ignored entries as well, because they
+          // are still valid tar, or else we wouldn't know to ignore them
+          if (!this[SAW_VALID_ENTRY]) {
+            if (entry.remain) {
+              // this might be the one!
+              const onend = () => {
+                if (!entry.invalid) {
+                  this[SAW_VALID_ENTRY] = true
+                }
+              }
+              entry.on('end', onend)
+            } else {
+              this[SAW_VALID_ENTRY] = true
+            }
+          }
+
+          if (entry.meta) {
+            if (entry.size > this.maxMetaEntrySize) {
+              entry.ignore = true
+              this[EMIT]('ignoredEntry', entry)
+              this[STATE] = 'ignore'
+              entry.resume()
+            } else if (entry.size > 0) {
+              this[META] = ''
+              entry.on('data', c => (this[META] += c))
+              this[STATE] = 'meta'
+            }
+          } else {
+            this[EX] = undefined
+            entry.ignore =
+              entry.ignore || !this.filter(entry.path, entry)
+
+            if (entry.ignore) {
+              // probably valid, just not something we care about
+              this[EMIT]('ignoredEntry', entry)
+              this[STATE] = entry.remain ? 
'ignore' : 'header' + entry.resume() + } else { + if (entry.remain) { + this[STATE] = 'body' + } else { + this[STATE] = 'header' + entry.end() + } + + if (!this[READENTRY]) { + this[QUEUE].push(entry) + this[NEXTENTRY]() + } else { + this[QUEUE].push(entry) + } + } + } + } + } + } + } + + [CLOSESTREAM]() { + queueMicrotask(() => this.emit('close')) + } + + [PROCESSENTRY](entry?: ReadEntry | [string | symbol, any, any]) { + let go = true + + if (!entry) { + this[READENTRY] = undefined + go = false + } else if (Array.isArray(entry)) { + const [ev, ...args]: [string | symbol, any, any] = entry + this.emit(ev, ...args) + } else { + this[READENTRY] = entry + this.emit('entry', entry) + if (!entry.emittedEnd) { + entry.on('end', () => this[NEXTENTRY]()) + go = false + } + } + + return go + } + + [NEXTENTRY]() { + do {} while (this[PROCESSENTRY](this[QUEUE].shift())) + + if (!this[QUEUE].length) { + // At this point, there's nothing in the queue, but we may have an + // entry which is being consumed (readEntry). + // If we don't, then we definitely can handle more data. + // If we do, and either it's flowing, or it has never had any data + // written to it, then it needs more. + // The only other possibility is that it has returned false from a + // write() call, so we wait for the next drain to continue. + const re = this[READENTRY] + const drainNow = !re || re.flowing || re.size === re.remain + if (drainNow) { + if (!this[WRITING]) { + this.emit('drain') + } + } else { + re.once('drain', () => this.emit('drain')) + } + } + } + + [CONSUMEBODY](chunk: Buffer, position: number) { + // write up to but no more than writeEntry.blockRemain + const entry = this[WRITEENTRY] + /* c8 ignore start */ + if (!entry) { + throw new Error('attempt to consume body without entry??') + } + /* c8 ignore stop */ + const br = entry.blockRemain ?? 0 + const c = + br >= chunk.length && position === 0 + ? chunk + : chunk.subarray(position, position + br) + + entry.write(c) + + if (!entry.blockRemain) { + this[STATE] = 'header' + this[WRITEENTRY] = undefined + entry.end() + } + + return c.length + } + + [CONSUMEMETA](chunk: Buffer, position: number) { + const entry = this[WRITEENTRY] + const ret = this[CONSUMEBODY](chunk, position) + + // if we finished, then the entry is reset + if (!this[WRITEENTRY] && entry) { + this[EMITMETA](entry) + } + + return ret + } + + [EMIT](ev: string | symbol, data?: any, extra?: any) { + if (!this[QUEUE].length && !this[READENTRY]) { + this.emit(ev, data, extra) + } else { + this[QUEUE].push([ev, data, extra]) + } + } + + [EMITMETA](entry: ReadEntry) { + this[EMIT]('meta', this[META]) + switch (entry.type) { + case 'ExtendedHeader': + case 'OldExtendedHeader': + this[EX] = Pax.parse(this[META], this[EX], false) + break + + case 'GlobalExtendedHeader': + this[GEX] = Pax.parse(this[META], this[GEX], true) + break + + case 'NextFileHasLongPath': + case 'OldGnuLongPath': { + const ex = this[EX] ?? 
Object.create(null)
+        this[EX] = ex
+        ex.path = this[META].replace(/\0.*/, '')
+        break
+      }
+
+      case 'NextFileHasLongLinkpath': {
+        const ex = this[EX] || Object.create(null)
+        this[EX] = ex
+        ex.linkpath = this[META].replace(/\0.*/, '')
+        break
+      }
+
+      /* istanbul ignore next */
+      default:
+        throw new Error('unknown meta: ' + entry.type)
+    }
+  }
+
+  abort(error: Error) {
+    this[ABORTED] = true
+    this.emit('abort', error)
+    // always throws, even in non-strict mode
+    this.warn('TAR_ABORT', error, { recoverable: false })
+  }
+
+  write(chunk: Buffer) {
+    if (this[ABORTED]) {
+      return
+    }
+
+    // first write, might be gzipped
+    const needSniff =
+      this[UNZIP] === undefined ||
+      (this.brotli === undefined && this[UNZIP] === false)
+    if (needSniff && chunk) {
+      if (this[BUFFER]) {
+        chunk = Buffer.concat([this[BUFFER], chunk])
+        this[BUFFER] = undefined
+      }
+      if (chunk.length < gzipHeader.length) {
+        this[BUFFER] = chunk
+        return true
+      }
+
+      // look for gzip header
+      for (
+        let i = 0;
+        this[UNZIP] === undefined && i < gzipHeader.length;
+        i++
+      ) {
+        if (chunk[i] !== gzipHeader[i]) {
+          this[UNZIP] = false
+        }
+      }
+
+      const maybeBrotli = this.brotli === undefined
+      if (this[UNZIP] === false && maybeBrotli) {
+        // read the first header to see if it's a valid tar file. If so,
+        // we can safely assume that it's not actually brotli, despite the
+        // .tbr or .tar.br file extension.
+        // if we ended before getting a full chunk, yes, def brotli
+        if (chunk.length < 512) {
+          if (this[ENDED]) {
+            this.brotli = true
+          } else {
+            this[BUFFER] = chunk
+            return true
+          }
+        } else {
+          // if it's tar, it's pretty reliably not brotli, chances of
+          // that happening are astronomical.
+          try {
+            new Header(chunk.subarray(0, 512))
+            this.brotli = false
+          } catch (_) {
+            this.brotli = true
+          }
+        }
+      }
+
+      if (
+        this[UNZIP] === undefined ||
+        (this[UNZIP] === false && this.brotli)
+      ) {
+        const ended = this[ENDED]
+        this[ENDED] = false
+        this[UNZIP] =
+          this[UNZIP] === undefined
+            ? new Unzip({})
+            : new BrotliDecompress({})
+        this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
+        this[UNZIP].on('error', er => this.abort(er as Error))
+        this[UNZIP].on('end', () => {
+          this[ENDED] = true
+          this[CONSUMECHUNK]()
+        })
+        this[WRITING] = true
+        const ret = this[UNZIP][ended ? 'end' : 'write'](chunk)
+        this[WRITING] = false
+        return ret
+      }
+    }
+
+    this[WRITING] = true
+    if (this[UNZIP]) {
+      this[UNZIP].write(chunk)
+    } else {
+      this[CONSUMECHUNK](chunk)
+    }
+    this[WRITING] = false
+
+    // return false if there's a queue, or if the current entry isn't flowing
+    const ret = this[QUEUE].length
+      ? false
+      : this[READENTRY]
+        ? this[READENTRY].flowing
+        : true
+
+    // if we have no queue, then that means a clogged READENTRY
+    if (!ret && !this[QUEUE].length) {
+      this[READENTRY]?.once('drain', () => this.emit('drain'))
+    }
+
+    return ret
+  }
+
+  [BUFFERCONCAT](c: Buffer) {
+    if (c && !this[ABORTED]) {
+      this[BUFFER] = this[BUFFER]
+        ? Buffer.concat([this[BUFFER], c])
+        : c
+    }
+  }
+
+  [MAYBEEND]() {
+    if (
+      this[ENDED] &&
+      !this[EMITTEDEND] &&
+      !this[ABORTED] &&
+      !this[CONSUMING]
+    ) {
+      this[EMITTEDEND] = true
+      const entry = this[WRITEENTRY]
+      if (entry && entry.blockRemain) {
+        // truncated, likely a damaged file
+        const have = this[BUFFER] ? 
this[BUFFER].length : 0 + this.warn( + 'TAR_BAD_ARCHIVE', + `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, + { entry }, + ) + if (this[BUFFER]) { + entry.write(this[BUFFER]) + } + entry.end() + } + this[EMIT](DONE) + } + } + + [CONSUMECHUNK](chunk?: Buffer) { + if (this[CONSUMING] && chunk) { + this[BUFFERCONCAT](chunk) + } else if (!chunk && !this[BUFFER]) { + this[MAYBEEND]() + } else if (chunk) { + this[CONSUMING] = true + if (this[BUFFER]) { + this[BUFFERCONCAT](chunk) + const c = this[BUFFER] + this[BUFFER] = undefined + this[CONSUMECHUNKSUB](c) + } else { + this[CONSUMECHUNKSUB](chunk) + } + + while ( + this[BUFFER] && + (this[BUFFER] as Buffer)?.length >= 512 && + !this[ABORTED] && + !this[SAW_EOF] + ) { + const c = this[BUFFER] + this[BUFFER] = undefined + this[CONSUMECHUNKSUB](c) + } + this[CONSUMING] = false + } + + if (!this[BUFFER] || this[ENDED]) { + this[MAYBEEND]() + } + } + + [CONSUMECHUNKSUB](chunk: Buffer) { + // we know that we are in CONSUMING mode, so anything written goes into + // the buffer. Advance the position and put any remainder in the buffer. + let position = 0 + const length = chunk.length + while ( + position + 512 <= length && + !this[ABORTED] && + !this[SAW_EOF] + ) { + switch (this[STATE]) { + case 'begin': + case 'header': + this[CONSUMEHEADER](chunk, position) + position += 512 + break + + case 'ignore': + case 'body': + position += this[CONSUMEBODY](chunk, position) + break + + case 'meta': + position += this[CONSUMEMETA](chunk, position) + break + + /* istanbul ignore next */ + default: + throw new Error('invalid state: ' + this[STATE]) + } + } + + if (position < length) { + if (this[BUFFER]) { + this[BUFFER] = Buffer.concat([ + chunk.subarray(position), + this[BUFFER], + ]) + } else { + this[BUFFER] = chunk.subarray(position) + } + } + } + + end(chunk?: Buffer) { + if (!this[ABORTED]) { + if (this[UNZIP]) { + if (chunk) this[UNZIP].write(chunk) + this[UNZIP].end() + } else { + this[ENDED] = true + if (this.brotli === undefined) + chunk = chunk || Buffer.alloc(0) + if (chunk) this.write(chunk) + } + } + } +} diff --git a/src/path-reservations.ts b/src/path-reservations.ts new file mode 100644 index 00000000..711e9ee2 --- /dev/null +++ b/src/path-reservations.ts @@ -0,0 +1,192 @@ +// A path exclusive reservation system +// reserve([list, of, paths], fn) +// When the fn is first in line for all its paths, it +// is called with a cb that clears the reservation. +// +// Used by async unpack to avoid clobbering paths in use, +// while still allowing maximal safe parallelization. 
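+//
+// For example, the contract looks like this (an illustrative sketch, not
+// code from this module):
+//
+//   const reservations = new PathReservations()
+//   reservations.reserve(['some/dir/file', 'some/link/target'], done => {
+//     // runs once no conflicting reservation is ahead of it;
+//     // do the fs work for both paths, then release the reservation:
+//     done()
+//   })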
+
+import { join } from 'node:path'
+import { normalizeUnicode } from './normalize-unicode.js'
+import { stripTrailingSlashes } from './strip-trailing-slashes.js'
+
+const platform =
+  process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
+const isWindows = platform === 'win32'
+
+export type Reservation = {
+  paths: string[]
+  dirs: Set<string>
+}
+
+export type Handler = (clear: () => void) => void
+
+// return a set of parent dirs for a given path
+// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
+const getDirs = (path: string) => {
+  const dirs = path
+    .split('/')
+    .slice(0, -1)
+    .reduce((set: string[], path) => {
+      const s = set[set.length - 1]
+      if (s !== undefined) {
+        path = join(s, path)
+      }
+      set.push(path || '/')
+      return set
+    }, [])
+  return dirs
+}
+
+export class PathReservations {
+  // path => [function or Set]
+  // A Set object means a directory reservation
+  // A fn is a direct reservation on that path
+  #queues = new Map<string, (Handler | Set<Handler>)[]>()
+
+  // fn => {paths:[path,...], dirs:[path, ...]}
+  #reservations = new Map<Handler, Reservation>()
+
+  // functions currently running
+  #running = new Set<Handler>()
+
+  reserve(paths: string[], fn: Handler) {
+    paths = isWindows
+      ? ['win32 parallelization disabled']
+      : paths.map(p => {
+          // don't need normPath, because we skip this entirely for windows
+          return stripTrailingSlashes(
+            join(normalizeUnicode(p)),
+          ).toLowerCase()
+        })
+
+    const dirs = new Set(
+      paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)),
+    )
+    this.#reservations.set(fn, { dirs, paths })
+    for (const p of paths) {
+      const q = this.#queues.get(p)
+      if (!q) {
+        this.#queues.set(p, [fn])
+      } else {
+        q.push(fn)
+      }
+    }
+    for (const dir of dirs) {
+      const q = this.#queues.get(dir)
+      if (!q) {
+        this.#queues.set(dir, [new Set([fn])])
+      } else {
+        const l = q[q.length - 1]
+        if (l instanceof Set) {
+          l.add(fn)
+        } else {
+          q.push(new Set([fn]))
+        }
+      }
+    }
+    return this.#run(fn)
+  }
+
+  // return the queues for each path the function cares about
+  // fn => {paths, dirs}
+  #getQueues(fn: Handler): {
+    paths: Handler[][]
+    dirs: (Handler | Set<Handler>)[][]
+  } {
+    const res = this.#reservations.get(fn)
+    /* istanbul ignore if - unpossible */
+    if (!res) {
+      throw new Error('function does not have any path reservations')
+    }
+    return {
+      paths: res.paths.map((path: string) =>
+        this.#queues.get(path),
+      ) as Handler[][],
+      dirs: [...res.dirs].map(path => this.#queues.get(path)) as (
+        | Handler
+        | Set<Handler>
+      )[][],
+    }
+  }
+
+  // check if fn is first in line for all its paths, and is
+  // included in the first set for all its dir queues
+  check(fn: Handler) {
+    const { paths, dirs } = this.#getQueues(fn)
+    return (
+      paths.every(q => q && q[0] === fn) &&
+      dirs.every(q => q && q[0] instanceof Set && q[0].has(fn))
+    )
+  }
+
+  // run the function if it's first in line and not already running
+  #run(fn: Handler) {
+    if (this.#running.has(fn) || !this.check(fn)) {
+      return false
+    }
+    this.#running.add(fn)
+    fn(() => this.#clear(fn))
+    return true
+  }
+
+  #clear(fn: Handler) {
+    if (!this.#running.has(fn)) {
+      return false
+    }
+    const res = this.#reservations.get(fn)
+    /* c8 ignore start */
+    if (!res) {
+      throw new Error('invalid reservation')
+    }
+    /* c8 ignore stop */
+    const { paths, dirs } = res
+
+    const next = new Set<Handler>()
+    for (const path of paths) {
+      const q = this.#queues.get(path)
+      /* c8 ignore start */
+      if (!q || q?.[0] !== fn) {
+        continue
+      }
+      /* c8 ignore stop */
+      const q0 = q[1]
+      if (!q0) {
+        this.#queues.delete(path)
+        continue
+      }
+      q.shift()
+      if (typeof q0 === 'function') {
+        
next.add(q0) + } else { + for (const f of q0) { + next.add(f) + } + } + } + + for (const dir of dirs) { + const q = this.#queues.get(dir) + const q0 = q?.[0] + if (!(q0 instanceof Set) || !q) continue + if (q0.size === 1 && q.length === 1) { + this.#queues.delete(dir) + continue + } else if (q0.size === 1) { + q.shift() + // next one must be a function, + // or else the Set would've been reused + const n = q[0] + if (typeof n === 'function') { + next.add(n) + } + } else { + q0.delete(fn) + } + } + + this.#running.delete(fn) + next.forEach(fn => this.#run(fn)) + return true + } +} diff --git a/src/pax.ts b/src/pax.ts new file mode 100644 index 00000000..7c542c95 --- /dev/null +++ b/src/pax.ts @@ -0,0 +1,181 @@ +import { basename } from 'node:path' +import { Header, HeaderData } from './header.js' + +export class Pax implements HeaderData { + atime?: Date + mtime?: Date + ctime?: Date + + charset?: string + comment?: string + + gid?: number + uid?: number + + gname?: string + uname?: string + linkpath?: string + dev?: number + ino?: number + nlink?: number + path?: string + size?: number + mode?: number + + global: boolean + + constructor(obj: HeaderData, global: boolean = false) { + this.atime = obj.atime + this.charset = obj.charset + this.comment = obj.comment + this.gid = obj.gid + this.gname = obj.gname + this.linkpath = obj.linkpath + this.mtime = obj.mtime + this.path = obj.path + this.size = obj.size + this.uid = obj.uid + this.uname = obj.uname + this.dev = obj.dev + this.ino = obj.ino + this.nlink = obj.nlink + this.global = global + } + + encode() { + const body = this.encodeBody() + if (body === '') { + return Buffer.allocUnsafe(0) + } + + const bodyLen = Buffer.byteLength(body) + // round up to 512 bytes + // add 512 for header + const bufLen = 512 * Math.ceil(1 + bodyLen / 512) + const buf = Buffer.allocUnsafe(bufLen) + + // 0-fill the header section, it might not hit every field + for (let i = 0; i < 512; i++) { + buf[i] = 0 + } + + new Header({ + // XXX split the path + // then the path should be PaxHeader + basename, but less than 99, + // prepend with the dirname + path: ('PaxHeader/' + basename(this.path || '')).slice(0, 99), + mode: this.mode || 0o644, + uid: this.uid, + gid: this.gid, + size: bodyLen, + mtime: this.mtime, + type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader', + linkpath: '', + uname: this.uname || '', + gname: this.gname || '', + devmaj: 0, + devmin: 0, + atime: this.atime, + ctime: this.ctime, + }).encode(buf) + + buf.write(body, 512, bodyLen, 'utf8') + + // null pad after the body + for (let i = bodyLen + 512; i < buf.length; i++) { + buf[i] = 0 + } + + return buf + } + + encodeBody() { + return ( + this.encodeField('path') + + this.encodeField('ctime') + + this.encodeField('atime') + + this.encodeField('dev') + + this.encodeField('ino') + + this.encodeField('nlink') + + this.encodeField('charset') + + this.encodeField('comment') + + this.encodeField('gid') + + this.encodeField('gname') + + this.encodeField('linkpath') + + this.encodeField('mtime') + + this.encodeField('size') + + this.encodeField('uid') + + this.encodeField('uname') + ) + } + + encodeField(field: keyof Pax): string { + if (this[field] === undefined) { + return '' + } + const r = this[field] + const v = r instanceof Date ? r.getTime() / 1000 : r + const s = + ' ' + + (field === 'dev' || field === 'ino' || field === 'nlink' + ? 'SCHILY.' 
+        : '') +
+      field +
+      '=' +
+      v +
+      '\n'
+    const byteLen = Buffer.byteLength(s)
+    // the length prefix includes its own digits in ascii base-10,
+    // so adding the digit count can push the total over a power of 10,
+    // in which case one more digit is needed.
+    let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
+    if (byteLen + digits >= Math.pow(10, digits)) {
+      digits += 1
+    }
+    const len = digits + byteLen
+    return len + s
+  }
+
+  static parse(str: string, ex?: HeaderData, g: boolean = false) {
+    return new Pax(merge(parseKV(str), ex), g)
+  }
+}
+
+const merge = (a: HeaderData, b?: HeaderData) =>
+  b ? Object.assign({}, b, a) : a
+
+const parseKV = (str: string) =>
+  str
+    .replace(/\n$/, '')
+    .split('\n')
+    .reduce(parseKVLine, Object.create(null))
+
+const parseKVLine = (
+  set: Record<string, string | number | Date>,
+  line: string,
+) => {
+  const n = parseInt(line, 10)
+
+  // XXX Values with \n in them will fail this.
+  // Refactor to not be a naive line-by-line parse.
+  if (n !== Buffer.byteLength(line) + 1) {
+    return set
+  }
+
+  line = line.slice((n + ' ').length)
+  const kv = line.split('=')
+  const r = kv.shift()
+
+  /* c8 ignore next */
+  if (!r) throw new Error('fell off key/value list somehow')
+
+  const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
+  if (!k) {
+    return set
+  }
+
+  const v = kv.join('=')
+  set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
+    ? new Date(Number(v) * 1000)
+    : /^[0-9]+$/.test(v)
+      ? +v
+      : v
+  return set
+}
diff --git a/src/read-entry.ts b/src/read-entry.ts
new file mode 100644
index 00000000..98260ee7
--- /dev/null
+++ b/src/read-entry.ts
@@ -0,0 +1,151 @@
+import { Minipass } from 'minipass'
+import { Header } from './header.js'
+import { normalizeWindowsPath } from './normalize-windows-path.js'
+import { Pax } from './pax.js'
+import { EntryTypeName } from './types.js'
+
+export class ReadEntry extends Minipass {
+  extended?: Pax
+  globalExtended?: Pax
+  header: Header
+  startBlockSize: number
+  blockRemain: number
+  remain: number
+  type: EntryTypeName
+  meta: boolean = false
+  ignore: boolean = false
+  path: string
+  mode?: number
+  uid?: number
+  gid?: number
+  uname?: string
+  gname?: string
+  size: number = 0
+  mtime?: Date
+  atime?: Date
+  ctime?: Date
+  linkpath?: string
+
+  dev?: number
+  ino?: number
+  nlink?: number
+  invalid: boolean = false
+  absolute?: string
+  unsupported: boolean = false
+
+  constructor(header: Header, ex?: Pax, gex?: Pax) {
+    super({})
+    // read entries always start life paused. this is to avoid the
+    // situation where Minipass's auto-ending empty streams results
+    // in an entry ending before we're ready for it.
+    this.pause()
+    this.extended = ex
+    this.globalExtended = gex
+    this.header = header
+    this.remain = header.size ?? 0
+    this.startBlockSize = 512 * Math.ceil(this.remain / 512)
+    this.blockRemain = this.startBlockSize
+    this.type = header.type
+    switch (this.type) {
+      case 'File':
+      case 'OldFile':
+      case 'Link':
+      case 'SymbolicLink':
+      case 'CharacterDevice':
+      case 'BlockDevice':
+      case 'Directory':
+      case 'FIFO':
+      case 'ContiguousFile':
+      case 'GNUDumpDir':
+        break
+
+      case 'NextFileHasLongLinkpath':
+      case 'NextFileHasLongPath':
+      case 'OldGnuLongPath':
+      case 'GlobalExtendedHeader':
+      case 'ExtendedHeader':
+      case 'OldExtendedHeader':
+        this.meta = true
+        break
+
+      // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
+      // it may be worth doing the same, but with a warning.
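+      // (the types that land here include 'SolarisACL', 'Inode',
+      // 'ContinuationFile', 'SparseFile', and 'TapeVolumeHeader')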
+ default: + this.ignore = true + } + + /* c8 ignore start */ + if (!header.path) { + throw new Error('no path provided for tar.ReadEntry') + } + /* c8 ignore stop */ + + this.path = normalizeWindowsPath(header.path) as string + this.mode = header.mode + if (this.mode) { + this.mode = this.mode & 0o7777 + } + this.uid = header.uid + this.gid = header.gid + this.uname = header.uname + this.gname = header.gname + this.size = this.remain + this.mtime = header.mtime + this.atime = header.atime + this.ctime = header.ctime + this.linkpath = header.linkpath + ? normalizeWindowsPath(header.linkpath) + : undefined + this.uname = header.uname + this.gname = header.gname + + if (ex) { + this.#slurp(ex) + } + if (gex) { + this.#slurp(gex, true) + } + } + + write(data: Buffer) { + const writeLen = data.length + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate') + } + + const r = this.remain + const br = this.blockRemain + this.remain = Math.max(0, r - writeLen) + this.blockRemain = Math.max(0, br - writeLen) + if (this.ignore) { + return true + } + + if (r >= writeLen) { + return super.write(data) + } + + // r < writeLen + return super.write(data.subarray(0, r)) + } + + #slurp(ex: Pax, gex: boolean = false) { + if (ex.path) ex.path = normalizeWindowsPath(ex.path) + if (ex.linkpath) ex.linkpath = normalizeWindowsPath(ex.linkpath) + Object.assign( + this, + Object.fromEntries( + Object.entries(ex).filter(([k, v]) => { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. Also, any + // null/undefined values are ignored. + return !( + v === null || + v === undefined || + (k === 'path' && gex) + ) + }), + ), + ) + } +} diff --git a/src/replace.ts b/src/replace.ts new file mode 100644 index 00000000..1d45445d --- /dev/null +++ b/src/replace.ts @@ -0,0 +1,317 @@ +// tar -r +import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass' +import { Minipass } from 'minipass' +import fs from 'node:fs' +import path from 'node:path' +import { Header } from './header.js' +import { list } from './list.js' +import { + dealias, + isFile, + isSyncFile, + TarOptionsFile, + TarOptionsSyncFile, + TarOptionsWithAliases, + TarOptionsWithAliasesFile, + TarOptionsWithAliasesSyncFile, +} from './options.js' +import { Pack, PackSync } from './pack.js' + +// starting at the head of the file, read a Header +// If the checksum is invalid, that's our position to start writing +// If it is, jump forward by the specified size (round up to 512) +// and try again. +// Write the new Pack stream starting there. + +export function replace( + opt: TarOptionsWithAliasesSyncFile, + files?: string[], +): void +export function replace( + opt: TarOptionsWithAliasesFile, + files?: string[], + cb?: () => any, +): Promise +export function replace( + opt: TarOptionsWithAliasesFile, + cb: () => any, +): Promise +export function replace( + opt_: TarOptionsWithAliases, + files?: string[] | (() => any), + cb?: () => any, +): void | Promise { + const opt = dealias(opt_) + + if (!isFile(opt)) { + throw new TypeError('file is required') + } + + if ( + opt.gzip || + opt.brotli || + opt.file.endsWith('.br') || + opt.file.endsWith('.tbr') + ) { + throw new TypeError('cannot append to compressed archives') + } + + if (!files || !Array.isArray(files) || !files.length) { + throw new TypeError('no files or directories specified') + } + + files = Array.from(files) + + return isSyncFile(opt) + ? 
replaceSync(opt, files) + : replace_(opt, files, cb) +} + +const replaceSync = (opt: TarOptionsSyncFile, files: string[]) => { + const p = new PackSync(opt) + + let threw = true + let fd + let position + + try { + try { + fd = fs.openSync(opt.file, 'r+') + } catch (er) { + if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') { + fd = fs.openSync(opt.file, 'w+') + } else { + throw er + } + } + + const st = fs.fstatSync(fd) + const headBuf = Buffer.alloc(512) + + POSITION: for ( + position = 0; + position < st.size; + position += 512 + ) { + for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) { + bytes = fs.readSync( + fd, + headBuf, + bufPos, + headBuf.length - bufPos, + position + bufPos, + ) + + if ( + position === 0 && + headBuf[0] === 0x1f && + headBuf[1] === 0x8b + ) { + throw new Error('cannot append to compressed archives') + } + + if (!bytes) { + break POSITION + } + } + + const h = new Header(headBuf) + if (!h.cksumValid) { + break + } + const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512) + if (position + entryBlockSize + 512 > st.size) { + break + } + // the 512 for the header we just parsed will be added as well + // also jump ahead all the blocks for the body + position += entryBlockSize + if (opt.mtimeCache && h.mtime) { + opt.mtimeCache.set(String(h.path), h.mtime) + } + } + threw = false + + streamSync(opt, p, position, fd, files) + } finally { + if (threw) { + try { + fs.closeSync(fd as number) + } catch (er) {} + } + } +} + +const streamSync = ( + opt: TarOptionsSyncFile, + p: Pack, + position: number, + fd: number, + files: string[], +) => { + const stream = new WriteStreamSync(opt.file, { + fd: fd, + start: position, + }) + p.pipe(stream as unknown as Minipass.Writable) + addFilesSync(p, files) +} + +const replace_ = ( + opt: TarOptionsFile, + files: string[], + cb?: () => void, +): Promise => { + files = Array.from(files) + const p = new Pack(opt) + + const getPos = ( + fd: number, + size: number, + cb_: (er?: null | Error, pos?: number) => void, + ) => { + const cb = (er?: Error | null, pos?: number) => { + if (er) { + fs.close(fd, _ => cb_(er)) + } else { + cb_(null, pos) + } + } + + let position = 0 + if (size === 0) { + return cb(null, 0) + } + + let bufPos = 0 + const headBuf = Buffer.alloc(512) + const onread = (er?: null | Error, bytes?: number): void => { + if (er || typeof bytes === 'undefined') { + return cb(er) + } + bufPos += bytes + if (bufPos < 512 && bytes) { + return fs.read( + fd, + headBuf, + bufPos, + headBuf.length - bufPos, + position + bufPos, + onread, + ) + } + + if ( + position === 0 && + headBuf[0] === 0x1f && + headBuf[1] === 0x8b + ) { + return cb(new Error('cannot append to compressed archives')) + } + + // truncated header + if (bufPos < 512) { + return cb(null, position) + } + + const h = new Header(headBuf) + if (!h.cksumValid) { + return cb(null, position) + } + + const entryBlockSize = 512 * Math.ceil((h.size ?? 
0) / 512) + if (position + entryBlockSize + 512 > size) { + return cb(null, position) + } + + position += entryBlockSize + 512 + if (position >= size) { + return cb(null, position) + } + + if (opt.mtimeCache && h.mtime) { + opt.mtimeCache.set(String(h.path), h.mtime) + } + bufPos = 0 + fs.read(fd, headBuf, 0, 512, position, onread) + } + fs.read(fd, headBuf, 0, 512, position, onread) + } + + const promise = new Promise((resolve, reject) => { + p.on('error', reject) + let flag = 'r+' + const onopen = ( + er?: NodeJS.ErrnoException | null, + fd?: number, + ) => { + if (er && er.code === 'ENOENT' && flag === 'r+') { + flag = 'w+' + return fs.open(opt.file, flag, onopen) + } + + if (er || !fd) { + return reject(er) + } + + fs.fstat(fd, (er, st) => { + if (er) { + return fs.close(fd, () => reject(er)) + } + + getPos(fd, st.size, (er, position) => { + if (er) { + return reject(er) + } + const stream = new WriteStream(opt.file, { + fd: fd, + start: position, + }) + p.pipe(stream as unknown as Minipass.Writable) + stream.on('error', reject) + stream.on('close', resolve) + addFilesAsync(p, files) + }) + }) + } + fs.open(opt.file, flag, onopen) + }) + + return cb ? promise.then(cb, cb) : promise +} + +const addFilesSync = (p: Pack, files: string[]) => { + files.forEach(file => { + if (file.charAt(0) === '@') { + list({ + file: path.resolve(p.cwd, file.slice(1)), + sync: true, + noResume: true, + onentry: entry => p.add(entry), + }) + } else { + p.add(file) + } + }) + p.end() +} + +const addFilesAsync = async ( + p: Pack, + files: string[], + i = 0, +): Promise => { + for (; i < files.length; i++) { + const file = String(files[i]) + if (file.charAt(0) === '@') { + return list({ + file: path.resolve(String(p.cwd), file.slice(1)), + noResume: true, + onentry: entry => p.add(entry), + }).then(_ => addFilesAsync(p, files)) + } else { + p.add(file) + } + } + p.end() +} diff --git a/src/strip-absolute-path.ts b/src/strip-absolute-path.ts new file mode 100644 index 00000000..f127e329 --- /dev/null +++ b/src/strip-absolute-path.ts @@ -0,0 +1,27 @@ +// unix absolute paths are also absolute on win32, so we use this for both +import { win32 } from 'node:path' +const { isAbsolute, parse } = win32 + +// returns [root, stripped] +// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in +// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip / +// explicitly if it's the first character. +// drive-specific relative paths on Windows get their root stripped off even +// though they are not absolute, so `c:../foo` becomes ['c:', '../foo'] +export const stripAbsolutePath = (path: string) => { + let r = '' + + let parsed = parse(path) + while (isAbsolute(path) || parsed.root) { + // windows will think that //x/y/z has a "root" of //x/y/ + // but strip the //?/C:/ off of //?/C:/path + const root = + path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' + ? '/' + : parsed.root + path = path.slice(root.length) + r += root + parsed = parse(path) + } + return [r, path] +} diff --git a/src/strip-trailing-slashes.ts b/src/strip-trailing-slashes.ts new file mode 100644 index 00000000..b2a111ac --- /dev/null +++ b/src/strip-trailing-slashes.ts @@ -0,0 +1,13 @@ +// warning: extremely hot code path. +// This has been meticulously optimized for use +// within npm install on large package trees. +// Do not edit without careful benchmarking. 
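+//
+// For reference (illustrative):
+//   stripTrailingSlashes('a/b///') // => 'a/b'
+//   stripTrailingSlashes('///')    // => ''
+//   stripTrailingSlashes('a/b')    // => 'a/b' (unchanged, no slice)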
+export const stripTrailingSlashes = (str: string) => { + let i = str.length - 1 + let slashesStart = -1 + while (i > -1 && str.charAt(i) === '/') { + slashesStart = i + i-- + } + return slashesStart === -1 ? str : str.slice(0, slashesStart) +} diff --git a/src/symlink-error.ts b/src/symlink-error.ts new file mode 100644 index 00000000..c237cb94 --- /dev/null +++ b/src/symlink-error.ts @@ -0,0 +1,14 @@ +export class SymlinkError extends Error { + path: string + symlink: string + syscall: 'symlink' = 'symlink' + code: 'TAR_SYMLINK_ERROR' = 'TAR_SYMLINK_ERROR' + constructor (symlink: string, path: string) { + super('Cannot extract through symbolic link') + this.symlink = symlink + this.path = path + } + get name() { + return 'SymlinkError' + } +} diff --git a/src/types.ts b/src/types.ts new file mode 100644 index 00000000..080995a6 --- /dev/null +++ b/src/types.ts @@ -0,0 +1,97 @@ +export const isCode = (c: string): c is EntryTypeCode => + name.has(c as EntryTypeCode) + +export const isName = (c: string): c is EntryTypeName => + code.has(c as EntryTypeName) + +export type EntryTypeCode = + | '0' + | '' + | '1' + | '2' + | '3' + | '4' + | '5' + | '6' + | '7' + | 'g' + | 'x' + | 'A' + | 'D' + | 'I' + | 'K' + | 'L' + | 'M' + | 'N' + | 'S' + | 'V' + | 'X' + +export type EntryTypeName = + | 'File' + | 'OldFile' + | 'Link' + | 'SymbolicLink' + | 'CharacterDevice' + | 'BlockDevice' + | 'Directory' + | 'FIFO' + | 'ContiguousFile' + | 'GlobalExtendedHeader' + | 'ExtendedHeader' + | 'SolarisACL' + | 'GNUDumpDir' + | 'Inode' + | 'NextFileHasLongLinkpath' + | 'NextFileHasLongPath' + | 'ContinuationFile' + | 'OldGnuLongPath' + | 'SparseFile' + | 'TapeVolumeHeader' + | 'OldExtendedHeader' + +// map types from key to human-friendly name +export const name = new Map([ + ['0', 'File'], + // same as File + ['', 'OldFile'], + ['1', 'Link'], + ['2', 'SymbolicLink'], + // Devices and FIFOs aren't fully supported + // they are parsed, but skipped when unpacking + ['3', 'CharacterDevice'], + ['4', 'BlockDevice'], + ['5', 'Directory'], + ['6', 'FIFO'], + // same as File + ['7', 'ContiguousFile'], + // pax headers + ['g', 'GlobalExtendedHeader'], + ['x', 'ExtendedHeader'], + // vendor-specific stuff + // skip + ['A', 'SolarisACL'], + // like 5, but with data, which should be skipped + ['D', 'GNUDumpDir'], + // metadata only, skip + ['I', 'Inode'], + // data = link path of next file + ['K', 'NextFileHasLongLinkpath'], + // data = path of next file + ['L', 'NextFileHasLongPath'], + // skip + ['M', 'ContinuationFile'], + // like L + ['N', 'OldGnuLongPath'], + // skip + ['S', 'SparseFile'], + // skip + ['V', 'TapeVolumeHeader'], + // like x + ['X', 'OldExtendedHeader'], +]) + +// map the other direction +export const code = new Map( + Array.from(name).map(kv => [kv[1], kv[0]]), +) diff --git a/src/unpack.ts b/src/unpack.ts new file mode 100644 index 00000000..064bbcf9 --- /dev/null +++ b/src/unpack.ts @@ -0,0 +1,1101 @@ +// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet. +// but the path reservations are required to avoid race conditions where +// parallelized unpack ops may mess with one another, due to dependencies +// (like a Link depending on its target) or destructive operations (like +// clobbering an fs object to create one of a different type.) 
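+//
+// For example, a minimal extraction sketch (illustrative only; assumes an
+// `archive.tar` file and an existing `dest` directory):
+//
+//   import fs from 'node:fs'
+//   const u = new Unpack({ cwd: 'dest' })
+//   fs.createReadStream('archive.tar').pipe(u)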
+ +import * as fsm from '@isaacs/fs-minipass' +import assert from 'node:assert' +import { randomBytes } from 'node:crypto' +import fs, { type Stats } from 'node:fs' +import path from 'node:path' +import { getWriteFlag } from './get-write-flag.js' +import { mkdir, MkdirError, mkdirSync } from './mkdir.js' +import { normalizeUnicode } from './normalize-unicode.js' +import { normalizeWindowsPath } from './normalize-windows-path.js' +import { Parser } from './parse.js' +import { stripAbsolutePath } from './strip-absolute-path.js' +import { stripTrailingSlashes } from './strip-trailing-slashes.js' +import * as wc from './winchars.js' + +import { TarOptions } from './options.js' +import { PathReservations } from './path-reservations.js' +import { ReadEntry } from './read-entry.js' +import { WarnData } from './warn-method.js' + +const ONENTRY = Symbol('onEntry') +const CHECKFS = Symbol('checkFs') +const CHECKFS2 = Symbol('checkFs2') +const PRUNECACHE = Symbol('pruneCache') +const ISREUSABLE = Symbol('isReusable') +const MAKEFS = Symbol('makeFs') +const FILE = Symbol('file') +const DIRECTORY = Symbol('directory') +const LINK = Symbol('link') +const SYMLINK = Symbol('symlink') +const HARDLINK = Symbol('hardlink') +const UNSUPPORTED = Symbol('unsupported') +const CHECKPATH = Symbol('checkPath') +const MKDIR = Symbol('mkdir') +const ONERROR = Symbol('onError') +const PENDING = Symbol('pending') +const PEND = Symbol('pend') +const UNPEND = Symbol('unpend') +const ENDED = Symbol('ended') +const MAYBECLOSE = Symbol('maybeClose') +const SKIP = Symbol('skip') +const DOCHOWN = Symbol('doChown') +const UID = Symbol('uid') +const GID = Symbol('gid') +const CHECKED_CWD = Symbol('checkedCwd') +const platform = + process.env.TESTING_TAR_FAKE_PLATFORM || process.platform +const isWindows = platform === 'win32' +const DEFAULT_MAX_DEPTH = 1024 + +// Unlinks on Windows are not atomic. +// +// This means that if you have a file entry, followed by another +// file entry with an identical name, and you cannot re-use the file +// (because it's a hardlink, or because unlink:true is set, or it's +// Windows, which does not have useful nlink values), then the unlink +// will be committed to the disk AFTER the new file has been written +// over the old one, deleting the new file. +// +// To work around this, on Windows systems, we rename the file and then +// delete the renamed file. It's a sloppy kludge, but frankly, I do not +// know of a better way to do this, given windows' non-atomic unlink +// semantics. +// +// See: https://github.com/npm/node-tar/issues/183 +/* istanbul ignore next */ +const unlinkFile = ( + path: string, + cb: (er?: Error | null) => void, +) => { + if (!isWindows) { + return fs.unlink(path, cb) + } + + const name = path + '.DELETE.' + randomBytes(16).toString('hex') + fs.rename(path, name, er => { + if (er) { + return cb(er) + } + fs.unlink(name, cb) + }) +} + +/* istanbul ignore next */ +const unlinkFileSync = (path: string) => { + if (!isWindows) { + return fs.unlinkSync(path) + } + + const name = path + '.DELETE.' + randomBytes(16).toString('hex') + fs.renameSync(path, name) + fs.unlinkSync(name) +} + +// this.gid, entry.gid, this.processUid +const uint32 = ( + a: number | undefined, + b: number | undefined, + c: number | undefined, +) => + a !== undefined && a === a >>> 0 + ? a + : b !== undefined && b === b >>> 0 + ? b + : c + +// clear the cache if it's a case-insensitive unicode-squashing match. 
+// we can't know if the current file system is case-sensitive or supports
+// unicode fully, so we check for similarity on the maximally compatible
+// representation. Err on the side of pruning, since all it's doing is
+// preventing lstats, and it's not the end of the world if we get a false
+// positive.
+// Note that on windows, we always drop the entire cache whenever a
+// symbolic link is encountered, because 8.3 filenames are impossible
+// to reason about, and collisions are hazards rather than just failures.
+const cacheKeyNormalize = (path: string) =>
+  stripTrailingSlashes(
+    normalizeWindowsPath(normalizeUnicode(path)),
+  ).toLowerCase()
+
+// remove all cache entries matching ${abs}/**
+const pruneCache = (cache: Map<string, boolean>, abs: string) => {
+  abs = cacheKeyNormalize(abs)
+  for (const path of cache.keys()) {
+    const pnorm = cacheKeyNormalize(path)
+    if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
+      cache.delete(path)
+    }
+  }
+}
+
+const dropCache = (cache: Map<string, boolean>) => {
+  for (const key of cache.keys()) {
+    cache.delete(key)
+  }
+}
+
+export class Unpack extends Parser {
+  [ENDED]: boolean = false;
+  [CHECKED_CWD]: boolean = false;
+  [PENDING]: number = 0
+
+  reservations: PathReservations = new PathReservations()
+  transform?: TarOptions['transform']
+  writable: true = true
+  readable: false = false
+  dirCache: Exclude<TarOptions['dirCache'], undefined>
+  uid?: number
+  gid?: number
+  setOwner: boolean
+  preserveOwner: boolean
+  processGid?: number
+  processUid?: number
+  maxDepth: number
+  forceChown: boolean
+  win32: boolean
+  newer: boolean
+  keep: boolean
+  noMtime: boolean
+  preservePaths: boolean
+  unlink: boolean
+  cwd: string
+  strip: number
+  processUmask: number
+  umask: number
+  dmode: number
+  fmode: number
+  noChmod: boolean
+
+  constructor(opt: TarOptions = {}) {
+    opt.ondone = () => {
+      this[ENDED] = true
+      this[MAYBECLOSE]()
+    }
+
+    super(opt)
+
+    this.transform = opt.transform
+
+    this.dirCache = opt.dirCache || new Map()
+    this.noChmod = !!opt.noChmod
+
+    if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
+      // need both or neither
+      if (
+        typeof opt.uid !== 'number' ||
+        typeof opt.gid !== 'number'
+      ) {
+        throw new TypeError(
+          'cannot set owner without number uid and gid',
+        )
+      }
+      if (opt.preserveOwner) {
+        throw new TypeError(
+          'cannot preserve owner in archive and also set owner explicitly',
+        )
+      }
+      this.uid = opt.uid
+      this.gid = opt.gid
+      this.setOwner = true
+    } else {
+      this.uid = undefined
+      this.gid = undefined
+      this.setOwner = false
+    }
+
+    // default true for root
+    if (
+      opt.preserveOwner === undefined &&
+      typeof opt.uid !== 'number'
+    ) {
+      this.preserveOwner = !!(
+        process.getuid && process.getuid() === 0
+      )
+    } else {
+      this.preserveOwner = !!opt.preserveOwner
+    }
+
+    this.processUid =
+      (this.preserveOwner || this.setOwner) && process.getuid
+        ? process.getuid()
+        : undefined
+    this.processGid =
+      (this.preserveOwner || this.setOwner) && process.getgid
+        ? process.getgid()
+        : undefined
+
+    // prevent excessively deep nesting of subfolders
+    // set to `Infinity` to remove this restriction
+    this.maxDepth =
+      typeof opt.maxDepth === 'number'
+        ? opt.maxDepth
+        : DEFAULT_MAX_DEPTH
+
+    // mostly just for testing, but useful in some cases.
+    // Forcibly trigger a chown on every entry, no matter what
+    this.forceChown = opt.forceChown === true
+
+    // turn ><?| in filenames into 0xf000-higher encoded forms
+    this.win32 = !!opt.win32 || isWindows
+
+    // do not unpack over files that are newer than what's in the archive
+    this.newer = !!opt.newer
+
+    // do not unpack over ANY files
+    this.keep = !!opt.keep
+
+    // do not set mtime/atime of extracted entries
+    this.noMtime = !!opt.noMtime
+
+    // allow .. in path entries
+    this.preservePaths = !!opt.preservePaths
+
+    // unlink files and links before writing. This breaks existing hard
+    // links, and removes symlink directories rather than erroring
+    this.unlink = !!opt.unlink
+
+    this.cwd = normalizeWindowsPath(
+      path.resolve(opt.cwd || process.cwd()),
+    )
+    this.strip = Number(opt.strip) || 0
+    // if we're not chmodding, then we don't need the process umask
+    this.processUmask = opt.noChmod ? 0 : process.umask()
+    this.umask =
+      typeof opt.umask === 'number' ? opt.umask : this.processUmask
+
+    // default mode for dirs created as parents
+    this.dmode = opt.dmode || 0o0777 & ~this.umask
+    this.fmode = opt.fmode || 0o0666 & ~this.umask
+
+    this.on('entry', entry => this[ONENTRY](entry))
+  }
+
+  // a bad or damaged archive is a warning for Parser, but an error
+  // when extracting. Mark those errors as unrecoverable, because
+  // the Unpack contract cannot be met.
+ warn(code: string, msg: string | Error, data: WarnData = {}) { + if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') { + data.recoverable = false + } + return super.warn(code, msg, data) + } + + [MAYBECLOSE]() { + if (this[ENDED] && this[PENDING] === 0) { + this.emit('prefinish') + this.emit('finish') + this.emit('end') + } + } + + [CHECKPATH](entry: ReadEntry) { + const p = normalizeWindowsPath(entry.path) + const parts = p.split('/') + + if (this.strip) { + if (parts.length < this.strip) { + return false + } + if (entry.type === 'Link') { + const linkparts = normalizeWindowsPath( + String(entry.linkpath), + ).split('/') + if (linkparts.length >= this.strip) { + entry.linkpath = linkparts.slice(this.strip).join('/') + } else { + return false + } + } + parts.splice(0, this.strip) + entry.path = parts.join('/') + } + + if (isFinite(this.maxDepth) && parts.length > this.maxDepth) { + this.warn('TAR_ENTRY_ERROR', 'path excessively deep', { + entry, + path: p, + depth: parts.length, + maxDepth: this.maxDepth, + }) + return false + } + + if (!this.preservePaths) { + if ( + parts.includes('..') || + (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? '')) + ) { + this.warn('TAR_ENTRY_ERROR', `path contains '..'`, { + entry, + path: p, + }) + return false + } + + // strip off the root + const [root, stripped] = stripAbsolutePath(p) + if (root) { + entry.path = String(stripped) + this.warn( + 'TAR_ENTRY_INFO', + `stripping ${root} from absolute path`, + { + entry, + path: p, + }, + ) + } + } + + if (path.isAbsolute(entry.path)) { + entry.absolute = normalizeWindowsPath(path.resolve(entry.path)) + } else { + entry.absolute = normalizeWindowsPath( + path.resolve(this.cwd, entry.path), + ) + } + + // if we somehow ended up with a path that escapes the cwd, and we are + // not in preservePaths mode, then something is fishy! This should have + // been prevented above, so ignore this for coverage. + /* istanbul ignore if - defense in depth */ + if ( + !this.preservePaths && + typeof entry.absolute === 'string' && + entry.absolute.indexOf(this.cwd + '/') !== 0 && + entry.absolute !== this.cwd + ) { + this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', { + entry, + path: normalizeWindowsPath(entry.path), + resolvedPath: entry.absolute, + cwd: this.cwd, + }) + return false + } + + // an archive can set properties on the extraction directory, but it + // may not replace the cwd with a different kind of thing entirely. 
+ if ( + entry.absolute === this.cwd && + entry.type !== 'Directory' && + entry.type !== 'GNUDumpDir' + ) { + return false + } + + // only encode : chars that aren't drive letter indicators + if (this.win32) { + const { root: aRoot } = path.win32.parse(String(entry.absolute)) + entry.absolute = + aRoot + wc.encode(String(entry.absolute).slice(aRoot.length)) + const { root: pRoot } = path.win32.parse(entry.path) + entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length)) + } + + return true + } + + [ONENTRY](entry: ReadEntry) { + if (!this[CHECKPATH](entry)) { + return entry.resume() + } + + assert.equal(typeof entry.absolute, 'string') + + switch (entry.type) { + case 'Directory': + case 'GNUDumpDir': + if (entry.mode) { + entry.mode = entry.mode | 0o700 + } + + // eslint-disable-next-line no-fallthrough + case 'File': + case 'OldFile': + case 'ContiguousFile': + case 'Link': + case 'SymbolicLink': + return this[CHECKFS](entry) + + case 'CharacterDevice': + case 'BlockDevice': + case 'FIFO': + default: + return this[UNSUPPORTED](entry) + } + } + + [ONERROR](er: Error, entry: ReadEntry) { + // Cwd has to exist, or else nothing works. That's serious. + // Other errors are warnings, which raise the error in strict + // mode, but otherwise continue on. + if (er.name === 'CwdError') { + this.emit('error', er) + } else { + this.warn('TAR_ENTRY_ERROR', er, { entry }) + this[UNPEND]() + entry.resume() + } + } + + [MKDIR]( + dir: string, + mode: number, + cb: (er?: null | MkdirError, made?: string) => void, + ) { + mkdir( + normalizeWindowsPath(dir), + { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode, + noChmod: this.noChmod, + }, + cb, + ) + } + + [DOCHOWN](entry: ReadEntry) { + // in preserve owner mode, chown if the entry doesn't match process + // in set owner mode, chown if setting doesn't match process + return ( + this.forceChown || + (this.preserveOwner && + ((typeof entry.uid === 'number' && + entry.uid !== this.processUid) || + (typeof entry.gid === 'number' && + entry.gid !== this.processGid))) || + (typeof this.uid === 'number' && + this.uid !== this.processUid) || + (typeof this.gid === 'number' && this.gid !== this.processGid) + ) + } + + [UID](entry: ReadEntry) { + return uint32(this.uid, entry.uid, this.processUid) + } + + [GID](entry: ReadEntry) { + return uint32(this.gid, entry.gid, this.processGid) + } + + [FILE](entry: ReadEntry, fullyDone: () => void) { + const mode = + typeof entry.mode === 'number' + ? entry.mode & 0o7777 + : this.fmode + const stream = new fsm.WriteStream(String(entry.absolute), { + // slight lie, but it can be numeric flags + flags: getWriteFlag(entry.size) as string, + mode: mode, + autoClose: false, + }) + stream.on('error', (er: Error) => { + if (stream.fd) { + fs.close(stream.fd, () => {}) + } + + // flush all the data out so that we aren't left hanging + // if the error wasn't actually fatal. otherwise the parse + // is blocked, and we never proceed. 
+ stream.write = () => true + this[ONERROR](er, entry) + fullyDone() + }) + + let actions = 1 + const done = (er?: null | Error) => { + if (er) { + /* istanbul ignore else - we should always have a fd by now */ + if (stream.fd) { + fs.close(stream.fd, () => {}) + } + + this[ONERROR](er, entry) + fullyDone() + return + } + + if (--actions === 0) { + if (stream.fd !== undefined) { + fs.close(stream.fd, er => { + if (er) { + this[ONERROR](er, entry) + } else { + this[UNPEND]() + } + fullyDone() + }) + } + } + } + + stream.on('finish', () => { + // if futimes fails, try utimes + // if utimes fails, fail with the original error + // same for fchown/chown + const abs = String(entry.absolute) + const fd = stream.fd + + if (typeof fd === 'number' && entry.mtime && !this.noMtime) { + actions++ + const atime = entry.atime || new Date() + const mtime = entry.mtime + fs.futimes(fd, atime, mtime, er => + er + ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er)) + : done(), + ) + } + + if (typeof fd === 'number' && this[DOCHOWN](entry)) { + actions++ + const uid = this[UID](entry) + const gid = this[GID](entry) + if (typeof uid === 'number' && typeof gid === 'number') { + fs.fchown(fd, uid, gid, er => + er + ? fs.chown(abs, uid, gid, er2 => done(er2 && er)) + : done(), + ) + } + } + + done() + }) + + const tx = this.transform ? this.transform(entry) || entry : entry + if (tx !== entry) { + tx.on('error', (er: Error) => { + this[ONERROR](er, entry) + fullyDone() + }) + entry.pipe(tx) + } + tx.pipe(stream) + } + + [DIRECTORY](entry: ReadEntry, fullyDone: () => void) { + const mode = + typeof entry.mode === 'number' + ? entry.mode & 0o7777 + : this.dmode + this[MKDIR](String(entry.absolute), mode, er => { + if (er) { + this[ONERROR](er, entry) + fullyDone() + return + } + + let actions = 1 + const done = () => { + if (--actions === 0) { + fullyDone() + this[UNPEND]() + entry.resume() + } + } + + if (entry.mtime && !this.noMtime) { + actions++ + fs.utimes( + String(entry.absolute), + entry.atime || new Date(), + entry.mtime, + done, + ) + } + + if (this[DOCHOWN](entry)) { + actions++ + fs.chown( + String(entry.absolute), + Number(this[UID](entry)), + Number(this[GID](entry)), + done, + ) + } + + done() + }) + } + + [UNSUPPORTED](entry: ReadEntry) { + entry.unsupported = true + this.warn( + 'TAR_ENTRY_UNSUPPORTED', + `unsupported entry type: ${entry.type}`, + { entry }, + ) + entry.resume() + } + + [SYMLINK](entry: ReadEntry, done: () => void) { + this[LINK](entry, String(entry.linkpath), 'symlink', done) + } + + [HARDLINK](entry: ReadEntry, done: () => void) { + const linkpath = normalizeWindowsPath( + path.resolve(this.cwd, String(entry.linkpath)), + ) + this[LINK](entry, linkpath, 'link', done) + } + + [PEND]() { + this[PENDING]++ + } + + [UNPEND]() { + this[PENDING]-- + this[MAYBECLOSE]() + } + + [SKIP](entry: ReadEntry) { + this[UNPEND]() + entry.resume() + } + + // Check if we can reuse an existing filesystem entry safely and + // overwrite it, rather than unlinking and recreating + // Windows doesn't report a useful nlink, so we just never reuse entries + [ISREUSABLE](entry: ReadEntry, st: Stats) { + return ( + entry.type === 'File' && + !this.unlink && + st.isFile() && + st.nlink <= 1 && + !isWindows + ) + } + + // check if a thing is there, and if so, try to clobber it + [CHECKFS](entry: ReadEntry) { + this[PEND]() + const paths = [entry.path] + if (entry.linkpath) { + paths.push(entry.linkpath) + } + this.reservations.reserve(paths, done => + this[CHECKFS2](entry, done), + ) + } + + 
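
The finish handler above uses a fd-first, path-second fallback for both timestamps (futimes, then utimes) and ownership (fchown, then chown), and reports the original error only when the fallback fails too. The same pattern in isolation (a minimal sketch; setTimes is our name, not part of the patch):

    import fs from 'node:fs'

    // Prefer the fd-based syscall (no path re-resolution), fall back to
    // the path-based one, and surface the *original* error only when the
    // fallback also fails: er2 && er is null when utimes succeeded.
    const setTimes = (
      fd: number,
      abs: string,
      atime: Date,
      mtime: Date,
      done: (er?: Error | null) => void,
    ) =>
      fs.futimes(fd, atime, mtime, er =>
        er
          ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
          : done(),
      )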
[PRUNECACHE](entry: ReadEntry) { + // if we are not creating a directory, and the path is in the dirCache, + // then that means we are about to delete the directory we created + // previously, and it is no longer going to be a directory, and neither + // is any of its children. + // If a symbolic link is encountered, all bets are off. There is no + // reasonable way to sanitize the cache in such a way we will be able to + // avoid having filesystem collisions. If this happens with a non-symlink + // entry, it'll just fail to unpack, but a symlink to a directory, using an + // 8.3 shortname or certain unicode attacks, can evade detection and lead + // to arbitrary writes to anywhere on the system. + if (entry.type === 'SymbolicLink') { + dropCache(this.dirCache) + } else if (entry.type !== 'Directory') { + pruneCache(this.dirCache, String(entry.absolute)) + } + } + + [CHECKFS2](entry: ReadEntry, fullyDone: (er?: Error) => void) { + this[PRUNECACHE](entry) + + const done = (er?: Error) => { + this[PRUNECACHE](entry) + fullyDone(er) + } + + const checkCwd = () => { + this[MKDIR](this.cwd, this.dmode, er => { + if (er) { + this[ONERROR](er, entry) + done() + return + } + this[CHECKED_CWD] = true + start() + }) + } + + const start = () => { + if (entry.absolute !== this.cwd) { + const parent = normalizeWindowsPath( + path.dirname(String(entry.absolute)), + ) + if (parent !== this.cwd) { + return this[MKDIR](parent, this.dmode, er => { + if (er) { + this[ONERROR](er, entry) + done() + return + } + afterMakeParent() + }) + } + } + afterMakeParent() + } + + const afterMakeParent = () => { + fs.lstat(String(entry.absolute), (lstatEr, st) => { + if ( + st && + (this.keep || + (this.newer && st.mtime > (entry.mtime ?? st.mtime))) + ) { + this[SKIP](entry) + done() + return + } + if (lstatEr || this[ISREUSABLE](entry, st)) { + return this[MAKEFS](null, entry, done) + } + + if (st.isDirectory()) { + if (entry.type === 'Directory') { + const needChmod = + !this.noChmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode + const afterChmod = (er?: Error | null | undefined) => + this[MAKEFS](er ?? null, entry, done) + if (!needChmod) { + return afterChmod() + } + return fs.chmod( + String(entry.absolute), + Number(entry.mode), + afterChmod, + ) + } + // Not a dir entry, have to remove it. + // NB: the only way to end up with an entry that is the cwd + // itself, in such a way that == does not detect, is a + // tricky windows absolute path with UNC or 8.3 parts (and + // preservePaths:true, or else it will have been stripped). + // In that case, the user has opted out of path protections + // explicitly, so if they blow away the cwd, c'est la vie. + if (entry.absolute !== this.cwd) { + return fs.rmdir( + String(entry.absolute), + (er?: null | Error) => + this[MAKEFS](er ?? null, entry, done), + ) + } + } + + // not a dir, and not reusable + // don't remove if the cwd, we want that error + if (entry.absolute === this.cwd) { + return this[MAKEFS](null, entry, done) + } + + unlinkFile(String(entry.absolute), er => + this[MAKEFS](er ?? 
null, entry, done),
+        )
+      })
+    }
+
+    if (this[CHECKED_CWD]) {
+      start()
+    } else {
+      checkCwd()
+    }
+  }
+
+  [MAKEFS](
+    er: null | undefined | Error,
+    entry: ReadEntry,
+    done: () => void,
+  ) {
+    if (er) {
+      this[ONERROR](er, entry)
+      done()
+      return
+    }
+
+    switch (entry.type) {
+      case 'File':
+      case 'OldFile':
+      case 'ContiguousFile':
+        return this[FILE](entry, done)
+
+      case 'Link':
+        return this[HARDLINK](entry, done)
+
+      case 'SymbolicLink':
+        return this[SYMLINK](entry, done)
+
+      case 'Directory':
+      case 'GNUDumpDir':
+        return this[DIRECTORY](entry, done)
+    }
+  }
+
+  [LINK](
+    entry: ReadEntry,
+    linkpath: string,
+    link: 'link' | 'symlink',
+    done: () => void,
+  ) {
+    // XXX: get the type ('symlink' or 'junction') for windows
+    fs[link](linkpath, String(entry.absolute), er => {
+      if (er) {
+        this[ONERROR](er, entry)
+      } else {
+        this[UNPEND]()
+        entry.resume()
+      }
+      done()
+    })
+  }
+}
+
+const callSync = (fn: () => any) => {
+  try {
+    return [null, fn()]
+  } catch (er) {
+    return [er, null]
+  }
+}
+
+export class UnpackSync extends Unpack {
+  [MAKEFS](er: null | Error | undefined, entry: ReadEntry) {
+    return super[MAKEFS](er, entry, () => {})
+  }
+
+  [CHECKFS](entry: ReadEntry) {
+    this[PRUNECACHE](entry)
+
+    if (!this[CHECKED_CWD]) {
+      const er = this[MKDIR](this.cwd, this.dmode)
+      if (er) {
+        return this[ONERROR](er as Error, entry)
+      }
+      this[CHECKED_CWD] = true
+    }
+
+    // don't bother to make the parent if the current entry is the cwd,
+    // we've already checked it.
+    if (entry.absolute !== this.cwd) {
+      const parent = normalizeWindowsPath(
+        path.dirname(String(entry.absolute)),
+      )
+      if (parent !== this.cwd) {
+        const mkParent = this[MKDIR](parent, this.dmode)
+        if (mkParent) {
+          return this[ONERROR](mkParent as Error, entry)
+        }
+      }
+    }
+
+    const [lstatEr, st] = callSync(() =>
+      fs.lstatSync(String(entry.absolute)),
+    )
+    if (
+      st &&
+      (this.keep ||
+        (this.newer && st.mtime > (entry.mtime ?? st.mtime)))
+    ) {
+      return this[SKIP](entry)
+    }
+
+    if (lstatEr || this[ISREUSABLE](entry, st)) {
+      return this[MAKEFS](null, entry)
+    }
+
+    if (st.isDirectory()) {
+      if (entry.type === 'Directory') {
+        const needChmod =
+          !this.noChmod &&
+          entry.mode &&
+          (st.mode & 0o7777) !== entry.mode
+        const [er] = needChmod
+          ? callSync(() => {
+              fs.chmodSync(String(entry.absolute), Number(entry.mode))
+            })
+          : []
+        return this[MAKEFS](er, entry)
+      }
+      // not a dir entry, have to remove it.
+      // return here, or we'd fall through and also try to unlink the
+      // path we just rmdir'ed.
+      const [er] = callSync(() =>
+        fs.rmdirSync(String(entry.absolute)),
+      )
+      return this[MAKEFS](er, entry)
+    }
+
+    // not a dir, and not reusable.
+    // don't remove if it's the cwd, since we want that error.
+    const [er] =
+      entry.absolute === this.cwd
+        ? []
+        : callSync(() => unlinkFileSync(String(entry.absolute)))
+    this[MAKEFS](er, entry)
+  }
+
+  [FILE](entry: ReadEntry, done: () => void) {
+    const mode =
+      typeof entry.mode === 'number'
+        ? entry.mode & 0o7777
+        : this.fmode
+
+    const oner = (er?: null | Error | undefined) => {
+      let closeError
+      try {
+        fs.closeSync(fd)
+      } catch (e) {
+        closeError = e
+      }
+      if (er || closeError) {
+        this[ONERROR]((er as Error) || closeError, entry)
+      }
+      done()
+    }
+
+    let fd: number
+    try {
+      fd = fs.openSync(
+        String(entry.absolute),
+        getWriteFlag(entry.size),
+        mode,
+      )
+    } catch (er) {
+      return oner(er as Error)
+    }
+    const tx = this.transform ?
this.transform(entry) || entry : entry + if (tx !== entry) { + tx.on('error', (er: Error) => this[ONERROR](er, entry)) + entry.pipe(tx) + } + + tx.on('data', (chunk: Buffer) => { + try { + fs.writeSync(fd, chunk, 0, chunk.length) + } catch (er) { + oner(er as Error) + } + }) + + tx.on('end', () => { + let er = null + // try both, falling futimes back to utimes + // if either fails, handle the first error + if (entry.mtime && !this.noMtime) { + const atime = entry.atime || new Date() + const mtime = entry.mtime + try { + fs.futimesSync(fd, atime, mtime) + } catch (futimeser) { + try { + fs.utimesSync(String(entry.absolute), atime, mtime) + } catch (utimeser) { + er = futimeser + } + } + } + + if (this[DOCHOWN](entry)) { + const uid = this[UID](entry) + const gid = this[GID](entry) + + try { + fs.fchownSync(fd, Number(uid), Number(gid)) + } catch (fchowner) { + try { + fs.chownSync(String(entry.absolute), Number(uid), Number(gid)) + } catch (chowner) { + er = er || fchowner + } + } + } + + oner(er as Error) + }) + } + + [DIRECTORY](entry: ReadEntry, done: () => void) { + const mode = + typeof entry.mode === 'number' + ? entry.mode & 0o7777 + : this.dmode + const er = this[MKDIR](String(entry.absolute), mode) + if (er) { + this[ONERROR](er as Error, entry) + done() + return + } + if (entry.mtime && !this.noMtime) { + try { + fs.utimesSync( + String(entry.absolute), + entry.atime || new Date(), + entry.mtime, + ) + } catch (er) {} + } + if (this[DOCHOWN](entry)) { + try { + fs.chownSync( + String(entry.absolute), + Number(this[UID](entry)), + Number(this[GID](entry)), + ) + } catch (er) {} + } + done() + entry.resume() + } + + [MKDIR](dir: string, mode: number) { + try { + return mkdirSync(normalizeWindowsPath(dir), { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode, + noChmod: this.noChmod, + }) + } catch (er) { + return er + } + } + + [LINK]( + entry: ReadEntry, + linkpath: string, + link: 'link' | 'symlink', + done: () => void, + ) { + const ls: `${typeof link}Sync` = `${link}Sync` + try { + fs[ls](linkpath, String(entry.absolute)) + done() + entry.resume() + } catch (er) { + return this[ONERROR](er as Error, entry) + } + } +} diff --git a/src/update.ts b/src/update.ts new file mode 100644 index 00000000..a049efd5 --- /dev/null +++ b/src/update.ts @@ -0,0 +1,62 @@ +// tar -u + +import { + dealias, + isFile, + type TarOptionsWithAliases, +} from './options.js' + +import { replace as r } from './replace.js' + +// just call tar.r with the filter and mtimeCache + +export const update = ( + opt_: TarOptionsWithAliases, + files: string[], + cb?: (er?: Error) => any, +) => { + const opt = dealias(opt_) + + if (!isFile(opt)) { + throw new TypeError('file is required') + } + + if ( + opt.gzip || + opt.brotli || + opt.file.endsWith('.br') || + opt.file.endsWith('.tbr') + ) { + throw new TypeError('cannot append to compressed archives') + } + + if (!files || !Array.isArray(files) || !files.length) { + throw new TypeError('no files or directories specified') + } + + files = Array.from(files) + mtimeFilter(opt) + + return r(opt, files, cb) +} + +const mtimeFilter = (opt: TarOptionsWithAliases) => { + const filter = opt.filter + + if (!opt.mtimeCache) { + opt.mtimeCache = new Map() + } + + opt.filter = filter + ? (path, stat) => + filter(path, stat) && + !( + (opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > + (stat.mtime ?? 
0) + ) + : (path, stat) => + !( + (opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > + (stat.mtime ?? 0) + ) +} diff --git a/src/warn-method.ts b/src/warn-method.ts new file mode 100644 index 00000000..7f2b83e1 --- /dev/null +++ b/src/warn-method.ts @@ -0,0 +1,59 @@ +/** has a warn method */ +export type Warner = { + warn(code: string, message: string | Error, data: any): void + file?: string + cwd?: string + strict?: boolean + + emit( + event: 'warn', + code: string, + message: string, + data?: WarnData, + ): void + emit(event: 'error', error: TarError): void +} + +export type WarnData = { + file?: string + cwd?: string + code?: string + tarCode?: string + recoverable?: boolean + [k: string]: any +} + +export type TarError = Error & WarnData + +export const warnMethod = ( + self: Warner, + code: string, + message: string | Error, + data: WarnData = {}, +) => { + if (self.file) { + data.file = self.file + } + if (self.cwd) { + data.cwd = self.cwd + } + data.code = + (message instanceof Error && + (message as NodeJS.ErrnoException).code) || + code + data.tarCode = code + if (!self.strict && data.recoverable !== false) { + if (message instanceof Error) { + data = Object.assign(message, data) + message = message.message + } + self.emit('warn', code, message, data) + } else if (message instanceof Error) { + self.emit('error', Object.assign(message, data)) + } else { + self.emit( + 'error', + Object.assign(new Error(`${code}: ${message}`), data), + ) + } +} diff --git a/src/winchars.ts b/src/winchars.ts new file mode 100644 index 00000000..b8edeb36 --- /dev/null +++ b/src/winchars.ts @@ -0,0 +1,16 @@ +// When writing files on Windows, translate the characters to their +// 0xf000 higher-encoded versions. + +const raw = ['|', '<', '>', '?', ':'] + +const win = raw.map(char => + String.fromCharCode(0xf000 + char.charCodeAt(0)), +) + +const toWin = new Map(raw.map((char, i) => [char, win[i]])) +const toRaw = new Map(win.map((char, i) => [char, raw[i]])) + +export const encode = (s: string) => + raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s) +export const decode = (s: string) => + win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s) diff --git a/src/write-entry.ts b/src/write-entry.ts new file mode 100644 index 00000000..f9910c6d --- /dev/null +++ b/src/write-entry.ts @@ -0,0 +1,733 @@ +import fs, { type Stats } from 'fs' +import { Minipass } from 'minipass' +import path from 'path' +import { Header } from './header.js' +import { modeFix } from './mode-fix.js' +import { normalizeWindowsPath } from './normalize-windows-path.js' +import { + dealias, + LinkCacheKey, + TarOptions, + TarOptionsWithAliases, +} from './options.js' +import { Pax } from './pax.js' +import { ReadEntry } from './read-entry.js' +import { stripAbsolutePath } from './strip-absolute-path.js' +import { stripTrailingSlashes } from './strip-trailing-slashes.js' +import { EntryTypeName } from './types.js' +import { WarnData, Warner, warnMethod } from './warn-method.js' +import * as winchars from './winchars.js' + +const prefixPath = (path: string, prefix?: string) => { + if (!prefix) { + return normalizeWindowsPath(path) + } + path = normalizeWindowsPath(path).replace(/^\.(\/|$)/, '') + return stripTrailingSlashes(prefix) + '/' + path +} + +const maxReadSize = 16 * 1024 * 1024 + +const PROCESS = Symbol('process') +const FILE = Symbol('file') +const DIRECTORY = Symbol('directory') +const SYMLINK = Symbol('symlink') +const HARDLINK = Symbol('hardlink') +const HEADER = Symbol('header') +const READ = Symbol('read') +const LSTAT = 
Symbol('lstat')
+const ONLSTAT = Symbol('onlstat')
+const ONREAD = Symbol('onread')
+const ONREADLINK = Symbol('onreadlink')
+const OPENFILE = Symbol('openfile')
+const ONOPENFILE = Symbol('onopenfile')
+const CLOSE = Symbol('close')
+const MODE = Symbol('mode')
+const AWAITDRAIN = Symbol('awaitDrain')
+const ONDRAIN = Symbol('ondrain')
+const PREFIX = Symbol('prefix')
+
+export class WriteEntry extends Minipass implements Warner {
+  path: string
+  portable: boolean
+  myuid: number = (process.getuid && process.getuid()) || 0
+  // until node has builtin pwnam functions, this'll have to do
+  myuser: string = process.env.USER || ''
+  maxReadSize: number
+  linkCache: Exclude<TarOptions['linkCache'], undefined>
+  statCache: Exclude<TarOptions['statCache'], undefined>
+  preservePaths: boolean
+  cwd: string
+  strict: boolean
+  mtime?: Date
+  noPax: boolean
+  noMtime: boolean
+  prefix?: string
+  fd?: number
+
+  blockLen: number = 0
+  blockRemain: number = 0
+  buf?: Buffer
+  pos: number = 0
+  remain: number = 0
+  length: number = 0
+  offset: number = 0
+
+  win32: boolean
+  absolute: string
+
+  header?: Header
+  type?: EntryTypeName | 'Unsupported'
+  linkpath?: string
+  stat?: Stats
+  /* c8 ignore start */
+
+  #hadError: boolean = false
+
+  constructor(p: string, opt_: TarOptionsWithAliases = {}) {
+    const opt = dealias(opt_)
+    super()
+    this.path = normalizeWindowsPath(p)
+    // suppress atime, ctime, uid, gid, uname, gname
+    this.portable = !!opt.portable
+    this.maxReadSize = opt.maxReadSize || maxReadSize
+    this.linkCache = opt.linkCache || new Map()
+    this.statCache = opt.statCache || new Map()
+    this.preservePaths = !!opt.preservePaths
+    this.cwd = normalizeWindowsPath(opt.cwd || process.cwd())
+    this.strict = !!opt.strict
+    this.noPax = !!opt.noPax
+    this.noMtime = !!opt.noMtime
+    this.mtime = opt.mtime
+    this.prefix = opt.prefix
+      ? normalizeWindowsPath(opt.prefix)
+      : undefined
+
+    if (typeof opt.onwarn === 'function') {
+      this.on('warn', opt.onwarn)
+    }
+
+    let pathWarn: string | false = false
+    if (!this.preservePaths) {
+      const [root, stripped] = stripAbsolutePath(this.path)
+      if (root && typeof stripped === 'string') {
+        this.path = stripped
+        pathWarn = root
+      }
+    }
+
+    this.win32 = !!opt.win32 || process.platform === 'win32'
+    if (this.win32) {
+      // force the \ to / normalization, since we might not *actually*
+      // be on windows, but want \ to be considered a path separator.
+ this.path = winchars.decode(this.path.replace(/\\/g, '/')) + p = p.replace(/\\/g, '/') + } + + this.absolute = normalizeWindowsPath( + opt.absolute || path.resolve(this.cwd, p), + ) + + if (this.path === '') { + this.path = './' + } + + if (pathWarn) { + this.warn( + 'TAR_ENTRY_INFO', + `stripping ${pathWarn} from absolute path`, + { + entry: this, + path: pathWarn + this.path, + }, + ) + } + + const cs = this.statCache.get(this.absolute) + if (cs) { + this[ONLSTAT](cs) + } else { + this[LSTAT]() + } + } + + warn(code: string, message: string | Error, data: WarnData = {}) { + return warnMethod(this, code, message, data) + } + + emit(ev: string, ...data: any[]) { + if (ev === 'error') { + this.#hadError = true + } + return super.emit(ev, ...data) + } + + [LSTAT]() { + fs.lstat(this.absolute, (er, stat) => { + if (er) { + return this.emit('error', er) + } + this[ONLSTAT](stat) + }) + } + + [ONLSTAT](stat: Stats) { + this.statCache.set(this.absolute, stat) + this.stat = stat + if (!stat.isFile()) { + stat.size = 0 + } + this.type = getType(stat) + this.emit('stat', stat) + this[PROCESS]() + } + + [PROCESS]() { + switch (this.type) { + case 'File': + return this[FILE]() + case 'Directory': + return this[DIRECTORY]() + case 'SymbolicLink': + return this[SYMLINK]() + // unsupported types are ignored. + default: + return this.end() + } + } + + [MODE](mode: number) { + return modeFix(mode, this.type === 'Directory', this.portable) + } + + [PREFIX](path: string) { + return prefixPath(path, this.prefix) + } + + [HEADER]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot write header before stat') + } + /* c8 ignore stop */ + + if (this.type === 'Directory' && this.portable) { + this.noMtime = true + } + + this.header = new Header({ + path: this[PREFIX](this.path), + // only apply the prefix to hard links. + linkpath: + this.type === 'Link' && this.linkpath !== undefined + ? this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this[MODE](this.stat.mode), + uid: this.portable ? undefined : this.stat.uid, + gid: this.portable ? undefined : this.stat.gid, + size: this.stat.size, + mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime, + type: this.type === 'Unsupported' ? undefined : this.type, + uname: this.portable + ? undefined + : this.stat.uid === this.myuid + ? this.myuser + : '', + atime: this.portable ? undefined : this.stat.atime, + ctime: this.portable ? undefined : this.stat.ctime, + }) + + if (this.header.encode() && !this.noPax) { + super.write( + new Pax({ + atime: this.portable ? undefined : this.header.atime, + ctime: this.portable ? undefined : this.header.ctime, + gid: this.portable ? undefined : this.header.gid, + mtime: this.noMtime + ? undefined + : this.mtime || this.header.mtime, + path: this[PREFIX](this.path), + linkpath: + this.type === 'Link' && this.linkpath !== undefined + ? this[PREFIX](this.linkpath) + : this.linkpath, + size: this.header.size, + uid: this.portable ? undefined : this.header.uid, + uname: this.portable ? undefined : this.header.uname, + dev: this.portable ? undefined : this.stat.dev, + ino: this.portable ? undefined : this.stat.ino, + nlink: this.portable ? 
undefined : this.stat.nlink, + }).encode(), + ) + } + const block = this.header?.block + /* c8 ignore start */ + if (!block) { + throw new Error('failed to encode header') + } + /* c8 ignore stop */ + super.write(block) + } + + [DIRECTORY]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot create directory entry without stat') + } + /* c8 ignore stop */ + if (this.path.slice(-1) !== '/') { + this.path += '/' + } + this.stat.size = 0 + this[HEADER]() + this.end() + } + + [SYMLINK]() { + fs.readlink(this.absolute, (er, linkpath) => { + if (er) { + return this.emit('error', er) + } + this[ONREADLINK](linkpath) + }) + } + + [ONREADLINK](linkpath: string) { + this.linkpath = normalizeWindowsPath(linkpath) + this[HEADER]() + this.end() + } + + [HARDLINK](linkpath: string) { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot create link entry without stat') + } + /* c8 ignore stop */ + this.type = 'Link' + this.linkpath = normalizeWindowsPath( + path.relative(this.cwd, linkpath), + ) + this.stat.size = 0 + this[HEADER]() + this.end() + } + + [FILE]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot create file entry without stat') + } + /* c8 ignore stop */ + if (this.stat.nlink > 1) { + const linkKey = + `${this.stat.dev}:${this.stat.ino}` as LinkCacheKey + const linkpath = this.linkCache.get(linkKey) + if (linkpath?.indexOf(this.cwd) === 0) { + return this[HARDLINK](linkpath) + } + this.linkCache.set(linkKey, this.absolute) + } + + this[HEADER]() + if (this.stat.size === 0) { + return this.end() + } + + this[OPENFILE]() + } + + [OPENFILE]() { + fs.open(this.absolute, 'r', (er, fd) => { + if (er) { + return this.emit('error', er) + } + this[ONOPENFILE](fd) + }) + } + + [ONOPENFILE](fd: number) { + this.fd = fd + if (this.#hadError) { + return this[CLOSE]() + } + /* c8 ignore start */ + if (!this.stat) { + throw new Error('should stat before calling onopenfile') + } + /* c8 ignore start */ + + this.blockLen = 512 * Math.ceil(this.stat.size / 512) + this.blockRemain = this.blockLen + const bufLen = Math.min(this.blockLen, this.maxReadSize) + this.buf = Buffer.allocUnsafe(bufLen) + this.offset = 0 + this.pos = 0 + this.remain = this.stat.size + this.length = this.buf.length + this[READ]() + } + + [READ]() { + const { fd, buf, offset, length, pos } = this + if (fd === undefined || buf === undefined) { + throw new Error('cannot read file without first opening') + } + fs.read(fd, buf, offset, length, pos, (er, bytesRead) => { + if (er) { + // ignoring the error from close(2) is a bad practice, but at + // this point we already have an error, don't need another one + return this[CLOSE](() => this.emit('error', er)) + } + this[ONREAD](bytesRead) + }) + } + + [CLOSE](cb: (er?: null | Error | NodeJS.ErrnoException) => any = () => {}) { + if (this.fd !== undefined) fs.close(this.fd, cb) + } + + [ONREAD](bytesRead: number) { + if (bytesRead <= 0 && this.remain > 0) { + const er = Object.assign( + new Error('encountered unexpected EOF'), + { + path: this.absolute, + syscall: 'read', + code: 'EOF', + }, + ) + return this[CLOSE](() => this.emit('error', er)) + } + + if (bytesRead > this.remain) { + const er = Object.assign( + new Error('did not encounter expected EOF'), + { + path: this.absolute, + syscall: 'read', + code: 'EOF', + }, + ) + return this[CLOSE](() => this.emit('error', er)) + } + + /* c8 ignore start */ + if (!this.buf) { + throw new Error('should have created buffer prior to reading') + } + /* c8 ignore stop */ + + // null out the 
rest of the buffer, if we could fit the block padding + // at the end of this loop, we've incremented bytesRead and this.remain + // to be incremented up to the blockRemain level, as if we had expected + // to get a null-padded file, and read it until the end. then we will + // decrement both remain and blockRemain by bytesRead, and know that we + // reached the expected EOF, without any null buffer to append. + if (bytesRead === this.remain) { + for ( + let i = bytesRead; + i < this.length && bytesRead < this.blockRemain; + i++ + ) { + this.buf[i + this.offset] = 0 + bytesRead++ + this.remain++ + } + } + + const writeBuf = + this.offset === 0 && bytesRead === this.buf.length + ? this.buf + : this.buf.subarray(this.offset, this.offset + bytesRead) + + const flushed = this.write(writeBuf) + if (!flushed) { + this[AWAITDRAIN](() => this[ONDRAIN]()) + } else { + this[ONDRAIN]() + } + } + + [AWAITDRAIN](cb: () => any) { + this.once('drain', cb) + } + + write(writeBuf: Buffer) { + if (this.blockRemain < writeBuf.length) { + const er = Object.assign( + new Error('writing more data than expected'), + { + path: this.absolute, + }, + ) + return this.emit('error', er) + } + this.remain -= writeBuf.length + this.blockRemain -= writeBuf.length + this.pos += writeBuf.length + this.offset += writeBuf.length + return super.write(writeBuf) + } + + [ONDRAIN]() { + if (!this.remain) { + if (this.blockRemain) { + super.write(Buffer.alloc(this.blockRemain)) + } + return this[CLOSE](er => + er ? this.emit('error', er) : this.end(), + ) + } + + /* c8 ignore start */ + if (!this.buf) { + throw new Error('buffer lost somehow in ONDRAIN') + } + /* c8 ignore stop */ + + if (this.offset >= this.length) { + // if we only have a smaller bit left to read, alloc a smaller buffer + // otherwise, keep it the same length it was before. 
+ this.buf = Buffer.allocUnsafe( + Math.min(this.blockRemain, this.buf.length), + ) + this.offset = 0 + } + this.length = this.buf.length - this.offset + this[READ]() + } +} + +export class WriteEntrySync extends WriteEntry implements Warner { + [LSTAT]() { + this[ONLSTAT](fs.lstatSync(this.absolute)) + } + + [SYMLINK]() { + this[ONREADLINK](fs.readlinkSync(this.absolute)) + } + + [OPENFILE]() { + this[ONOPENFILE](fs.openSync(this.absolute, 'r')) + } + + [READ]() { + let threw = true + try { + const { fd, buf, offset, length, pos } = this + /* c8 ignore start */ + if (fd === undefined || buf === undefined) { + throw new Error('fd and buf must be set in READ method') + } + /* c8 ignore stop */ + const bytesRead = fs.readSync(fd, buf, offset, length, pos) + this[ONREAD](bytesRead) + threw = false + } finally { + // ignoring the error from close(2) is a bad practice, but at + // this point we already have an error, don't need another one + if (threw) { + try { + this[CLOSE](() => {}) + } catch (er) {} + } + } + } + + [AWAITDRAIN](cb: () => any) { + cb() + } + + [CLOSE](cb: (er?: null | Error | NodeJS.ErrnoException) => any = () => {}) { + if (this.fd !== undefined) fs.closeSync(this.fd) + cb() + } +} + +export class WriteEntryTar + extends Minipass + implements Warner +{ + blockLen: number = 0 + blockRemain: number = 0 + buf: number = 0 + pos: number = 0 + remain: number = 0 + length: number = 0 + preservePaths: boolean + portable: boolean + strict: boolean + noPax: boolean + noMtime: boolean + readEntry: ReadEntry + type: EntryTypeName + prefix?: string + path: string + mode?: number + uid?: number + gid?: number + uname?: string + gname?: string + header?: Header + mtime?: Date + atime?: Date + ctime?: Date + linkpath?: string + size: number + + warn(code: string, message: string | Error, data: WarnData = {}) { + return warnMethod(this, code, message, data) + } + + constructor( + readEntry: ReadEntry, + opt_: TarOptionsWithAliases = {}, + ) { + const opt = dealias(opt_) + super() + this.preservePaths = !!opt.preservePaths + this.portable = !!opt.portable + this.strict = !!opt.strict + this.noPax = !!opt.noPax + this.noMtime = !!opt.noMtime + + this.readEntry = readEntry + this.type = readEntry.type + if (this.type === 'Directory' && this.portable) { + this.noMtime = true + } + + this.prefix = opt.prefix + + this.path = normalizeWindowsPath(readEntry.path) + this.mode = + readEntry.mode !== undefined + ? this[MODE](readEntry.mode) + : undefined + this.uid = this.portable ? undefined : readEntry.uid + this.gid = this.portable ? undefined : readEntry.gid + this.uname = this.portable ? undefined : readEntry.uname + this.gname = this.portable ? undefined : readEntry.gname + this.size = readEntry.size + this.mtime = this.noMtime + ? undefined + : opt.mtime || readEntry.mtime + this.atime = this.portable ? undefined : readEntry.atime + this.ctime = this.portable ? undefined : readEntry.ctime + this.linkpath = + readEntry.linkpath !== undefined + ? 
normalizeWindowsPath(readEntry.linkpath) + : undefined + + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn) + } + + let pathWarn: false | string = false + if (!this.preservePaths) { + const [root, stripped] = stripAbsolutePath(this.path) + if (root && typeof stripped === 'string') { + this.path = stripped + pathWarn = root + } + } + + this.remain = readEntry.size + this.blockRemain = readEntry.startBlockSize + + this.header = new Header({ + path: this[PREFIX](this.path), + linkpath: + this.type === 'Link' && this.linkpath !== undefined + ? this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this.mode, + uid: this.portable ? undefined : this.uid, + gid: this.portable ? undefined : this.gid, + size: this.size, + mtime: this.noMtime ? undefined : this.mtime, + type: this.type, + uname: this.portable ? undefined : this.uname, + atime: this.portable ? undefined : this.atime, + ctime: this.portable ? undefined : this.ctime, + }) + + if (pathWarn) { + this.warn( + 'TAR_ENTRY_INFO', + `stripping ${pathWarn} from absolute path`, + { + entry: this, + path: pathWarn + this.path, + }, + ) + } + + if (this.header.encode() && !this.noPax) { + super.write( + new Pax({ + atime: this.portable ? undefined : this.atime, + ctime: this.portable ? undefined : this.ctime, + gid: this.portable ? undefined : this.gid, + mtime: this.noMtime ? undefined : this.mtime, + path: this[PREFIX](this.path), + linkpath: + this.type === 'Link' && this.linkpath !== undefined + ? this[PREFIX](this.linkpath) + : this.linkpath, + size: this.size, + uid: this.portable ? undefined : this.uid, + uname: this.portable ? undefined : this.uname, + dev: this.portable ? undefined : this.readEntry.dev, + ino: this.portable ? undefined : this.readEntry.ino, + nlink: this.portable ? undefined : this.readEntry.nlink, + }).encode(), + ) + } + + const b = this.header?.block + /* c8 ignore start */ + if (!b) throw new Error('failed to encode header') + /* c8 ignore stop */ + super.write(b) + readEntry.pipe(this) + } + + [PREFIX](path: string) { + return prefixPath(path, this.prefix) + } + + [MODE](mode: number) { + return modeFix(mode, this.type === 'Directory', this.portable) + } + + write(data: Buffer) { + const writeLen = data.length + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate') + } + this.blockRemain -= writeLen + return super.write(data) + } + + end() { + if (this.blockRemain) { + super.write(Buffer.alloc(this.blockRemain)) + } + return super.end() + } +} + +const getType = (stat: Stats): EntryTypeName | 'Unsupported' => + stat.isFile() + ? 'File' + : stat.isDirectory() + ? 'Directory' + : stat.isSymbolicLink() + ? 
'SymbolicLink' + : 'Unsupported' From 9522a698f9c70be24720a73f927908e35a51242c Mon Sep 17 00:00:00 2001 From: isaacs Date: Tue, 9 Apr 2024 23:40:42 -0700 Subject: [PATCH 02/12] finish typescript/esm port --- map.js | 15 +- package.json | 9 +- scripts/generate-parse-fixtures.js | 21 +- src/create.ts | 11 +- src/cwd-error.ts | 2 +- src/extract.ts | 2 +- src/get-write-flag.ts | 22 +- src/header.ts | 30 +- src/list.ts | 6 +- src/mkdir.ts | 2 + src/options.ts | 14 +- src/pack.ts | 126 +- src/parse.ts | 21 +- src/path-reservations.ts | 6 +- src/pax.ts | 21 +- src/read-entry.ts | 4 + src/replace.ts | 8 +- src/symlink-error.ts | 2 +- src/types.ts | 1 + src/unpack.ts | 22 +- src/update.ts | 4 + src/write-entry.ts | 27 +- .../test/normalize-unicode.js.test.cjs | 22 +- test/create.js | 235 ++- test/cwd-error.js | 10 + test/extract.js | 236 ++- test/fixtures/make-tar.js | 26 + .../parse/bad-cksum--filter-strict.json | 9 +- test/fixtures/parse/bad-cksum--filter.json | 9 +- .../bad-cksum--meta-250-filter-strict.json | 9 +- .../parse/bad-cksum--meta-250-filter.json | 9 +- .../parse/bad-cksum--meta-250-strict.json | 9 +- test/fixtures/parse/bad-cksum--meta-250.json | 9 +- test/fixtures/parse/bad-cksum--strict.json | 9 +- test/fixtures/parse/bad-cksum.json | 9 +- .../body-byte-counts--filter-strict.json | 36 +- .../parse/body-byte-counts--filter.json | 36 +- ...y-byte-counts--meta-250-filter-strict.json | 36 +- .../body-byte-counts--meta-250-filter.json | 36 +- .../body-byte-counts--meta-250-strict.json | 36 +- .../parse/body-byte-counts--meta-250.json | 36 +- .../parse/body-byte-counts--strict.json | 36 +- test/fixtures/parse/body-byte-counts.json | 36 +- test/fixtures/parse/dir--filter-strict.json | 9 +- test/fixtures/parse/dir--filter.json | 9 +- .../parse/dir--meta-250-filter-strict.json | 9 +- test/fixtures/parse/dir--meta-250-filter.json | 9 +- test/fixtures/parse/dir--meta-250-strict.json | 9 +- test/fixtures/parse/dir--meta-250.json | 9 +- test/fixtures/parse/dir--strict.json | 9 +- test/fixtures/parse/dir.json | 9 +- .../parse/emptypax--filter-strict.json | 28 +- test/fixtures/parse/emptypax--filter.json | 28 +- .../emptypax--meta-250-filter-strict.json | 28 +- .../parse/emptypax--meta-250-filter.json | 28 +- .../parse/emptypax--meta-250-strict.json | 28 +- test/fixtures/parse/emptypax--meta-250.json | 28 +- test/fixtures/parse/emptypax--strict.json | 28 +- test/fixtures/parse/emptypax.json | 28 +- test/fixtures/parse/file--filter-strict.json | 9 +- test/fixtures/parse/file--filter.json | 9 +- .../parse/file--meta-250-filter-strict.json | 9 +- .../fixtures/parse/file--meta-250-filter.json | 9 +- .../fixtures/parse/file--meta-250-strict.json | 9 +- test/fixtures/parse/file--meta-250.json | 9 +- test/fixtures/parse/file--strict.json | 9 +- test/fixtures/parse/file.json | 9 +- .../parse/global-header--filter-strict.json | 22 +- .../fixtures/parse/global-header--filter.json | 22 +- ...global-header--meta-250-filter-strict.json | 22 +- .../parse/global-header--meta-250-filter.json | 22 +- .../parse/global-header--meta-250-strict.json | 22 +- .../parse/global-header--meta-250.json | 22 +- .../fixtures/parse/global-header--strict.json | 22 +- test/fixtures/parse/global-header.json | 22 +- test/fixtures/parse/links--filter-strict.json | 25 +- test/fixtures/parse/links--filter.json | 25 +- .../parse/links--meta-250-filter-strict.json | 25 +- .../parse/links--meta-250-filter.json | 25 +- .../parse/links--meta-250-strict.json | 25 +- test/fixtures/parse/links--meta-250.json | 25 +- 
test/fixtures/parse/links--strict.json | 25 +- .../parse/links-invalid--filter-strict.json | 8 +- .../fixtures/parse/links-invalid--filter.json | 8 +- ...links-invalid--meta-250-filter-strict.json | 8 +- .../parse/links-invalid--meta-250-filter.json | 8 +- .../parse/links-invalid--meta-250-strict.json | 8 +- .../parse/links-invalid--meta-250.json | 8 +- .../fixtures/parse/links-invalid--strict.json | 8 +- test/fixtures/parse/links-invalid.json | 8 +- .../parse/links-strip--filter-strict.json | 42 +- test/fixtures/parse/links-strip--filter.json | 42 +- .../links-strip--meta-250-filter-strict.json | 42 +- .../parse/links-strip--meta-250-filter.json | 42 +- .../parse/links-strip--meta-250-strict.json | 42 +- .../fixtures/parse/links-strip--meta-250.json | 42 +- test/fixtures/parse/links-strip--strict.json | 42 +- test/fixtures/parse/links-strip.json | 42 +- test/fixtures/parse/links.json | 25 +- .../parse/long-paths--filter-strict.json | 338 +--- test/fixtures/parse/long-paths--filter.json | 338 +--- .../long-paths--meta-250-filter-strict.json | 335 +--- .../parse/long-paths--meta-250-filter.json | 335 +--- .../parse/long-paths--meta-250-strict.json | 335 +--- test/fixtures/parse/long-paths--meta-250.json | 335 +--- test/fixtures/parse/long-paths--strict.json | 338 +--- test/fixtures/parse/long-paths.json | 338 +--- .../parse/long-pax--filter-strict.json | 21 +- test/fixtures/parse/long-pax--filter.json | 21 +- .../long-pax--meta-250-filter-strict.json | 18 +- .../parse/long-pax--meta-250-filter.json | 18 +- .../parse/long-pax--meta-250-strict.json | 18 +- test/fixtures/parse/long-pax--meta-250.json | 18 +- test/fixtures/parse/long-pax--strict.json | 21 +- test/fixtures/parse/long-pax.json | 21 +- .../next-file-has-long--filter-strict.json | 15 +- .../parse/next-file-has-long--filter.json | 15 +- ...file-has-long--meta-250-filter-strict.json | 15 +- .../next-file-has-long--meta-250-filter.json | 15 +- .../next-file-has-long--meta-250-strict.json | 15 +- .../parse/next-file-has-long--meta-250.json | 15 +- .../parse/next-file-has-long--strict.json | 15 +- test/fixtures/parse/next-file-has-long.json | 15 +- .../parse/null-byte--filter-strict.json | 25 +- test/fixtures/parse/null-byte--filter.json | 25 +- .../null-byte--meta-250-filter-strict.json | 25 +- .../parse/null-byte--meta-250-filter.json | 25 +- .../parse/null-byte--meta-250-strict.json | 25 +- test/fixtures/parse/null-byte--meta-250.json | 25 +- test/fixtures/parse/null-byte--strict.json | 25 +- test/fixtures/parse/null-byte.json | 25 +- ...ling-slash-corner-case--filter-strict.json | 37 +- .../trailing-slash-corner-case--filter.json | 37 +- ...h-corner-case--meta-250-filter-strict.json | 37 +- ...ng-slash-corner-case--meta-250-filter.json | 37 +- ...ng-slash-corner-case--meta-250-strict.json | 37 +- .../trailing-slash-corner-case--meta-250.json | 37 +- .../trailing-slash-corner-case--strict.json | 37 +- .../parse/trailing-slash-corner-case.json | 37 +- test/fixtures/parse/utf8--filter-strict.json | 57 +- test/fixtures/parse/utf8--filter.json | 57 +- .../parse/utf8--meta-250-filter-strict.json | 57 +- .../fixtures/parse/utf8--meta-250-filter.json | 57 +- .../fixtures/parse/utf8--meta-250-strict.json | 57 +- test/fixtures/parse/utf8--meta-250.json | 57 +- test/fixtures/parse/utf8--strict.json | 57 +- test/fixtures/parse/utf8.json | 57 +- test/get-write-flag.js | 86 +- test/header.js | 579 +++--- test/high-level-opt.js | 42 - test/index.js | 13 +- test/large-numbers.js | 7 +- test/list.js | 158 +- test/load-all.js | 24 +- test/make-tar.js 
| 27 - test/map.js | 12 +- test/mode-fix.js | 27 +- test/normalize-unicode.js | 38 +- test/normalize-windows-path.js | 28 +- test/options.js | 65 + test/pack.js | 739 ++++--- test/parse.js | 649 ++++--- test/path-reservations.js | 52 +- test/pax.js | 200 +- test/read-entry.js | 120 +- test/replace.js | 31 +- test/strip-absolute-path.js | 40 +- test/strip-trailing-slashes.js | 10 +- test/symlink-error.js | 11 + test/types.js | 10 +- test/unpack.js | 1701 ++++++++++------- test/update.js | 29 +- test/{warn-mixin.js => warn-method.js} | 13 +- test/winchars.js | 5 +- test/write-entry.js | 673 ++++--- 175 files changed, 4341 insertions(+), 7377 deletions(-) create mode 100644 test/cwd-error.js create mode 100644 test/fixtures/make-tar.js delete mode 100644 test/high-level-opt.js delete mode 100644 test/make-tar.js create mode 100644 test/options.js create mode 100644 test/symlink-error.js rename test/{warn-mixin.js => warn-method.js} (81%) diff --git a/map.js b/map.js index 1d7e33ae..e7ef1050 100644 --- a/map.js +++ b/map.js @@ -1,9 +1,12 @@ -const { basename } = require('path') +import { basename } from 'path' const map = test => - test === 'index.js' || test === 'map.js' ? test - : test === 'unpack.js' ? ['lib/unpack.js', 'lib/mkdir.js'] - : test === 'load-all.js' ? [] - : `lib/${test}` + test === 'map.js' + ? test + : test === 'unpack.js' + ? ['src/unpack.ts', 'src/mkdir.ts'] + : test === 'load-all.js' + ? [] + : `src/${test.replace(/js$/, 'ts')}` -module.exports = test => map(basename(test)) +export default test => map(basename(test)) diff --git a/package.json b/package.json index 7cfcb4f5..57eaf9a1 100644 --- a/package.json +++ b/package.json @@ -25,17 +25,18 @@ "chownr": "^3.0.0", "minipass": "^5.0.0", "minizlib": "^3.0.1", - "mkdirp": "^3.0.1" + "mkdirp": "^3.0.1", + "yallist": "^5.0.0" }, "devDependencies": { "chmodr": "^1.2.0", "end-of-stream": "^1.4.3", "events-to-array": "^2.0.3", "mutate-fs": "^2.1.1", - "nock": "^13.2.9", + "nock": "^13.5.4", "prettier": "^3.2.5", - "rimraf": "^3.0.2", - "tap": "^16.0.1", + "rimraf": "^5.0.5", + "tap": "^18.7.2", "tshy": "^1.13.1", "typedoc": "^0.25.13" }, diff --git a/scripts/generate-parse-fixtures.js b/scripts/generate-parse-fixtures.js index b46e8bd7..ff7d864f 100644 --- a/scripts/generate-parse-fixtures.js +++ b/scripts/generate-parse-fixtures.js @@ -1,9 +1,12 @@ -'use strict' -const Parse = require('../lib/parse.js') -const fs = require('fs') -const path = require('path') -const tardir = path.resolve(__dirname, '../test/fixtures/tars') -const parsedir = path.resolve(__dirname, '../test/fixtures/parse') +import { Parser } from '../dist/esm/parse.js' +import fs from 'fs' +import path, {dirname, resolve} from 'path' +import {fileURLToPath} from 'url' +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) + +const tardir = resolve(__dirname, '../test/fixtures/tars') +const parsedir = resolve(__dirname, '../test/fixtures/parse') const maxMetaOpt = [250, null] const filterOpt = [true, false] const strictOpt = [true, false] @@ -16,9 +19,9 @@ const makeTest = (tarfile, tardata, maxMeta, filter, strict) => { const tail = (o ? '-' + o : '') + '.json' const eventsfile = parsedir + '/' + path.basename(tarfile, '.tar') + tail - const p = new Parse({ + const p = new Parser({ maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, + filter: filter ? 
(_path, entry) => entry.size % 2 !== 0 : null, strict: strict, }) const events = [] @@ -70,7 +73,7 @@ const makeTest = (tarfile, tardata, maxMeta, filter, strict) => { p.on('entry', pushEntry('entry')) p.on('ignoredEntry', pushEntry('ignoredEntry')) - p.on('warn', (code, message, data) => events.push(['warn', code, message])) + p.on('warn', (code, message, _data) => events.push(['warn', code, message])) p.on('error', er => events.push(['error', { message: er.message, code: er.code, diff --git a/src/create.ts b/src/create.ts index b97eb2e8..85b27c38 100644 --- a/src/create.ts +++ b/src/create.ts @@ -130,16 +130,17 @@ const addFilesSync = (p: PackSync, files: string[]) => { const addFilesAsync = async ( p: Pack, files: string[], - i = 0, ): Promise => { - for (; i < files.length; i++) { + for (let i = 0; i < files.length; i++) { const file = String(files[i]) if (file.charAt(0) === '@') { - return list({ + await list({ file: path.resolve(String(p.cwd), file.slice(1)), noResume: true, - onentry: entry => p.add(entry), - }).then(_ => addFilesAsync(p, files)) + onentry: entry => { + p.add(entry) + }, + }) } else { p.add(file) } diff --git a/src/cwd-error.ts b/src/cwd-error.ts index 7a708ed4..bdc48046 100644 --- a/src/cwd-error.ts +++ b/src/cwd-error.ts @@ -4,7 +4,7 @@ export class CwdError extends Error { syscall: 'chdir' = 'chdir' constructor(path: string, code: string) { - super(code + ": Cannot cd into '" + path + "'") + super(`${code}: Cannot cd into '${path}'`) this.path = path this.code = code } diff --git a/src/extract.ts b/src/extract.ts index f79f9408..b3cb3b0b 100644 --- a/src/extract.ts +++ b/src/extract.ts @@ -132,7 +132,7 @@ const extractFileSync = (opt: TarOptionsSyncFile) => { const extractFile = ( opt: TarOptionsFile, - cb: () => void = () => {}, + cb?: () => void, ) => { const u = new Unpack(opt) const readSize = opt.maxReadSize || 16 * 1024 * 1024 diff --git a/src/get-write-flag.ts b/src/get-write-flag.ts index db358591..bd8842dc 100644 --- a/src/get-write-flag.ts +++ b/src/get-write-flag.ts @@ -5,21 +5,19 @@ // library is used for is extracting tarballs of many // relatively small files in npm packages and the like, // it can be a big boost on Windows platforms. -// Only supported in Node v12.9.0 and above. 
+ +import fs from 'fs' + const platform = process.env.__FAKE_PLATFORM__ || process.platform const isWindows = platform === 'win32' -const g = globalThis as typeof globalThis & { - __FAKE_TESTING_FS__: typeof import('fs') -} -const fs = g.__FAKE_TESTING_FS__ || require('fs') -/* istanbul ignore next */ -const { - O_CREAT, - O_TRUNC, - O_WRONLY, - UV_FS_O_FILEMAP = 0, -} = fs.constants +/* c8 ignore start */ +const { O_CREAT, O_TRUNC, O_WRONLY } = fs.constants +const UV_FS_O_FILEMAP = + Number(process.env.__FAKE_FS_O_FILENAME__) || + fs.constants.UV_FS_O_FILEMAP || + 0 +/* c8 ignore stop */ const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP const fMapLimit = 512 * 1024 diff --git a/src/header.ts b/src/header.ts index b9efbda6..e648876e 100644 --- a/src/header.ts +++ b/src/header.ts @@ -15,7 +15,7 @@ export type HeaderData = { gid?: number size?: number cksum?: number - type?: EntryTypeCode | EntryTypeName + type?: EntryTypeName | 'Unsupported' linkpath?: string uname?: string gname?: string @@ -46,7 +46,7 @@ export class Header implements HeaderData { gid?: number size?: number cksum?: number - #type: EntryTypeCode = '0' + #type: EntryTypeCode | 'Unsupported' = 'Unsupported' linkpath?: string uname?: string gname?: string @@ -101,10 +101,8 @@ export class Header implements HeaderData { // old tar versions marked dirs as a file with a trailing / const t = decString(buf, off + 156, 1) - if (types.isCode(t)) this.#type = t - else this.#type = '0' - if (this.#type === '') { - this.#type = '0' + if (types.isCode(t)) { + this.#type = t || '0' } if (this.#type === '0' && this.path.slice(-1) === '/') { this.#type = '5' @@ -126,8 +124,10 @@ export class Header implements HeaderData { ) { this.uname = decString(buf, off + 265, 32) this.gname = decString(buf, off + 297, 32) + /* c8 ignore start */ this.devmaj = decNumber(buf, off + 329, 8) ?? 0 this.devmin = decNumber(buf, off + 337, 8) ?? 0 + /* c8 ignore stop */ if (buf[off + 475] !== 0) { // definitely a prefix, definitely >130 chars. const prefix = decString(buf, off + 345, 155) @@ -152,7 +152,7 @@ export class Header implements HeaderData { } this.cksumValid = sum === this.cksum - if (this.cksum === null && sum === 8 * 0x20) { + if (this.cksum === undefined && sum === 8 * 0x20) { this.nullBlock = true } } @@ -180,6 +180,10 @@ export class Header implements HeaderData { buf = this.block = Buffer.alloc(512) } + if (this.#type === 'Unsupported') { + this.#type = '0' + } + if (!(buf.length >= off + 512)) { throw new Error('need 512 bytes for header') } @@ -244,16 +248,20 @@ export class Header implements HeaderData { } get type(): EntryTypeName { - return types.name.get(this.#type) as EntryTypeName + return ( + this.#type === 'Unsupported' + ? 
this.#type + : types.name.get(this.#type) + ) as EntryTypeName } - get typeKey(): EntryTypeCode { + get typeKey(): EntryTypeCode | 'Unsupported' { return this.#type } - set type(type: EntryTypeCode | EntryTypeName) { + set type(type: EntryTypeCode | EntryTypeName | 'Unsupported') { const c = String(types.code.get(type as EntryTypeName)) - if (types.isCode(c)) { + if (types.isCode(c) || c === 'Unsupported') { this.#type = c } else if (types.isCode(type)) { this.#type = type diff --git a/src/list.ts b/src/list.ts index a31a4397..7ba43143 100644 --- a/src/list.ts +++ b/src/list.ts @@ -8,6 +8,7 @@ import { isSyncFile, TarOptions, TarOptionsFile, + TarOptionsSyncFile, TarOptionsWithAliases, TarOptionsWithAliasesFile, TarOptionsWithAliasesSync, @@ -126,7 +127,7 @@ const filesFilter = (opt: TarOptions, files: string[]) => { : file => mapHas(stripTrailingSlashes(file)) } -const listFileSync = (opt: TarOptionsWithAliasesSyncFile) => { +const listFileSync = (opt: TarOptionsSyncFile) => { const p = list_(opt) const file = opt.file let threw = true @@ -152,12 +153,13 @@ const listFileSync = (opt: TarOptionsWithAliasesSyncFile) => { if (threw && fd) { try { fs.closeSync(fd) + /* c8 ignore next */ } catch (er) {} } } } -const listFile = (opt: TarOptionsFile, cb?: () => void) => { +const listFile = (opt: TarOptionsFile, cb?: () => void): Promise => { const parse = new Parser(opt) const readSize = opt.maxReadSize || 16 * 1024 * 1024 diff --git a/src/mkdir.ts b/src/mkdir.ts index 6c84654b..cf98d1ac 100644 --- a/src/mkdir.ts +++ b/src/mkdir.ts @@ -65,6 +65,7 @@ export const mkdir = ( // if there's any overlap between mask and mode, // then we'll need an explicit chmod + /* c8 ignore next */ const umask = opt.umask ?? 0o22 const mode = opt.mode | 0o0700 const needChmod = (mode & umask) !== 0 @@ -215,6 +216,7 @@ export const mkdirSync = (dir: string, opt: MkdirOptions) => { dir = normalizeWindowsPath(dir) // if there's any overlap between mask and mode, // then we'll need an explicit chmod + /* c8 ignore next */ const umask = opt.umask ?? 
0o22 const mode = opt.mode | 0o700 const needChmod = (mode & umask) !== 0 diff --git a/src/options.ts b/src/options.ts index 4449b718..a612f142 100644 --- a/src/options.ts +++ b/src/options.ts @@ -435,18 +435,18 @@ export interface TarOptionsWithAliases extends TarOptions { export type TarOptionsWithAliasesSync = TarOptionsWithAliases & { sync: true } -export type TarOptionsWithAliasesFile = TarOptionsWithAliases & { - file: string -} +export type TarOptionsWithAliasesFile = + | (TarOptionsWithAliases & { + file: string + }) + | (TarOptionsWithAliases & { f: string }) export type TarOptionsWithAliasesSyncFile = TarOptionsWithAliasesSync & TarOptionsWithAliasesFile export const isSyncFile = (o: TarOptions): o is TarOptionsSyncFile => !!o.sync && !!o.file -export const isSync = (o: TarOptions): o is TarOptionsSync => - !!o.sync -export const isFile = (o: TarOptions): o is TarOptionsFile => - !!o.file +export const isSync = (o: TarOptions): o is TarOptionsSync => !!o.sync +export const isFile = (o: TarOptions): o is TarOptionsFile => !!o.file const dealiasKey = ( k: keyof TarOptionsWithAliases, diff --git a/src/pack.ts b/src/pack.ts index 4dc7e3b5..07d6500f 100644 --- a/src/pack.ts +++ b/src/pack.ts @@ -8,17 +8,6 @@ // streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar')) import fs, { type Stats } from 'fs' -import { Minipass } from 'minipass' -import { BrotliCompress, Gzip } from 'minizlib' -import path from 'path' -import { normalizeWindowsPath } from './normalize-windows-path.js' -import { dealias, LinkCacheKey, TarOptions } from './options.js' -import { ReadEntry } from './read-entry.js' -import { - warnMethod, - type WarnData, - type Warner, -} from './warn-method.js' import { WriteEntry, WriteEntrySync, @@ -34,13 +23,23 @@ export class PackJob { pending: boolean = false ignore: boolean = false piped: boolean = false - constructor(path: string, absolute: string) { this.path = path || './' this.absolute = absolute } } +import { Minipass } from 'minipass' +import * as zlib from 'minizlib' +//@ts-ignore +import { Yallist } from 'yallist' +import { ReadEntry } from './read-entry.js' +import { + warnMethod, + type WarnData, + type Warner, +} from './warn-method.js' + const EOF = Buffer.alloc(1024) const ONSTAT = Symbol('onStat') const ENDED = Symbol('ended') @@ -63,20 +62,24 @@ const WRITEENTRYCLASS = Symbol('writeEntryClass') const WRITE = Symbol('write') const ONDRAIN = Symbol('ondrain') +import path from 'path' +import { normalizeWindowsPath } from './normalize-windows-path.js' +import { TarOptions } from './options.js' + export class Pack extends Minipass implements Warner { opt: TarOptions - file: string cwd: string maxReadSize?: number preservePaths: boolean strict: boolean noPax: boolean prefix: string - linkCache: Map - statCache: Map - readdirCache: Map + linkCache: Exclude + statCache: Exclude + file: string portable: boolean - zip?: Gzip | BrotliCompress + zip?: zlib.BrotliCompress | zlib.Gzip + readdirCache: Exclude noDirRecurse: boolean follow: boolean noMtime: boolean @@ -84,17 +87,14 @@ export class Pack extends Minipass implements Warner { filter: Exclude jobs: number; - [WRITEENTRYCLASS]: - | typeof WriteEntry - | typeof WriteEntrySync - [QUEUE]: PackJob[] = []; + [WRITEENTRYCLASS]: typeof WriteEntry | typeof WriteEntrySync; + [QUEUE]: Yallist; [JOBS]: number = 0; [PROCESSING]: boolean = false; [ENDED]: boolean = false - constructor(opt_: TarOptions = {}) { + constructor(opt: TarOptions = {}) { super() - const opt = dealias(opt_) this.opt = opt this.file = 
opt.file || '' this.cwd = opt.cwd || process.cwd() @@ -125,15 +125,17 @@ export class Pack extends Minipass implements Warner { if (this.portable) { opt.gzip.portable = true } - this.zip = new Gzip(opt.gzip) + this.zip = new zlib.Gzip(opt.gzip) } if (opt.brotli) { if (typeof opt.brotli !== 'object') { opt.brotli = {} } - this.zip = new BrotliCompress(opt.brotli) + this.zip = new zlib.BrotliCompress(opt.brotli) } - const zip = this.zip as Gzip | BrotliCompress + /* c8 ignore next */ + if (!this.zip) throw new Error('impossible') + const zip = this.zip zip.on('data', chunk => super.write(chunk)) zip.on('end', () => super.end()) zip.on('drain', () => this[ONDRAIN]()) @@ -145,21 +147,18 @@ export class Pack extends Minipass implements Warner { this.noDirRecurse = !!opt.noDirRecurse this.follow = !!opt.follow this.noMtime = !!opt.noMtime - this.mtime = opt.mtime + if (opt.mtime) this.mtime = opt.mtime this.filter = - typeof opt.filter === 'function' ? opt.filter : () => true + typeof opt.filter === 'function' ? opt.filter : _ => true + this[QUEUE] = new Yallist() this[JOBS] = 0 this.jobs = Number(opt.jobs) || 4 this[PROCESSING] = false this[ENDED] = false } - warn(code: string, message: string | Error, data: WarnData = {}) { - return warnMethod(this, code, message, data) - } - [WRITE](chunk: Buffer) { return super.write(chunk) } @@ -169,20 +168,10 @@ export class Pack extends Minipass implements Warner { return this } - end(cb?: () => void): this - end(path: string, cb?: () => void): this - end( - path: string, - encoding?: Minipass.Encoding | undefined, - cb?: () => void, - ): this - end( - path?: string | (() => void), - _encoding?: Minipass.Encoding | (() => void), - _cb?: () => void, - ) { - if (typeof path === 'string') { - this.write(path) + //@ts-ignore + end(path?: string | ReadEntry) { + if (path) { + this.add(path) } this[ENDED] = true this[PROCESS]() @@ -213,7 +202,7 @@ export class Pack extends Minipass implements Warner { } else { const job = new PackJob(p.path, absolute) job.entry = new WriteEntryTar(p, this[ENTRYOPT](job)) - job.entry.on('end', _ => this[JOBDONE](job)) + job.entry.on('end', () => this[JOBDONE](job)) this[JOBS] += 1 this[QUEUE].push(job) } @@ -280,13 +269,15 @@ export class Pack extends Minipass implements Warner { this[PROCESSING] = true for ( - let j: PackJob | undefined, w = 0; - (j = this[QUEUE][w]) && this[JOBS] < this.jobs; - w ++ + let w = this[QUEUE].head; + !!w && this[JOBS] < this.jobs; + w = w.next ) { - this[PROCESSJOB](j) - if (j.ignore) { - this[QUEUE].splice(w, 1) + this[PROCESSJOB](w.value) + if (w.value.ignore) { + const p = w.next + this[QUEUE].removeNode(w) + w.next = p } } @@ -303,7 +294,7 @@ export class Pack extends Minipass implements Warner { } get [CURRENT]() { - return this[QUEUE] && this[QUEUE][0] + return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value } [JOBDONE](_job: PackJob) { @@ -346,9 +337,8 @@ export class Pack extends Minipass implements Warner { job.stat.isDirectory() && !job.readdir ) { - const rc = this.readdirCache.get(job.absolute) - if ( rc) { + if (rc) { this[ONREADDIR](job, rc) } else { this[READDIR](job) @@ -391,8 +381,7 @@ export class Pack extends Minipass implements Warner { [ENTRY](job: PackJob) { this[JOBS] += 1 try { - const Cls = this[WRITEENTRYCLASS] - return new Cls(job.path, this[ENTRYOPT](job)) + return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)) .on('end', () => this[JOBDONE](job)) .on('error', er => this.emit('error', er)) } catch (er) { @@ -420,11 +409,8 @@ export class Pack extends 
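// (Pack is itself a Minipass stream: paths go in via add()/write() and raw
// tar bytes come out, with at most `jobs` WriteEntry streams in flight.)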
Minipass implements Warner { const source = job.entry const zip = this.zip - /* c8 ignore start */ - if (!source) { - throw new Error('must have source before piping') - } + if (!source) throw new Error('cannot pipe without source') /* c8 ignore stop */ if (zip) { @@ -448,6 +434,13 @@ export class Pack extends Minipass implements Warner { } return super.pause() } + warn( + code: string, + message: string | Error, + data: WarnData = {}, + ): void { + warnMethod(this, code, message, data) + } } export class PackSync extends Pack { @@ -472,11 +465,6 @@ export class PackSync extends Pack { // gotta get it all in this tick [PIPE](job: PackJob) { const source = job.entry - /* c8 ignore start */ - if (!source) { - throw new Error('job without source') - } - /* c8 ignore stop */ const zip = this.zip if (job.readdir) { @@ -487,6 +475,10 @@ export class PackSync extends Pack { }) } + /* c8 ignore start */ + if (!source) throw new Error('Cannot pipe without source') + /* c8 ignore stop */ + if (zip) { source.on('data', chunk => { zip.write(chunk) diff --git a/src/parse.ts b/src/parse.ts index 7912fef0..d0b0781e 100644 --- a/src/parse.ts +++ b/src/parse.ts @@ -20,6 +20,7 @@ import { EventEmitter as EE } from 'events' import { BrotliDecompress, Unzip } from 'minizlib' +import { Yallist } from 'yallist' import { Header } from './header.js' import { TarOptions } from './options.js' import { Pax } from './pax.js' @@ -78,7 +79,8 @@ export class Parser extends EE implements Warner { writable: true = true readable: false = false; - [QUEUE]: (ReadEntry | [string | symbol, any, any])[] = []; + [QUEUE]: Yallist = + new Yallist(); [BUFFER]?: Buffer; [READENTRY]?: ReadEntry; [WRITEENTRY]?: ReadEntry; @@ -161,7 +163,7 @@ export class Parser extends EE implements Warner { } [CONSUMEHEADER](chunk: Buffer, position: number) { - if (this[SAW_VALID_ENTRY] === null) { + if (this[SAW_VALID_ENTRY] === undefined) { this[SAW_VALID_ENTRY] = false } let header @@ -322,8 +324,8 @@ export class Parser extends EE implements Warner { if (!entry) { throw new Error('attempt to consume body without entry??') } - /* c8 ignore stop */ const br = entry.blockRemain ?? 0 + /* c8 ignore stop */ const c = br >= chunk.length && position === 0 ? 
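// Fast path: when the entry's remaining block span covers the whole chunk
// and we are at offset 0, the chunk can be forwarded whole; presumably the
// other branch slices out only the bytes that belong to this body.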
chunk @@ -387,9 +389,10 @@ export class Parser extends EE implements Warner { break } - /* istanbul ignore next */ + /* c8 ignore start */ default: throw new Error('unknown meta: ' + entry.type) + /* c8 ignore stop */ } } @@ -422,7 +425,7 @@ export class Parser extends EE implements Warner { // look for gzip header for ( let i = 0; - this[UNZIP] === null && i < gzipHeader.length; + this[UNZIP] === undefined && i < gzipHeader.length; i++ ) { if (chunk[i] !== gzipHeader[i]) { @@ -456,7 +459,7 @@ export class Parser extends EE implements Warner { } if ( - this[UNZIP] === null || + this[UNZIP] === undefined || (this[UNZIP] === false && this.brotli) ) { const ended = this[ENDED] @@ -595,9 +598,10 @@ export class Parser extends EE implements Warner { position += this[CONSUMEMETA](chunk, position) break - /* istanbul ignore next */ + /* c8 ignore start */ default: throw new Error('invalid state: ' + this[STATE]) + /* c8 ignore stop */ } } @@ -616,13 +620,16 @@ export class Parser extends EE implements Warner { end(chunk?: Buffer) { if (!this[ABORTED]) { if (this[UNZIP]) { + /* c8 ignore start */ if (chunk) this[UNZIP].write(chunk) + /* c8 ignore stop */ this[UNZIP].end() } else { this[ENDED] = true if (this.brotli === undefined) chunk = chunk || Buffer.alloc(0) if (chunk) this.write(chunk) + this[MAYBEEND]() } } } diff --git a/src/path-reservations.ts b/src/path-reservations.ts index 711e9ee2..79407710 100644 --- a/src/path-reservations.ts +++ b/src/path-reservations.ts @@ -95,10 +95,11 @@ export class PathReservations { dirs: (Handler | Set)[][] } { const res = this.#reservations.get(fn) - /* istanbul ignore if - unpossible */ + /* c8 ignore start */ if (!res) { throw new Error('function does not have any path reservations') } + /* c8 ignore stop */ return { paths: res.paths.map((path: string) => this.#queues.get(path), @@ -168,7 +169,8 @@ export class PathReservations { for (const dir of dirs) { const q = this.#queues.get(dir) const q0 = q?.[0] - if (!(q0 instanceof Set) || !q) continue + /* c8 ignore next - type safety only */ + if (!q || !(q0 instanceof Set)) continue if (q0.size === 1 && q.length === 1) { this.#queues.delete(dir) continue diff --git a/src/pax.ts b/src/pax.ts index 7c542c95..680c9234 100644 --- a/src/pax.ts +++ b/src/pax.ts @@ -28,18 +28,19 @@ export class Pax implements HeaderData { this.atime = obj.atime this.charset = obj.charset this.comment = obj.comment + this.ctime = obj.ctime + this.dev = obj.dev this.gid = obj.gid + this.global = global this.gname = obj.gname + this.ino = obj.ino this.linkpath = obj.linkpath this.mtime = obj.mtime + this.nlink = obj.nlink this.path = obj.path this.size = obj.size this.uid = obj.uid this.uname = obj.uname - this.dev = obj.dev - this.ino = obj.ino - this.nlink = obj.nlink - this.global = global } encode() { @@ -63,7 +64,9 @@ export class Pax implements HeaderData { // XXX split the path // then the path should be PaxHeader + basename, but less than 99, // prepend with the dirname - path: ('PaxHeader/' + basename(this.path || '')).slice(0, 99), + /* c8 ignore start */ + path: ('PaxHeader/' + basename(this.path ?? 
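// ustar allots 100 bytes to the name field (99 chars + NUL terminator),
// hence slicing the synthesized 'PaxHeader/<basename>' path to 99.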
'')).slice(0, 99), + /* c8 ignore stop */ mode: this.mode || 0o644, uid: this.uid, gid: this.gid, @@ -163,14 +166,12 @@ const parseKVLine = (set: Record, line: string) => { const kv = line.split('=') const r = kv.shift() - /* c8 ignore next */ - if (!r) throw new Error('fell of key/value list somehow') - - const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1') - if (!k) { + if (!r) { return set } + const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1') + const v = kv.join('=') set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ? new Date(Number(v) * 1000) diff --git a/src/read-entry.ts b/src/read-entry.ts index 98260ee7..a4ca7eb7 100644 --- a/src/read-entry.ts +++ b/src/read-entry.ts @@ -42,7 +42,9 @@ export class ReadEntry extends Minipass { this.extended = ex this.globalExtended = gex this.header = header + /* c8 ignore start */ this.remain = header.size ?? 0 + /* c8 ignore stop */ this.startBlockSize = 512 * Math.ceil(this.remain / 512) this.blockRemain = this.startBlockSize this.type = header.type @@ -93,9 +95,11 @@ export class ReadEntry extends Minipass { this.mtime = header.mtime this.atime = header.atime this.ctime = header.ctime + /* c8 ignore start */ this.linkpath = header.linkpath ? normalizeWindowsPath(header.linkpath) : undefined + /* c8 ignore stop */ this.uname = header.uname this.gname = header.gname diff --git a/src/replace.ts b/src/replace.ts index 1d45445d..579b9b78 100644 --- a/src/replace.ts +++ b/src/replace.ts @@ -219,6 +219,7 @@ const replace_ = ( return cb(null, position) } + /* c8 ignore next */ const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512) if (position + entryBlockSize + 512 > size) { return cb(null, position) @@ -299,16 +300,15 @@ const addFilesSync = (p: Pack, files: string[]) => { const addFilesAsync = async ( p: Pack, files: string[], - i = 0, ): Promise => { - for (; i < files.length; i++) { + for (let i = 0; i < files.length; i++) { const file = String(files[i]) if (file.charAt(0) === '@') { - return list({ + await list({ file: path.resolve(String(p.cwd), file.slice(1)), noResume: true, onentry: entry => p.add(entry), - }).then(_ => addFilesAsync(p, files)) + }) } else { p.add(file) } diff --git a/src/symlink-error.ts b/src/symlink-error.ts index c237cb94..7aa5ea78 100644 --- a/src/symlink-error.ts +++ b/src/symlink-error.ts @@ -4,7 +4,7 @@ export class SymlinkError extends Error { syscall: 'symlink' = 'symlink' code: 'TAR_SYMLINK_ERROR' = 'TAR_SYMLINK_ERROR' constructor (symlink: string, path: string) { - super('Cannot extract through symbolic link') + super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link') this.symlink = symlink this.path = path } diff --git a/src/types.ts b/src/types.ts index 080995a6..96b8f74b 100644 --- a/src/types.ts +++ b/src/types.ts @@ -49,6 +49,7 @@ export type EntryTypeName = | 'SparseFile' | 'TapeVolumeHeader' | 'OldExtendedHeader' + | 'Unsupported' // map types from key to human-friendly name export const name = new Map([ diff --git a/src/unpack.ts b/src/unpack.ts index 064bbcf9..26e857cc 100644 --- a/src/unpack.ts +++ b/src/unpack.ts @@ -68,7 +68,7 @@ const DEFAULT_MAX_DEPTH = 1024 // semantics. 
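// The workaround below renames the file to a unique temporary name first
// and then unlinks *that*, so the original name becomes reusable right
// away even while Windows is still tearing the old entry down.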
// // See: https://github.com/npm/node-tar/issues/183 -/* istanbul ignore next */ +/* c8 ignore start */ const unlinkFile = ( path: string, cb: (er?: Error | null) => void, @@ -85,8 +85,9 @@ const unlinkFile = ( fs.unlink(name, cb) }) } +/* c8 ignore stop */ -/* istanbul ignore next */ +/* c8 ignore start */ const unlinkFileSync = (path: string) => { if (!isWindows) { return fs.unlinkSync(path) @@ -96,6 +97,7 @@ const unlinkFileSync = (path: string) => { fs.renameSync(path, name) fs.unlinkSync(name) } +/* c8 ignore stop */ // this.gid, entry.gid, this.processUid const uint32 = ( @@ -331,6 +333,7 @@ export class Unpack extends Parser { if (!this.preservePaths) { if ( parts.includes('..') || + /* c8 ignore next */ (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? '')) ) { this.warn('TAR_ENTRY_ERROR', `path contains '..'`, { @@ -366,7 +369,7 @@ export class Unpack extends Parser { // if we somehow ended up with a path that escapes the cwd, and we are // not in preservePaths mode, then something is fishy! This should have // been prevented above, so ignore this for coverage. - /* istanbul ignore if - defense in depth */ + /* c8 ignore start - defense in depth */ if ( !this.preservePaths && typeof entry.absolute === 'string' && @@ -381,6 +384,7 @@ export class Unpack extends Parser { }) return false } + /* c8 ignore stop */ // an archive can set properties on the extraction directory, but it // may not replace the cwd with a different kind of thing entirely. @@ -522,10 +526,11 @@ export class Unpack extends Parser { let actions = 1 const done = (er?: null | Error) => { if (er) { - /* istanbul ignore else - we should always have a fd by now */ + /* c8 ignore start - we should always have a fd by now */ if (stream.fd) { fs.close(stream.fd, () => {}) } + /* c8 ignore stop */ this[ONERROR](er, entry) fullyDone() @@ -758,6 +763,7 @@ export class Unpack extends Parser { if ( st && (this.keep || + /* c8 ignore next */ (this.newer && st.mtime > (entry.mtime ?? st.mtime))) ) { this[SKIP](entry) @@ -912,6 +918,7 @@ export class UnpackSync extends Unpack { if ( st && (this.keep || + /* c8 ignore next */ (this.newer && st.mtime > (entry.mtime ?? st.mtime))) ) { return this[SKIP](entry) @@ -1019,7 +1026,11 @@ export class UnpackSync extends Unpack { fs.fchownSync(fd, Number(uid), Number(gid)) } catch (fchowner) { try { - fs.chownSync(String(entry.absolute), Number(uid), Number(gid)) + fs.chownSync( + String(entry.absolute), + Number(uid), + Number(gid), + ) } catch (chowner) { er = er || fchowner } @@ -1048,6 +1059,7 @@ export class UnpackSync extends Unpack { entry.atime || new Date(), entry.mtime, ) + /* c8 ignore next */ } catch (er) {} } if (this[DOCHOWN](entry)) { diff --git a/src/update.ts b/src/update.ts index a049efd5..d30b2daa 100644 --- a/src/update.ts +++ b/src/update.ts @@ -51,12 +51,16 @@ const mtimeFilter = (opt: TarOptionsWithAliases) => { ? (path, stat) => filter(path, stat) && !( + /* c8 ignore start */ (opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > (stat.mtime ?? 0) + /* c8 ignore stop */ ) : (path, stat) => !( + /* c8 ignore start */ (opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > (stat.mtime ?? 
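// An interpretation of this predicate: only re-add the file when the mtime
// recorded for it in the archive (via opt.mtimeCache) is not newer than
// the mtime of the copy currently on disk.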
0) + /* c8 ignore stop */ ) } diff --git a/src/write-entry.ts b/src/write-entry.ts index f9910c6d..5d728585 100644 --- a/src/write-entry.ts +++ b/src/write-entry.ts @@ -80,7 +80,7 @@ export class WriteEntry extends Minipass implements Warner { type?: EntryTypeName | 'Unsupported' linkpath?: string stat?: Stats - /* c8 ignore start */ + /* c8 ignore start */ #hadError: boolean = false @@ -107,12 +107,12 @@ export class WriteEntry extends Minipass implements Warner { this.on('warn', opt.onwarn) } - let pathWarn = false + let pathWarn: string | boolean = false if (!this.preservePaths) { const [root, stripped] = stripAbsolutePath(this.path) if (root && typeof stripped === 'string') { this.path = stripped - pathWarn = !!root + pathWarn = root } } @@ -229,6 +229,7 @@ export class WriteEntry extends Minipass implements Warner { gid: this.portable ? undefined : this.stat.gid, size: this.stat.size, mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime, + /* c8 ignore next */ type: this.type === 'Unsupported' ? undefined : this.type, uname: this.portable ? undefined @@ -385,7 +386,11 @@ export class WriteEntry extends Minipass implements Warner { }) } - [CLOSE](cb: (er?: null | Error | NodeJS.ErrnoException) => any = () => {}) { + /* c8 ignore start */ + [CLOSE]( + cb: (er?: null | Error | NodeJS.ErrnoException) => any = () => {}, + ) { + /* c8 ignore stop */ if (this.fd !== undefined) fs.close(this.fd, cb) } @@ -541,7 +546,11 @@ export class WriteEntrySync extends WriteEntry implements Warner { cb() } - [CLOSE](cb: (er?: null | Error | NodeJS.ErrnoException) => any = () => {}) { + /* c8 ignore start */ + [CLOSE]( + cb: (er?: null | Error | NodeJS.ErrnoException) => any = () => {}, + ) { + /* c8 ignore stop */ if (this.fd !== undefined) fs.closeSync(this.fd) cb() } @@ -595,7 +604,13 @@ export class WriteEntryTar this.noMtime = !!opt.noMtime this.readEntry = readEntry - this.type = readEntry.type + const { type } = readEntry + /* c8 ignore start */ + if (type === 'Unsupported') { + throw new Error('writing entry that should be ignored') + } + /* c8 ignore stop */ + this.type = type if (this.type === 'Directory' && this.portable) { this.noMtime = true } diff --git a/tap-snapshots/test/normalize-unicode.js.test.cjs b/tap-snapshots/test/normalize-unicode.js.test.cjs index 3163313d..6c7be82a 100644 --- a/tap-snapshots/test/normalize-unicode.js.test.cjs +++ b/tap-snapshots/test/normalize-unicode.js.test.cjs @@ -5,26 +5,26 @@ * Make sure to inspect the output below. Do not ignore changes! 
*/ 'use strict' -exports[`test/normalize-unicode.js TAP normalize with strip slashes "1/4foo.txt" > normalized 1`] = ` -1/4foo.txt +exports[`test/normalize-unicode.js > TAP > normalize with strip slashes > "\\\\\eee\\\\\\" > normalized 1`] = ` +\\\\\eee\\\\\\ ` -exports[`test/normalize-unicode.js TAP normalize with strip slashes "\\\\a\\\\b\\\\c\\\\d\\\\" > normalized 1`] = ` +exports[`test/normalize-unicode.js > TAP > normalize with strip slashes > "\\\\a\\\\b\\\\c\\\\d\\\\" > normalized 1`] = ` /a/b/c/d ` -exports[`test/normalize-unicode.js TAP normalize with strip slashes "¼foo.txt" > normalized 1`] = ` -¼foo.txt -` - -exports[`test/normalize-unicode.js TAP normalize with strip slashes "﹨aaaa﹨dddd﹨" > normalized 1`] = ` +exports[`test/normalize-unicode.js > TAP > normalize with strip slashes > "﹨aaaa﹨dddd﹨" > normalized 1`] = ` ﹨aaaa﹨dddd﹨ ` -exports[`test/normalize-unicode.js TAP normalize with strip slashes "\bbb\eee\" > normalized 1`] = ` +exports[`test/normalize-unicode.js > TAP > normalize with strip slashes > "\bbb\eee\" > normalized 1`] = ` \bbb\eee\ ` -exports[`test/normalize-unicode.js TAP normalize with strip slashes "\\\\\eee\\\\\\" > normalized 1`] = ` -\\\\\eee\\\\\\ +exports[`test/normalize-unicode.js > TAP > normalize with strip slashes > "1/4foo.txt" > normalized 1`] = ` +1/4foo.txt +` + +exports[`test/normalize-unicode.js > TAP > normalize with strip slashes > "¼foo.txt" > normalized 1`] = ` +¼foo.txt ` diff --git a/test/create.js b/test/create.js index fdfd78b8..9292c66d 100644 --- a/test/create.js +++ b/test/create.js @@ -1,40 +1,40 @@ -'use strict' +import t from 'tap' +import { c, list, Pack, PackSync } from '../dist/esm/index.js' +import fs from 'fs' +import path from 'path' +import { rimraf } from 'rimraf' +import { mkdirp } from 'mkdirp' +import mutateFS from 'mutate-fs' +import { spawn } from 'child_process' +import { fileURLToPath } from 'url' const isWindows = process.platform === 'win32' -const t = require('tap') -const c = require('../lib/create.js') -const list = require('../lib/list.js') -const fs = require('fs') -const path = require('path') +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) const dir = path.resolve(__dirname, 'fixtures/create') const tars = path.resolve(__dirname, 'fixtures/tars') -const rimraf = require('rimraf') -const mkdirp = require('mkdirp') -const spawn = require('child_process').spawn -const Pack = require('../lib/pack.js') -const mutateFS = require('mutate-fs') -const { promisify } = require('util') const readtar = (file, cb) => { const child = spawn('tar', ['tf', file]) const out = [] child.stdout.on('data', c => out.push(c)) child.on('close', (code, signal) => - cb(code, signal, Buffer.concat(out).toString())) + cb(code, signal, Buffer.concat(out).toString()), + ) } -t.teardown(() => new Promise(resolve => rimraf(dir, resolve))) +t.teardown(() => rimraf(dir)) t.before(async () => { - await promisify(rimraf)(dir) + await rimraf(dir) await mkdirp(dir) }) t.test('no cb if sync or without file', t => { - t.throws(_ => c({ sync: true }, ['asdf'], _ => _)) - t.throws(_ => c(_ => _)) - t.throws(_ => c({}, _ => _)) - t.throws(_ => c({}, ['asdf'], _ => _)) + t.throws(() => c({ sync: true }, ['asdf'], () => {})) + t.throws(() => c(() => {})) + t.throws(() => c({}, () => {})) + t.throws(() => c({}, ['asdf'], () => {})) t.end() }) @@ -43,11 +43,14 @@ t.test('create file', t => { t.test('sync', t => { const file = path.resolve(dir, 'sync.tar') - c({ - file: file, - cwd: __dirname, - sync: true, - 
}, files) + c( + { + file: file, + cwd: __dirname, + sync: true, + }, + files, + ) readtar(file, (code, signal, list) => { t.equal(code, 0) t.equal(signal, null) @@ -58,28 +61,35 @@ t.test('create file', t => { t.test('async', t => { const file = path.resolve(dir, 'async.tar') - c({ - file: file, - cwd: __dirname, - }, files, er => { - if (er) { - throw er - } - readtar(file, (code, signal, list) => { - t.equal(code, 0) - t.equal(signal, null) - t.equal(list.trim(), 'create.js') - t.end() - }) - }) + c( + { + file: file, + cwd: __dirname, + }, + files, + er => { + if (er) { + throw er + } + readtar(file, (code, signal, list) => { + t.equal(code, 0) + t.equal(signal, null) + t.equal(list.trim(), 'create.js') + t.end() + }) + }, + ) }) t.test('async promise only', t => { const file = path.resolve(dir, 'promise.tar') - c({ - file: file, - cwd: __dirname, - }, files).then(_ => { + c( + { + file: file, + cwd: __dirname, + }, + files, + ).then(() => { readtar(file, (code, signal, list) => { t.equal(code, 0) t.equal(signal, null) @@ -93,12 +103,15 @@ t.test('create file', t => { const mode = isWindows ? 0o666 : 0o740 t.test('sync', t => { const file = path.resolve(dir, 'sync-mode.tar') - c({ - mode: mode, - file: file, - cwd: __dirname, - sync: true, - }, files) + c( + { + mode: mode, + file: file, + cwd: __dirname, + sync: true, + }, + files, + ) readtar(file, (code, signal, list) => { t.equal(code, 0) t.equal(signal, null) @@ -110,22 +123,26 @@ t.test('create file', t => { t.test('async', t => { const file = path.resolve(dir, 'async-mode.tar') - c({ - mode: mode, - file: file, - cwd: __dirname, - }, files, er => { - if (er) { - throw er - } - readtar(file, (code, signal, list) => { - t.equal(code, 0) - t.equal(signal, null) - t.equal(list.trim(), 'create.js') - t.equal(fs.lstatSync(file).mode & 0o7777, mode) - t.end() - }) - }) + c( + { + mode: mode, + file: file, + cwd: __dirname, + }, + files, + er => { + if (er) { + throw er + } + readtar(file, (code, signal, list) => { + t.equal(code, 0) + t.equal(signal, null) + t.equal(list.trim(), 'create.js') + t.equal(fs.lstatSync(file).mode & 0o7777, mode) + t.end() + }) + }, + ) }) t.end() @@ -134,7 +151,7 @@ t.test('create file', t => { }) t.test('create', t => { - t.type(c({ sync: true }, ['README.md']), Pack.Sync) + t.type(c({ sync: true }, ['README.md']), PackSync) t.type(c(['README.md']), Pack) t.end() }) @@ -143,12 +160,17 @@ t.test('open fails', t => { const poop = new Error('poop') const file = path.resolve(dir, 'throw-open.tar') t.teardown(mutateFS.statFail(poop)) - t.throws(_ => c({ - file: file, - sync: true, - cwd: __dirname, - }, [path.basename(__filename)])) - t.throws(_ => fs.lstatSync(file)) + t.throws(() => + c( + { + file: file, + sync: true, + cwd: __dirname, + }, + [path.basename(__filename)], + ), + ) + t.throws(() => fs.lstatSync(file)) t.end() }) @@ -158,25 +180,31 @@ t.test('gzipped tarball that makes some drain/resume stuff', t => { // don't include node_modules/.cache, since that gets written to // by nyc during tests, and can result in spurious errors. 
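// Note the tightened filter below: /^[@.]/ now skips scoped (@-prefixed)
// package directories as well as dot-prefixed entries when building the
// list of things to pack.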
- const entries = fs.readdirSync(`${cwd}/node_modules`) - .filter(e => !/^\./.test(e)) + const entries = fs + .readdirSync(`${cwd}/node_modules`) + .filter(e => !/^[@.]/.test(e)) .map(e => `node_modules/${e}`) - c({ z: true, C: cwd }, entries) - .pipe(fs.createWriteStream(out)) - .on('finish', _ => { - const child = spawn('tar', ['tf', out], { - stdio: ['ignore', 'ignore', 'pipe'], - }) - child.stderr.on('data', c => { - t.fail(c + '') - }) - child.on('close', (code, signal) => { - t.equal(code, 0) - t.equal(signal, null) - t.end() - }) + const stream = c({ z: true, C: cwd }, entries) + + const outStream = fs.createWriteStream(out) + outStream.on('drain', () => { + stream.resume() + }) + + stream.pipe(outStream).on('finish', () => { + const child = spawn('tar', ['tf', out], { + stdio: ['ignore', 'ignore', 'pipe'], + }) + child.stderr.on('data', c => { + t.fail(c + '') + }) + child.on('close', (code, signal) => { + t.equal(code, 0) + t.equal(signal, null) + t.end() }) + }) }) t.test('create tarball out of another tarball', t => { @@ -192,7 +220,8 @@ t.test('create tarball out of another tarball', t => { 'hardlink-2', 'symlink', ] - list({ f: out, + list({ + f: out, sync: true, onentry: entry => { if (entry.path === 'hardlink-2') { @@ -205,25 +234,33 @@ t.test('create tarball out of another tarball', t => { t.equal(entry.type, 'File') } t.equal(entry.path, expect.shift()) - } }) + }, + }) t.same(expect, []) t.end() } t.test('sync', t => { - c({ - f: out, - cwd: tars, - sync: true, - }, ['@dir.tar', '@utf8.tar', '@links.tar']) + c( + { + f: out, + cwd: tars, + sync: true, + }, + ['@dir.tar', '@utf8.tar', '@links.tar'], + ) check(t) }) - t.test('async', t => { - c({ - f: out, - cwd: tars, - }, ['@dir.tar', '@utf8.tar', '@links.tar'], _ => check(t)) + t.test('async', async t => { + await c( + { + f: out, + cwd: tars, + }, + ['@dir.tar', '@utf8.tar', '@links.tar'], + ) + check(t) }) t.end() diff --git a/test/cwd-error.js b/test/cwd-error.js new file mode 100644 index 00000000..1961b404 --- /dev/null +++ b/test/cwd-error.js @@ -0,0 +1,10 @@ +import t from 'tap' +import { CwdError } from '../dist/esm/cwd-error.js' + +t.match(new CwdError('path', 'code'), { + name: 'CwdError', + path: 'path', + code: 'code', + syscall: 'chdir', + message: `code: Cannot cd into 'path'`, +}) diff --git a/test/extract.js b/test/extract.js index c11d0afc..8fe8eff1 100644 --- a/test/extract.js +++ b/test/extract.js @@ -1,18 +1,22 @@ -'use strict' - -const t = require('tap') -const nock = require('nock') -const x = require('../lib/extract.js') -const path = require('path') -const fs = require('fs') +import t from 'tap' +import nock from 'nock' +import { extract as x } from '../dist/esm/extract.js' +import path from 'path' +import fs from 'fs' +import { fileURLToPath } from 'url' +import { promisify } from 'util' +import { mkdirp } from 'mkdirp' +import { rimraf } from 'rimraf' +import { pipeline as PL } from 'stream' +import { Unpack, UnpackSync } from '../dist/esm/unpack.js' +const pipeline = promisify(PL) +import http from 'http' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) const extractdir = path.resolve(__dirname, 'fixtures/extract') const tars = path.resolve(__dirname, 'fixtures/tars') -const mkdirp = require('mkdirp') -const { promisify } = require('util') -const rimraf = promisify(require('rimraf')) -const mutateFS = require('mutate-fs') -const pipeline = promisify(require('stream').pipeline) -const http = require('http') +import mutateFS from 'mutate-fs' const 
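// tnock: a per-test nock scope; real network connections are disabled while
// the mocked `host` defined here is in effect.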
tnock = (t, host, opts) => { nock.disableNetConnect() @@ -24,7 +28,7 @@ const tnock = (t, host, opts) => { return server } -t.teardown(_ => rimraf(extractdir)) +t.teardown(() => rimraf(extractdir)) t.test('basic extracting', t => { const file = path.resolve(tars, 'utf8.tar') @@ -38,11 +42,15 @@ t.test('basic extracting', t => { const check = async t => { fs.lstatSync(dir + '/Ω.txt') fs.lstatSync(dir + '/🌟.txt') - t.throws(_ => fs.lstatSync(dir + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' + - '/f/o/l/d/e/r/-/p/a/t/h/Ω.txt')) + t.throws(() => + fs.lstatSync( + dir + + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' + + '/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + ), + ) await rimraf(dir) - t.end() } const files = ['🌟.txt', 'Ω.txt'] @@ -51,12 +59,13 @@ t.test('basic extracting', t => { return check(t) }) - t.test('async promisey', t => { - return x({ file: file, cwd: dir }, files).then(_ => check(t)) + t.test('async promisey', async t => { + await x({ file: file, cwd: dir }, files) + return check(t) }) - t.test('async cb', t => { - return x({ file: file, cwd: dir }, files, er => { + t.test('async cb', async t => { + await x({ file: file, cwd: dir }, files, er => { if (er) { throw er } @@ -67,11 +76,11 @@ t.test('basic extracting', t => { t.end() }) -t.test('ensure an open stream is not prematuraly closed', t => { +t.test('ensure an open stream is not prematurely closed', t => { t.plan(1) const file = path.resolve(tars, 'long-paths.tar') - const dir = path.resolve(extractdir, 'basic-with-stream') + const dir = t.testdir({}) t.beforeEach(async () => { await rimraf(dir) @@ -84,14 +93,12 @@ t.test('ensure an open stream is not prematuraly closed', t => { t.end() } - t.test('async promisey', t => { + t.test('async promisey', async t => { const stream = fs.createReadStream(file, { highWaterMark: 1, }) - pipeline( - stream, - x({ cwd: dir }) - ).then(_ => check(t)) + await pipeline(stream, x({ cwd: dir })) + return check(t) }) t.end() @@ -120,12 +127,13 @@ t.test('ensure an open stream is not prematuraly closed http', t => { .delay(250) .reply(200, () => fs.createReadStream(file)) - http.get('http://codeload.github.com/npm/node-tar/tar.gz/main', (stream) => { - return pipeline( - stream, - x({ cwd: dir }) - ).then(_ => check(t)) - }) + http.get( + 'http://codeload.github.com/npm/node-tar/tar.gz/main', + async stream => { + await pipeline(stream, x({ cwd: dir })) + return check(t) + }, + ) }) t.end() @@ -142,34 +150,47 @@ t.test('file list and filter', t => { const check = async t => { fs.lstatSync(dir + '/Ω.txt') - t.throws(_ => fs.lstatSync(dir + '/🌟.txt')) - t.throws(_ => fs.lstatSync(dir + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' + - '/f/o/l/d/e/r/-/p/a/t/h/Ω.txt')) + t.throws(() => fs.lstatSync(dir + '/🌟.txt')) + t.throws(() => + fs.lstatSync( + dir + + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' + + '/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + ), + ) await rimraf(dir) - t.end() } const filter = path => path === 'Ω.txt' t.test('sync', t => { - x({ filter: filter, file: file, sync: true, C: dir }, ['🌟.txt', 'Ω.txt']) + x({ filter: filter, file: file, sync: true, C: dir }, [ + '🌟.txt', + 'Ω.txt', + ]) return check(t) }) - t.test('async promisey', t => { - return x({ filter: filter, file: file, cwd: dir }, ['🌟.txt', 'Ω.txt']).then(_ => { - return check(t) - }) + t.test('async promisey', async t => { + await x({ filter: filter, file: file, cwd: dir }, [ + '🌟.txt', + 'Ω.txt', + ]) + check(t) }) t.test('async cb', t => { - return x({ filter: filter, file: file, cwd: dir }, ['🌟.txt', 'Ω.txt'], er => { - if (er) { - throw er - } - return check(t) - }) 
+ return x( + { filter: filter, file: file, cwd: dir }, + ['🌟.txt', 'Ω.txt'], + er => { + if (er) { + throw er + } + return check(t) + }, + ) }) t.end() @@ -185,12 +206,17 @@ t.test('no file list', t => { }) const check = async t => { - t.equal(fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, 1024) - t.equal(fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, 512) + t.equal( + fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, + 1024, + ) + t.equal( + fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, + 512, + ) t.equal(fs.lstatSync(path.resolve(dir, 'one-byte.txt')).size, 1) t.equal(fs.lstatSync(path.resolve(dir, 'zero-byte.txt')).size, 0) await rimraf(dir) - t.end() } t.test('sync', t => { @@ -198,10 +224,9 @@ t.test('no file list', t => { return check(t) }) - t.test('async promisey', t => { - return x({ file: file, cwd: dir }).then(_ => { - return check(t) - }) + t.test('async promisey', async t => { + await x({ file: file, cwd: dir }) + return check(t) }) t.test('async cb', t => { @@ -227,12 +252,17 @@ t.test('read in itty bits', t => { }) const check = async t => { - t.equal(fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, 1024) - t.equal(fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, 512) + t.equal( + fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, + 1024, + ) + t.equal( + fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, + 512, + ) t.equal(fs.lstatSync(path.resolve(dir, 'one-byte.txt')).size, 1) t.equal(fs.lstatSync(path.resolve(dir, 'zero-byte.txt')).size, 0) await rimraf(dir) - t.end() } t.test('sync', t => { @@ -240,49 +270,53 @@ t.test('read in itty bits', t => { return check(t) }) - t.test('async promisey', t => { - return x({ file: file, cwd: dir, maxReadSize: maxReadSize }).then(_ => { - return check(t) - }) + t.test('async promisey', async t => { + await x({ file: file, cwd: dir, maxReadSize: maxReadSize }) + return check(t) }) t.test('async cb', t => { - return x({ file: file, cwd: dir, maxReadSize: maxReadSize }, er => { - if (er) { - throw er - } - return check(t) - }) + return x( + { file: file, cwd: dir, maxReadSize: maxReadSize }, + er => { + if (er) { + throw er + } + return check(t) + }, + ) }) t.end() }) t.test('bad calls', t => { - t.throws(_ => x(_ => _)) - t.throws(_ => x({ sync: true }, _ => _)) - t.throws(_ => x({ sync: true }, [], _ => _)) + t.throws(() => x(() => {})) + t.throws(() => x({ sync: true }, () => {})) + t.throws(() => x({ sync: true }, [], () => {})) t.end() }) t.test('no file', t => { - const Unpack = require('../lib/unpack.js') t.type(x(), Unpack) t.type(x(['asdf']), Unpack) - t.type(x({ sync: true }), Unpack.Sync) + t.type(x({ sync: true }), UnpackSync) t.end() }) t.test('nonexistent', t => { - t.throws(_ => x({ sync: true, file: 'does not exist' })) - x({ file: 'does not exist' }).catch(_ => t.end()) + t.throws(() => x({ sync: true, file: 'does not exist' })) + x({ file: 'does not exist' }).catch(() => t.end()) }) t.test('read fail', t => { const poop = new Error('poop') t.teardown(mutateFS.fail('read', poop)) - t.throws(_ => x({ maxReadSize: 10, sync: true, file: __filename }), poop) + t.throws( + () => x({ maxReadSize: 10, sync: true, file: __filename }), + poop, + ) t.end() }) @@ -305,8 +339,18 @@ t.test('sync gzip error edge case test', async t => { }, }) - t.same(fs.readdirSync(dir + '/x').sort(), - ['1', '10', '2', '3', '4', '5', '6', '7', '8', '9']) + t.same(fs.readdirSync(dir + '/x').sort(), [ + '1', + '10', + '2', + '3', + '4', + '5', + '6', + '7', + '8', + '9', + ]) t.end() }) @@ -328,24 
+372,46 @@ t.test('brotli', async t => { const f = fs.openSync(filename, 'a') fs.closeSync(f) - const expect = new Error('TAR_BAD_ARCHIVE: Unrecognized archive format') + const expect = new Error( + 'TAR_BAD_ARCHIVE: Unrecognized archive format', + ) - t.throws(_ => x({ sync: true, file: filename }), expect) + t.throws(() => x({ sync: true, file: filename }), expect) }) t.test('succeeds based on file extension', t => { x({ sync: true, file: file, C: dir }) - t.same(fs.readdirSync(dir + '/x').sort(), - ['1', '10', '2', '3', '4', '5', '6', '7', '8', '9']) + t.same(fs.readdirSync(dir + '/x').sort(), [ + '1', + '10', + '2', + '3', + '4', + '5', + '6', + '7', + '8', + '9', + ]) t.end() }) t.test('succeeds when passed explicit option', t => { x({ sync: true, file: file, C: dir, brotli: true }) - t.same(fs.readdirSync(dir + '/x').sort(), - ['1', '10', '2', '3', '4', '5', '6', '7', '8', '9']) + t.same(fs.readdirSync(dir + '/x').sort(), [ + '1', + '10', + '2', + '3', + '4', + '5', + '6', + '7', + '8', + '9', + ]) t.end() }) }) diff --git a/test/fixtures/make-tar.js b/test/fixtures/make-tar.js new file mode 100644 index 00000000..77ee2b26 --- /dev/null +++ b/test/fixtures/make-tar.js @@ -0,0 +1,26 @@ +import { Header } from '../../dist/esm/header.js' +export const makeTar = chunks => { + let dataLen = 0 + return Buffer.concat( + chunks.map(chunk => { + if (Buffer.isBuffer(chunk)) { + dataLen += chunk.length + return chunk + } + const size = Math.max( + typeof chunk === 'string' + ? 512 * Math.ceil(chunk.length / 512) + : 512, + ) + dataLen += size + const buf = Buffer.alloc(size) + if (typeof chunk === 'string') { + buf.write(chunk) + } else { + new Header(chunk).encode(buf, 0) + } + return buf + }), + dataLen, + ) +} diff --git a/test/fixtures/parse/bad-cksum--filter-strict.json b/test/fixtures/parse/bad-cksum--filter-strict.json index edfce539..48e0680b 100644 --- a/test/fixtures/parse/bad-cksum--filter-strict.json +++ b/test/fixtures/parse/bad-cksum--filter-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/bad-cksum--filter.json b/test/fixtures/parse/bad-cksum--filter.json index 55274a4e..d2664877 100644 --- a/test/fixtures/parse/bad-cksum--filter.json +++ b/test/fixtures/parse/bad-cksum--filter.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/bad-cksum--meta-250-filter-strict.json b/test/fixtures/parse/bad-cksum--meta-250-filter-strict.json index edfce539..48e0680b 100644 --- a/test/fixtures/parse/bad-cksum--meta-250-filter-strict.json +++ b/test/fixtures/parse/bad-cksum--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 
@@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/bad-cksum--meta-250-filter.json b/test/fixtures/parse/bad-cksum--meta-250-filter.json index 55274a4e..d2664877 100644 --- a/test/fixtures/parse/bad-cksum--meta-250-filter.json +++ b/test/fixtures/parse/bad-cksum--meta-250-filter.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/bad-cksum--meta-250-strict.json b/test/fixtures/parse/bad-cksum--meta-250-strict.json index edfce539..48e0680b 100644 --- a/test/fixtures/parse/bad-cksum--meta-250-strict.json +++ b/test/fixtures/parse/bad-cksum--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/bad-cksum--meta-250.json b/test/fixtures/parse/bad-cksum--meta-250.json index 55274a4e..d2664877 100644 --- a/test/fixtures/parse/bad-cksum--meta-250.json +++ b/test/fixtures/parse/bad-cksum--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/bad-cksum--strict.json b/test/fixtures/parse/bad-cksum--strict.json index edfce539..48e0680b 100644 --- a/test/fixtures/parse/bad-cksum--strict.json +++ b/test/fixtures/parse/bad-cksum--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/bad-cksum.json b/test/fixtures/parse/bad-cksum.json index 55274a4e..d2664877 100644 --- a/test/fixtures/parse/bad-cksum.json +++ b/test/fixtures/parse/bad-cksum.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", 
"header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--filter-strict.json b/test/fixtures/parse/body-byte-counts--filter-strict.json index 7290291b..9462cd61 100644 --- a/test/fixtures/parse/body-byte-counts--filter-strict.json +++ b/test/fixtures/parse/body-byte-counts--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--filter.json b/test/fixtures/parse/body-byte-counts--filter.json index 7290291b..9462cd61 100644 --- a/test/fixtures/parse/body-byte-counts--filter.json +++ b/test/fixtures/parse/body-byte-counts--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, 
"mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--meta-250-filter-strict.json b/test/fixtures/parse/body-byte-counts--meta-250-filter-strict.json index 7290291b..9462cd61 100644 --- a/test/fixtures/parse/body-byte-counts--meta-250-filter-strict.json +++ b/test/fixtures/parse/body-byte-counts--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--meta-250-filter.json b/test/fixtures/parse/body-byte-counts--meta-250-filter.json index 7290291b..9462cd61 100644 --- a/test/fixtures/parse/body-byte-counts--meta-250-filter.json +++ b/test/fixtures/parse/body-byte-counts--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 
0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--meta-250-strict.json b/test/fixtures/parse/body-byte-counts--meta-250-strict.json index 8069e9cd..8ae48534 100644 --- a/test/fixtures/parse/body-byte-counts--meta-250-strict.json +++ b/test/fixtures/parse/body-byte-counts--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--meta-250.json b/test/fixtures/parse/body-byte-counts--meta-250.json index 8069e9cd..8ae48534 100644 --- 
a/test/fixtures/parse/body-byte-counts--meta-250.json +++ b/test/fixtures/parse/body-byte-counts--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts--strict.json b/test/fixtures/parse/body-byte-counts--strict.json index 8069e9cd..8ae48534 100644 --- a/test/fixtures/parse/body-byte-counts--strict.json +++ b/test/fixtures/parse/body-byte-counts--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ 
-132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/body-byte-counts.json b/test/fixtures/parse/body-byte-counts.json index 8069e9cd..8ae48534 100644 --- a/test/fixtures/parse/body-byte-counts.json +++ b/test/fixtures/parse/body-byte-counts.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1024, "mtime": "2017-04-10T16:57:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "staff", "size": 512, "mtime": "2017-04-10T17:08:55.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -93,9 +77,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -110,17 +91,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -132,9 +109,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:08:01.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -149,9 +123,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--filter-strict.json b/test/fixtures/parse/dir--filter-strict.json index 69926438..52757dae 100644 --- a/test/fixtures/parse/dir--filter-strict.json +++ b/test/fixtures/parse/dir--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--filter.json b/test/fixtures/parse/dir--filter.json index 69926438..52757dae 100644 --- a/test/fixtures/parse/dir--filter.json +++ b/test/fixtures/parse/dir--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": 
"isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--meta-250-filter-strict.json b/test/fixtures/parse/dir--meta-250-filter-strict.json index 69926438..52757dae 100644 --- a/test/fixtures/parse/dir--meta-250-filter-strict.json +++ b/test/fixtures/parse/dir--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--meta-250-filter.json b/test/fixtures/parse/dir--meta-250-filter.json index 69926438..52757dae 100644 --- a/test/fixtures/parse/dir--meta-250-filter.json +++ b/test/fixtures/parse/dir--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--meta-250-strict.json b/test/fixtures/parse/dir--meta-250-strict.json index 0a933bf5..0688cc0b 100644 --- a/test/fixtures/parse/dir--meta-250-strict.json +++ b/test/fixtures/parse/dir--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--meta-250.json b/test/fixtures/parse/dir--meta-250.json index 0a933bf5..0688cc0b 100644 --- a/test/fixtures/parse/dir--meta-250.json +++ b/test/fixtures/parse/dir--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/dir--strict.json b/test/fixtures/parse/dir--strict.json index 0a933bf5..0688cc0b 100644 --- a/test/fixtures/parse/dir--strict.json +++ b/test/fixtures/parse/dir--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } 
], diff --git a/test/fixtures/parse/dir.json b/test/fixtures/parse/dir.json index 0a933bf5..0688cc0b 100644 --- a/test/fixtures/parse/dir.json +++ b/test/fixtures/parse/dir.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T17:00:17.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax--filter-strict.json b/test/fixtures/parse/emptypax--filter-strict.json index 2810367d..eb30c2be 100644 --- a/test/fixtures/parse/emptypax--filter-strict.json +++ b/test/fixtures/parse/emptypax--filter-strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax--filter.json b/test/fixtures/parse/emptypax--filter.json index 2810367d..eb30c2be 100644 --- a/test/fixtures/parse/emptypax--filter.json +++ b/test/fixtures/parse/emptypax--filter.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], 
diff --git a/test/fixtures/parse/emptypax--meta-250-filter-strict.json b/test/fixtures/parse/emptypax--meta-250-filter-strict.json index 2810367d..eb30c2be 100644 --- a/test/fixtures/parse/emptypax--meta-250-filter-strict.json +++ b/test/fixtures/parse/emptypax--meta-250-filter-strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax--meta-250-filter.json b/test/fixtures/parse/emptypax--meta-250-filter.json index 2810367d..eb30c2be 100644 --- a/test/fixtures/parse/emptypax--meta-250-filter.json +++ b/test/fixtures/parse/emptypax--meta-250-filter.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax--meta-250-strict.json b/test/fixtures/parse/emptypax--meta-250-strict.json index 686a428a..55125fe7 100644 --- a/test/fixtures/parse/emptypax--meta-250-strict.json +++ b/test/fixtures/parse/emptypax--meta-250-strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 
16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax--meta-250.json b/test/fixtures/parse/emptypax--meta-250.json index 686a428a..55125fe7 100644 --- a/test/fixtures/parse/emptypax--meta-250.json +++ b/test/fixtures/parse/emptypax--meta-250.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax--strict.json b/test/fixtures/parse/emptypax--strict.json index 686a428a..55125fe7 100644 --- a/test/fixtures/parse/emptypax--strict.json +++ b/test/fixtures/parse/emptypax--strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", 
"size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/emptypax.json b/test/fixtures/parse/emptypax.json index 686a428a..55125fe7 100644 --- a/test/fixtures/parse/emptypax.json +++ b/test/fixtures/parse/emptypax.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:33:21.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:33:21.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,17 +40,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -75,9 +58,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file--filter-strict.json b/test/fixtures/parse/file--filter-strict.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--filter-strict.json +++ b/test/fixtures/parse/file--filter-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file--filter.json b/test/fixtures/parse/file--filter.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--filter.json +++ b/test/fixtures/parse/file--filter.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file--meta-250-filter-strict.json b/test/fixtures/parse/file--meta-250-filter-strict.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--meta-250-filter-strict.json +++ b/test/fixtures/parse/file--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - 
"ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file--meta-250-filter.json b/test/fixtures/parse/file--meta-250-filter.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--meta-250-filter.json +++ b/test/fixtures/parse/file--meta-250-filter.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file--meta-250-strict.json b/test/fixtures/parse/file--meta-250-strict.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--meta-250-strict.json +++ b/test/fixtures/parse/file--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file--meta-250.json b/test/fixtures/parse/file--meta-250.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--meta-250.json +++ b/test/fixtures/parse/file--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file--strict.json b/test/fixtures/parse/file--strict.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file--strict.json +++ b/test/fixtures/parse/file--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/file.json b/test/fixtures/parse/file.json index 07e38b45..800a4e51 100644 --- a/test/fixtures/parse/file.json +++ b/test/fixtures/parse/file.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], 
diff --git a/test/fixtures/parse/global-header--filter-strict.json b/test/fixtures/parse/global-header--filter-strict.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--filter-strict.json +++ b/test/fixtures/parse/global-header--filter-strict.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header--filter.json b/test/fixtures/parse/global-header--filter.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--filter.json +++ b/test/fixtures/parse/global-header--filter.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header--meta-250-filter-strict.json b/test/fixtures/parse/global-header--meta-250-filter-strict.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--meta-250-filter-strict.json +++ b/test/fixtures/parse/global-header--meta-250-filter-strict.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header--meta-250-filter.json b/test/fixtures/parse/global-header--meta-250-filter.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--meta-250-filter.json +++ b/test/fixtures/parse/global-header--meta-250-filter.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, 
"mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header--meta-250-strict.json b/test/fixtures/parse/global-header--meta-250-strict.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--meta-250-strict.json +++ b/test/fixtures/parse/global-header--meta-250-strict.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header--meta-250.json b/test/fixtures/parse/global-header--meta-250.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--meta-250.json +++ b/test/fixtures/parse/global-header--meta-250.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header--strict.json b/test/fixtures/parse/global-header--strict.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header--strict.json +++ b/test/fixtures/parse/global-header--strict.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - "uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/global-header.json b/test/fixtures/parse/global-header.json index 114679a2..3032d06b 100644 --- a/test/fixtures/parse/global-header.json +++ b/test/fixtures/parse/global-header.json @@ -6,23 +6,8 @@ [ "entry", { - "extended": null, "globalExtended": { - "atime": null, - "charset": null, - "comment": null, - "ctime": null, - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, "path": "ab", - "size": null, - 
"uid": null, - "uname": null, - "dev": null, - "ino": null, - "nlink": null, "global": true }, "type": "File", @@ -36,9 +21,6 @@ "gname": "staff", "size": 1, "mtime": "2017-04-10T16:58:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +35,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links--filter-strict.json b/test/fixtures/parse/links--filter-strict.json index ed902aa1..acf569af 100644 --- a/test/fixtures/parse/links--filter-strict.json +++ b/test/fixtures/parse/links--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links--filter.json b/test/fixtures/parse/links--filter.json index ed902aa1..acf569af 100644 --- a/test/fixtures/parse/links--filter.json +++ b/test/fixtures/parse/links--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git 
a/test/fixtures/parse/links--meta-250-filter-strict.json b/test/fixtures/parse/links--meta-250-filter-strict.json index ed902aa1..acf569af 100644 --- a/test/fixtures/parse/links--meta-250-filter-strict.json +++ b/test/fixtures/parse/links--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links--meta-250-filter.json b/test/fixtures/parse/links--meta-250-filter.json index ed902aa1..acf569af 100644 --- a/test/fixtures/parse/links--meta-250-filter.json +++ b/test/fixtures/parse/links--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links--meta-250-strict.json b/test/fixtures/parse/links--meta-250-strict.json index fdf07254..216a3ba7 100644 --- a/test/fixtures/parse/links--meta-250-strict.json +++ b/test/fixtures/parse/links--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ 
"gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links--meta-250.json b/test/fixtures/parse/links--meta-250.json index fdf07254..216a3ba7 100644 --- a/test/fixtures/parse/links--meta-250.json +++ b/test/fixtures/parse/links--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links--strict.json b/test/fixtures/parse/links--strict.json index fdf07254..216a3ba7 100644 --- a/test/fixtures/parse/links--strict.json +++ b/test/fixtures/parse/links--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, 
- "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--filter-strict.json b/test/fixtures/parse/links-invalid--filter-strict.json index fd2d5dc8..6fdfb2d5 100644 --- a/test/fixtures/parse/links-invalid--filter-strict.json +++ b/test/fixtures/parse/links-invalid--filter-strict.json @@ -23,8 +23,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -36,8 +34,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -53,9 +49,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--filter.json b/test/fixtures/parse/links-invalid--filter.json index 42a58d14..1e14b8a0 100644 --- a/test/fixtures/parse/links-invalid--filter.json +++ b/test/fixtures/parse/links-invalid--filter.json @@ -17,8 +17,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -30,8 +28,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -47,9 +43,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--meta-250-filter-strict.json b/test/fixtures/parse/links-invalid--meta-250-filter-strict.json index fd2d5dc8..6fdfb2d5 100644 --- a/test/fixtures/parse/links-invalid--meta-250-filter-strict.json +++ b/test/fixtures/parse/links-invalid--meta-250-filter-strict.json @@ -23,8 +23,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -36,8 +34,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -53,9 +49,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--meta-250-filter.json b/test/fixtures/parse/links-invalid--meta-250-filter.json index 42a58d14..1e14b8a0 100644 --- a/test/fixtures/parse/links-invalid--meta-250-filter.json +++ b/test/fixtures/parse/links-invalid--meta-250-filter.json @@ -17,8 +17,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -30,8 +28,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -47,9 +43,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + 
"devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--meta-250-strict.json b/test/fixtures/parse/links-invalid--meta-250-strict.json index 0c6c72d0..6e9c1531 100644 --- a/test/fixtures/parse/links-invalid--meta-250-strict.json +++ b/test/fixtures/parse/links-invalid--meta-250-strict.json @@ -23,8 +23,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -36,8 +34,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -53,9 +49,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--meta-250.json b/test/fixtures/parse/links-invalid--meta-250.json index c62395e3..569dcb62 100644 --- a/test/fixtures/parse/links-invalid--meta-250.json +++ b/test/fixtures/parse/links-invalid--meta-250.json @@ -17,8 +17,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -30,8 +28,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -47,9 +43,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid--strict.json b/test/fixtures/parse/links-invalid--strict.json index 0c6c72d0..6e9c1531 100644 --- a/test/fixtures/parse/links-invalid--strict.json +++ b/test/fixtures/parse/links-invalid--strict.json @@ -23,8 +23,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -36,8 +34,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -53,9 +49,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-invalid.json b/test/fixtures/parse/links-invalid.json index c62395e3..569dcb62 100644 --- a/test/fixtures/parse/links-invalid.json +++ b/test/fixtures/parse/links-invalid.json @@ -17,8 +17,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -30,8 +28,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -47,9 +43,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--filter-strict.json b/test/fixtures/parse/links-strip--filter-strict.json index 9a640e0b..47069c6b 100644 --- a/test/fixtures/parse/links-strip--filter-strict.json +++ b/test/fixtures/parse/links-strip--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - 
"globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--filter.json b/test/fixtures/parse/links-strip--filter.json index 9a640e0b..47069c6b 100644 --- a/test/fixtures/parse/links-strip--filter.json +++ b/test/fixtures/parse/links-strip--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - 
"atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--meta-250-filter-strict.json b/test/fixtures/parse/links-strip--meta-250-filter-strict.json index 9a640e0b..47069c6b 100644 --- a/test/fixtures/parse/links-strip--meta-250-filter-strict.json +++ b/test/fixtures/parse/links-strip--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--meta-250-filter.json b/test/fixtures/parse/links-strip--meta-250-filter.json index 9a640e0b..47069c6b 100644 --- a/test/fixtures/parse/links-strip--meta-250-filter.json +++ b/test/fixtures/parse/links-strip--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", 
"devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": true, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--meta-250-strict.json b/test/fixtures/parse/links-strip--meta-250-strict.json index 17e5397d..f908c9e3 100644 --- a/test/fixtures/parse/links-strip--meta-250-strict.json +++ b/test/fixtures/parse/links-strip--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, 
"linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--meta-250.json b/test/fixtures/parse/links-strip--meta-250.json index 17e5397d..f908c9e3 100644 --- a/test/fixtures/parse/links-strip--meta-250.json +++ b/test/fixtures/parse/links-strip--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip--strict.json b/test/fixtures/parse/links-strip--strict.json index 17e5397d..f908c9e3 100644 --- a/test/fixtures/parse/links-strip--strict.json +++ b/test/fixtures/parse/links-strip--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", 
"gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links-strip.json b/test/fixtures/parse/links-strip.json index 17e5397d..f908c9e3 100644 --- a/test/fixtures/parse/links-strip.json +++ b/test/fixtures/parse/links-strip.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "", "size": 26, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,17 +59,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -93,8 +77,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -110,17 +92,13 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -132,8 +110,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "strip-dir/hardlink-1", "header": { "cksumValid": true, @@ -149,17 +125,13 @@ 
"uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -171,8 +143,6 @@ "gname": "", "size": 0, "mtime": "2018-11-06T01:45:25.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -188,9 +158,7 @@ "uname": "", "gname": "", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/links.json b/test/fixtures/parse/links.json index fdf07254..216a3ba7 100644 --- a/test/fixtures/parse/links.json +++ b/test/fixtures/parse/links.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 26, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Link", "meta": false, "ignore": false, @@ -54,8 +45,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:05.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-1", "header": { "cksumValid": true, @@ -71,17 +60,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -93,8 +78,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T19:27:33.000Z", - "atime": null, - "ctime": null, "linkpath": "hardlink-2", "header": { "cksumValid": true, @@ -110,9 +93,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--filter-strict.json b/test/fixtures/parse/long-paths--filter-strict.json index 8dfe9de3..53e376da 100644 --- a/test/fixtures/parse/long-paths--filter-strict.json +++ b/test/fixtures/parse/long-paths--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": 
"", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -107,23 +85,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:56:18.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836326, "nlink": 1, + "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -136,8 +103,6 @@ "size": 100, "mtime": "2017-04-10T16:56:18.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:56:18.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -152,17 +117,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -174,9 +135,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -191,17 +149,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -213,9 +167,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -230,17 +181,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -252,9 +199,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -269,17 +213,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -291,9 +231,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -308,17 +245,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -330,9 +263,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -347,17 +277,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], 
[ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -369,9 +295,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -386,17 +309,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -408,9 +327,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -425,17 +341,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -447,9 +359,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -464,17 +373,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -486,9 +391,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -503,17 +405,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -525,9 +423,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -542,17 +437,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -564,9 +455,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -581,17 +469,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -603,9 +487,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -620,17 +501,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -642,9 +519,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -659,17 +533,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null 
+ "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -681,9 +551,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -698,17 +565,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -720,9 +583,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -737,17 +597,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -759,9 +615,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -776,17 +629,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -798,9 +647,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -815,17 +661,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -837,9 +679,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -854,17 +693,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -876,9 +711,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -893,17 +725,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -915,9 +743,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -932,17 +757,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -954,9 +775,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -971,17 +789,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": 
null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -993,9 +807,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1010,17 +821,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1032,9 +839,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1049,17 +853,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1071,9 +871,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1088,17 +885,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1110,9 +903,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1127,17 +917,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1149,9 +935,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1166,9 +949,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1181,23 +962,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836253, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1210,8 +980,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1226,9 +994,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1241,23 +1007,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": 
"2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836254, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1270,8 +1025,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1286,9 +1039,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1301,23 +1052,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1330,8 +1070,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1346,9 +1084,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--filter.json b/test/fixtures/parse/long-paths--filter.json index 8dfe9de3..53e376da 100644 --- a/test/fixtures/parse/long-paths--filter.json +++ b/test/fixtures/parse/long-paths--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", 
"header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -107,23 +85,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:56:18.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836326, "nlink": 1, + "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -136,8 +103,6 @@ "size": 100, "mtime": "2017-04-10T16:56:18.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:56:18.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -152,17 +117,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -174,9 +135,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -191,17 +149,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -213,9 +167,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -230,17 +181,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -252,9 +199,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -269,17 +213,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -291,9 +231,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -308,17 +245,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -330,9 +263,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -347,17 +277,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ 
"ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -369,9 +295,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -386,17 +309,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -408,9 +327,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -425,17 +341,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -447,9 +359,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -464,17 +373,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -486,9 +391,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -503,17 +405,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -525,9 +423,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -542,17 +437,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -564,9 +455,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -581,17 +469,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -603,9 +487,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -620,17 +501,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -642,9 +519,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -659,17 +533,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + 
"devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -681,9 +551,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -698,17 +565,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -720,9 +583,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -737,17 +597,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -759,9 +615,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -776,17 +629,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -798,9 +647,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -815,17 +661,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -837,9 +679,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -854,17 +693,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -876,9 +711,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -893,17 +725,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -915,9 +743,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -932,17 +757,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -954,9 +775,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -971,17 +789,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": 
null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -993,9 +807,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1010,17 +821,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1032,9 +839,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1049,17 +853,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1071,9 +871,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1088,17 +885,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1110,9 +903,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1127,17 +917,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1149,9 +935,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1166,9 +949,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1181,23 +962,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836253, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1210,8 +980,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1226,9 +994,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1241,23 +1007,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": 
"2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836254, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1270,8 +1025,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1286,9 +1039,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1301,23 +1052,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1330,8 +1070,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1346,9 +1084,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--meta-250-filter-strict.json b/test/fixtures/parse/long-paths--meta-250-filter-strict.json index 2a2e4960..0ceedf38 100644 --- a/test/fixtures/parse/long-paths--meta-250-filter-strict.json +++ b/test/fixtures/parse/long-paths--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": 
"2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,17 +72,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -114,9 +90,6 @@ "gname": "staff", "size": 283, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -131,17 +104,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -153,9 +122,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -170,17 +136,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -192,9 +154,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -209,17 +168,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -231,9 +186,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -248,17 +200,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -270,9 +218,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -287,17 +232,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -309,9 +250,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -326,17 +264,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -348,9 +282,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -365,17 +296,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -387,9 +314,6 @@ "gname": 
"staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -404,17 +328,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -426,9 +346,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -443,17 +360,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -465,9 +378,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -482,17 +392,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -504,9 +410,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -521,17 +424,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -543,9 +442,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -560,17 +456,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -582,9 +474,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -599,17 +488,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -621,9 +506,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -638,17 +520,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -660,9 +538,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -677,17 +552,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ 
-699,9 +570,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -716,17 +584,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -738,9 +602,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -755,17 +616,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -777,9 +634,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -794,17 +648,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -816,9 +666,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -833,17 +680,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -855,9 +698,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -872,17 +712,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -894,9 +730,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -911,17 +744,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -933,9 +762,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -950,17 +776,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -972,9 +794,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -989,17 +808,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": 
false, "ignore": true, @@ -1011,9 +826,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1028,17 +840,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1050,9 +858,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1067,17 +872,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1089,9 +890,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1106,17 +904,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1128,9 +922,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1145,17 +936,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1167,9 +954,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1184,17 +968,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1206,9 +986,6 @@ "gname": "staff", "size": 289, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1223,17 +1000,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1245,9 +1018,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1262,17 +1032,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1284,9 +1050,6 @@ "gname": "staff", "size": 339, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1301,17 +1064,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - 
"globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1323,9 +1082,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1340,9 +1096,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1355,23 +1109,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1384,8 +1127,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1400,9 +1141,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--meta-250-filter.json b/test/fixtures/parse/long-paths--meta-250-filter.json index 2a2e4960..0ceedf38 100644 --- a/test/fixtures/parse/long-paths--meta-250-filter.json +++ b/test/fixtures/parse/long-paths--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,17 +72,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -114,9 +90,6 @@ "gname": "staff", "size": 283, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -131,17 +104,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": 
null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -153,9 +122,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -170,17 +136,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -192,9 +154,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -209,17 +168,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -231,9 +186,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -248,17 +200,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -270,9 +218,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -287,17 +232,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -309,9 +250,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -326,17 +264,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -348,9 +282,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -365,17 +296,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -387,9 +314,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -404,17 +328,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -426,9 +346,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -443,17 +360,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ 
"ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -465,9 +378,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -482,17 +392,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -504,9 +410,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -521,17 +424,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -543,9 +442,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -560,17 +456,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -582,9 +474,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -599,17 +488,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -621,9 +506,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -638,17 +520,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -660,9 +538,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -677,17 +552,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -699,9 +570,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -716,17 +584,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -738,9 +602,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -755,17 +616,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + 
"devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -777,9 +634,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -794,17 +648,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -816,9 +666,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -833,17 +680,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -855,9 +698,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -872,17 +712,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -894,9 +730,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -911,17 +744,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -933,9 +762,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -950,17 +776,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -972,9 +794,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -989,17 +808,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1011,9 +826,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1028,17 +840,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1050,9 +858,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1067,17 +872,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - 
"atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -1089,9 +890,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1106,17 +904,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1128,9 +922,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1145,17 +936,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1167,9 +954,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1184,17 +968,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1206,9 +986,6 @@ "gname": "staff", "size": 289, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1223,17 +1000,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1245,9 +1018,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1262,17 +1032,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1284,9 +1050,6 @@ "gname": "staff", "size": 339, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1301,17 +1064,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1323,9 +1082,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1340,9 +1096,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1355,23 +1109,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 
16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -1384,8 +1127,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1400,9 +1141,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--meta-250-strict.json b/test/fixtures/parse/long-paths--meta-250-strict.json index 532f2365..7fdf8ac4 100644 --- a/test/fixtures/parse/long-paths--meta-250-strict.json +++ b/test/fixtures/parse/long-paths--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,17 +72,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -114,9 +90,6 @@ "gname": "staff", "size": 283, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -131,17 +104,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -153,9 +122,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -170,17 +136,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -192,9 +154,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -209,17 +168,13 @@ "uname": "isaacs", "gname": "staff", 
"devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -231,9 +186,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -248,17 +200,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -270,9 +218,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -287,17 +232,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -309,9 +250,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -326,17 +264,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -348,9 +282,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -365,17 +296,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -387,9 +314,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -404,17 +328,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -426,9 +346,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -443,17 +360,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -465,9 +378,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -482,17 +392,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -504,9 +410,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -521,17 +424,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 
0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -543,9 +442,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -560,17 +456,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -582,9 +474,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -599,17 +488,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -621,9 +506,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -638,17 +520,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -660,9 +538,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -677,17 +552,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -699,9 +570,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -716,17 +584,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -738,9 +602,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -755,17 +616,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -777,9 +634,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -794,17 +648,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -816,9 +666,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -833,17 +680,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - 
"ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -855,9 +698,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -872,17 +712,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -894,9 +730,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -911,17 +744,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -933,9 +762,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -950,17 +776,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -972,9 +794,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -989,17 +808,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1011,9 +826,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1028,17 +840,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1050,9 +858,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1067,17 +872,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1089,9 +890,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1106,17 +904,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1128,9 +922,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1145,17 +936,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + 
"devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1167,9 +954,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1184,17 +968,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1206,9 +986,6 @@ "gname": "staff", "size": 289, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1223,17 +1000,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1245,9 +1018,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1262,17 +1032,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1284,9 +1050,6 @@ "gname": "staff", "size": 339, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1301,17 +1064,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1323,9 +1082,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1340,9 +1096,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1355,23 +1109,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1384,8 +1127,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1400,9 +1141,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--meta-250.json b/test/fixtures/parse/long-paths--meta-250.json index 532f2365..7fdf8ac4 100644 --- a/test/fixtures/parse/long-paths--meta-250.json +++ b/test/fixtures/parse/long-paths--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ 
"gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,17 +72,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -114,9 +90,6 @@ "gname": "staff", "size": 283, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -131,17 +104,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -153,9 +122,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -170,17 +136,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -192,9 +154,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -209,17 +168,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -231,9 +186,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -248,17 +200,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -270,9 +218,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -287,17 +232,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", 
{ - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -309,9 +250,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -326,17 +264,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -348,9 +282,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -365,17 +296,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -387,9 +314,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -404,17 +328,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -426,9 +346,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -443,17 +360,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -465,9 +378,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -482,17 +392,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -504,9 +410,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -521,17 +424,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -543,9 +442,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -560,17 +456,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -582,9 +474,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -599,17 +488,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - 
"globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -621,9 +506,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -638,17 +520,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -660,9 +538,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -677,17 +552,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -699,9 +570,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -716,17 +584,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -738,9 +602,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -755,17 +616,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -777,9 +634,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -794,17 +648,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -816,9 +666,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -833,17 +680,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -855,9 +698,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -872,17 +712,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -894,9 +730,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -911,17 +744,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, 
"type": "Directory", "meta": false, "ignore": false, @@ -933,9 +762,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -950,17 +776,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -972,9 +794,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -989,17 +808,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1011,9 +826,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1028,17 +840,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1050,9 +858,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1067,17 +872,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1089,9 +890,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1106,17 +904,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1128,9 +922,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1145,17 +936,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1167,9 +954,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1184,17 +968,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1206,9 +986,6 @@ "gname": "staff", "size": 289, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1223,17 +1000,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": 
"File", "meta": false, "ignore": false, @@ -1245,9 +1018,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1262,17 +1032,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -1284,9 +1050,6 @@ "gname": "staff", "size": 339, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1301,17 +1064,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1323,9 +1082,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1340,9 +1096,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1355,23 +1109,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1384,8 +1127,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1400,9 +1141,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths--strict.json b/test/fixtures/parse/long-paths--strict.json index 835fd96c..fb49f544 100644 --- a/test/fixtures/parse/long-paths--strict.json +++ b/test/fixtures/parse/long-paths--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -76,8 
+58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -107,23 +85,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:56:18.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836326, "nlink": 1, + "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -136,8 +103,6 @@ "size": 100, "mtime": "2017-04-10T16:56:18.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:56:18.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -152,17 +117,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -174,9 +135,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -191,17 +149,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -213,9 +167,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -230,17 +181,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -252,9 +199,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -269,17 +213,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -291,9 +231,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -308,17 +245,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -330,9 +263,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -347,17 +277,13 @@ 
"uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -369,9 +295,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -386,17 +309,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -408,9 +327,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -425,17 +341,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -447,9 +359,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -464,17 +373,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -486,9 +391,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -503,17 +405,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -525,9 +423,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -542,17 +437,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -564,9 +455,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -581,17 +469,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -603,9 +487,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -620,17 +501,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -642,9 +519,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -659,17 +533,13 @@ "uname": "isaacs", 
"gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -681,9 +551,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -698,17 +565,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -720,9 +583,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -737,17 +597,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -759,9 +615,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -776,17 +629,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -798,9 +647,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -815,17 +661,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -837,9 +679,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -854,17 +693,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -876,9 +711,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -893,17 +725,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -915,9 +743,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -932,17 +757,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -954,9 +775,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -971,17 +789,13 @@ "uname": "isaacs", "gname": "staff", 
"devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -993,9 +807,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1010,17 +821,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1032,9 +839,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1049,17 +853,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1071,9 +871,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1088,17 +885,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1110,9 +903,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1127,17 +917,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1149,9 +935,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1166,9 +949,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1181,23 +962,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836253, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1210,8 +980,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1226,9 +994,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1241,23 +1007,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": 
"2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836254, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1270,8 +1025,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1286,9 +1039,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1301,23 +1052,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1330,8 +1070,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1346,9 +1084,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-paths.json b/test/fixtures/parse/long-paths.json index 835fd96c..fb49f544 100644 --- a/test/fixtures/parse/long-paths.json +++ b/test/fixtures/parse/long-paths.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -15,9 +13,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:53:02.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,9 +27,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -47,23 +40,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:54:12.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836297, "nlink": 1, + "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -76,8 +58,6 @@ "size": 100, "mtime": "2017-04-10T16:54:12.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:54:12.000Z", - "linkpath": "", "header": { "cksumValid": true, 
"needPax": false, @@ -92,9 +72,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -107,23 +85,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:56:18.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836326, "nlink": 1, + "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -136,8 +103,6 @@ "size": 100, "mtime": "2017-04-10T16:56:18.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:56:18.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -152,17 +117,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -174,9 +135,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -191,17 +149,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -213,9 +167,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -230,17 +181,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -252,9 +199,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -269,17 +213,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -291,9 +231,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -308,17 +245,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -330,9 +263,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -347,17 +277,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, 
"type": "Directory", "meta": false, "ignore": false, @@ -369,9 +295,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -386,17 +309,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -408,9 +327,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -425,17 +341,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -447,9 +359,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -464,17 +373,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -486,9 +391,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -503,17 +405,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -525,9 +423,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -542,17 +437,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -564,9 +455,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -581,17 +469,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -603,9 +487,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -620,17 +501,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -642,9 +519,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -659,17 +533,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", 
"meta": false, "ignore": false, @@ -681,9 +551,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -698,17 +565,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -720,9 +583,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -737,17 +597,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -759,9 +615,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -776,17 +629,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -798,9 +647,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -815,17 +661,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -837,9 +679,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -854,17 +693,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -876,9 +711,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -893,17 +725,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -915,9 +743,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -932,17 +757,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -954,9 +775,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -971,17 +789,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": 
false, @@ -993,9 +807,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1010,17 +821,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1032,9 +839,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1049,17 +853,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -1071,9 +871,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T16:58:47.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1088,17 +885,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1110,9 +903,6 @@ "gname": "staff", "size": 6, "mtime": "2017-04-10T16:56:46.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1127,17 +917,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1149,9 +935,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:52:20.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1166,9 +949,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1181,23 +962,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836253, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1210,8 +980,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1226,9 +994,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1241,23 +1007,12 @@ { "extended": { "atime": "2017-04-10T17:01:57.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:52:20.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": 
"long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836254, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1270,8 +1025,6 @@ "size": 100, "mtime": "2017-04-10T16:52:20.000Z", "atime": "2017-04-10T17:01:57.000Z", - "ctime": "2017-04-10T16:52:20.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1286,9 +1039,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -1301,23 +1052,12 @@ { "extended": { "atime": "2017-04-10T17:07:25.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -1330,8 +1070,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:07:25.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -1346,9 +1084,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--filter-strict.json b/test/fixtures/parse/long-pax--filter-strict.json index 7e9df8bb..57f80513 100644 --- a/test/fixtures/parse/long-pax--filter-strict.json +++ b/test/fixtures/parse/long-pax--filter-strict.json @@ -7,24 +7,16 @@ "ignoredEntry", { "extended": { - "atime": null, - "charset": null, + "mtime": "2017-04-10T16:54:12.000Z", "comment": "all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy", - "ctime": null, "gid": 20, + "uid": 501, "gname": "staff", - "linkpath": null, - "mtime": "2017-04-10T16:54:12.000Z", + "uname": "isaacs", "path": 
"120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "size": 100, - "uid": 501, - "uname": "isaacs", - "dev": null, - "ino": null, - "nlink": null, "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -36,9 +28,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +42,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--filter.json b/test/fixtures/parse/long-pax--filter.json index 7e9df8bb..57f80513 100644 --- a/test/fixtures/parse/long-pax--filter.json +++ b/test/fixtures/parse/long-pax--filter.json @@ -7,24 +7,16 @@ "ignoredEntry", { "extended": { - "atime": null, - "charset": null, + "mtime": "2017-04-10T16:54:12.000Z", "comment": "all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy", - "ctime": null, "gid": 20, + "uid": 501, "gname": "staff", - "linkpath": null, - "mtime": "2017-04-10T16:54:12.000Z", + "uname": "isaacs", "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "size": 100, - "uid": 501, - "uname": "isaacs", - "dev": null, - "ino": null, - "nlink": null, "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -36,9 +28,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +42,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--meta-250-filter-strict.json b/test/fixtures/parse/long-pax--meta-250-filter-strict.json index 584863e0..4129b6fc 100644 --- a/test/fixtures/parse/long-pax--meta-250-filter-strict.json +++ b/test/fixtures/parse/long-pax--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1282, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - 
"atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,9 +59,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--meta-250-filter.json b/test/fixtures/parse/long-pax--meta-250-filter.json index 584863e0..4129b6fc 100644 --- a/test/fixtures/parse/long-pax--meta-250-filter.json +++ b/test/fixtures/parse/long-pax--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1282, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -54,9 +45,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,9 +59,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--meta-250-strict.json b/test/fixtures/parse/long-pax--meta-250-strict.json index 5a19f0ec..f989196b 100644 --- a/test/fixtures/parse/long-pax--meta-250-strict.json +++ b/test/fixtures/parse/long-pax--meta-250-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1282, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,9 +59,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--meta-250.json b/test/fixtures/parse/long-pax--meta-250.json index 5a19f0ec..f989196b 100644 --- a/test/fixtures/parse/long-pax--meta-250.json +++ b/test/fixtures/parse/long-pax--meta-250.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "ExtendedHeader", "meta": true, "ignore": true, @@ -15,9 +13,6 @@ "gname": "staff", "size": 1282, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -32,17 +27,13 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], [ "entry", { - "extended": null, - 
"globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -54,9 +45,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -71,9 +59,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax--strict.json b/test/fixtures/parse/long-pax--strict.json index c72def09..2385c664 100644 --- a/test/fixtures/parse/long-pax--strict.json +++ b/test/fixtures/parse/long-pax--strict.json @@ -7,24 +7,16 @@ "entry", { "extended": { - "atime": null, - "charset": null, + "mtime": "2017-04-10T16:54:12.000Z", "comment": "all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy", - "ctime": null, "gid": 20, + "uid": 501, "gname": "staff", - "linkpath": null, - "mtime": "2017-04-10T16:54:12.000Z", + "uname": "isaacs", "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "size": 100, - "uid": 501, - "uname": "isaacs", - "dev": null, - "ino": null, - "nlink": null, "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -36,9 +28,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +42,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/long-pax.json b/test/fixtures/parse/long-pax.json index c72def09..2385c664 100644 --- a/test/fixtures/parse/long-pax.json +++ b/test/fixtures/parse/long-pax.json @@ -7,24 +7,16 @@ "entry", { "extended": { - "atime": null, - "charset": null, + "mtime": "2017-04-10T16:54:12.000Z", "comment": "all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no 
play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy all work and no play makes johnny a tar boy", - "ctime": null, "gid": 20, + "uid": 501, "gname": "staff", - "linkpath": null, - "mtime": "2017-04-10T16:54:12.000Z", + "uname": "isaacs", "path": "120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "size": 100, - "uid": 501, - "uname": "isaacs", - "dev": null, - "ino": null, - "nlink": null, "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -36,9 +28,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:54:12.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +42,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--filter-strict.json b/test/fixtures/parse/next-file-has-long--filter-strict.json index a010d9e3..efe114cd 100644 --- a/test/fixtures/parse/next-file-has-long--filter-strict.json +++ b/test/fixtures/parse/next-file-has-long--filter-strict.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--filter.json b/test/fixtures/parse/next-file-has-long--filter.json index a010d9e3..efe114cd 100644 --- a/test/fixtures/parse/next-file-has-long--filter.json +++ b/test/fixtures/parse/next-file-has-long--filter.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - 
"ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--meta-250-filter-strict.json b/test/fixtures/parse/next-file-has-long--meta-250-filter-strict.json index a010d9e3..efe114cd 100644 --- a/test/fixtures/parse/next-file-has-long--meta-250-filter-strict.json +++ b/test/fixtures/parse/next-file-has-long--meta-250-filter-strict.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--meta-250-filter.json b/test/fixtures/parse/next-file-has-long--meta-250-filter.json index a010d9e3..efe114cd 100644 --- a/test/fixtures/parse/next-file-has-long--meta-250-filter.json +++ b/test/fixtures/parse/next-file-has-long--meta-250-filter.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 
0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": true, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--meta-250-strict.json b/test/fixtures/parse/next-file-has-long--meta-250-strict.json index 9d6eef02..852c825b 100644 --- a/test/fixtures/parse/next-file-has-long--meta-250-strict.json +++ b/test/fixtures/parse/next-file-has-long--meta-250-strict.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--meta-250.json b/test/fixtures/parse/next-file-has-long--meta-250.json index 9d6eef02..852c825b 100644 --- a/test/fixtures/parse/next-file-has-long--meta-250.json +++ b/test/fixtures/parse/next-file-has-long--meta-250.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": 
"170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long--strict.json b/test/fixtures/parse/next-file-has-long--strict.json index 9d6eef02..852c825b 100644 --- a/test/fixtures/parse/next-file-has-long--strict.json +++ b/test/fixtures/parse/next-file-has-long--strict.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -66,8 +59,6 @@ "gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/next-file-has-long.json b/test/fixtures/parse/next-file-has-long.json index 9d6eef02..852c825b 100644 --- a/test/fixtures/parse/next-file-has-long.json +++ b/test/fixtures/parse/next-file-has-long.json @@ -9,7 +9,6 @@ "extended": { "path": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -21,9 +20,6 @@ "gname": "staff", "size": 100, "mtime": "2017-04-10T16:56:18.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -38,9 +34,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -54,7 +48,6 @@ "extended": { "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" }, - "globalExtended": null, "type": "SymbolicLink", "meta": false, "ignore": false, @@ -66,8 +59,6 @@ 
"gname": "staff", "size": 0, "mtime": "2017-04-10T23:22:33.000Z", - "atime": null, - "ctime": null, "linkpath": "170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "header": { "cksumValid": true, @@ -83,9 +74,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte--filter-strict.json b/test/fixtures/parse/null-byte--filter-strict.json index 3c27f2bb..e96a4f8a 100644 --- a/test/fixtures/parse/null-byte--filter-strict.json +++ b/test/fixtures/parse/null-byte--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte--filter.json b/test/fixtures/parse/null-byte--filter.json index 3c27f2bb..e96a4f8a 100644 --- a/test/fixtures/parse/null-byte--filter.json +++ b/test/fixtures/parse/null-byte--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte--meta-250-filter-strict.json 
b/test/fixtures/parse/null-byte--meta-250-filter-strict.json index 3c27f2bb..e96a4f8a 100644 --- a/test/fixtures/parse/null-byte--meta-250-filter-strict.json +++ b/test/fixtures/parse/null-byte--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte--meta-250-filter.json b/test/fixtures/parse/null-byte--meta-250-filter.json index 3c27f2bb..e96a4f8a 100644 --- a/test/fixtures/parse/null-byte--meta-250-filter.json +++ b/test/fixtures/parse/null-byte--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte--meta-250-strict.json b/test/fixtures/parse/null-byte--meta-250-strict.json index a9bad194..bb1f092c 100644 --- a/test/fixtures/parse/null-byte--meta-250-strict.json +++ b/test/fixtures/parse/null-byte--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": 
"2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte--meta-250.json b/test/fixtures/parse/null-byte--meta-250.json index a9bad194..bb1f092c 100644 --- a/test/fixtures/parse/null-byte--meta-250.json +++ b/test/fixtures/parse/null-byte--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte--strict.json b/test/fixtures/parse/null-byte--strict.json index a9bad194..bb1f092c 100644 --- a/test/fixtures/parse/null-byte--strict.json +++ b/test/fixtures/parse/null-byte--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, 
"ignore": false, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/null-byte.json b/test/fixtures/parse/null-byte.json index a9bad194..bb1f092c 100644 --- a/test/fixtures/parse/null-byte.json +++ b/test/fixtures/parse/null-byte.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 509, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 0, "mtime": "2017-07-31T22:21:58.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2017-07-31T22:21:58.000Z", "cksum": 11228, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "this_is_a_really_long_directory_name_with_a_lot_of_characters/this_is_a_really_long_tgz_file_with_a_lot_of_characters.tgz" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 436, "uid": 1000, "gid": 1000, - "uname": null, - "gname": null, "size": 200, "mtime": "2017-07-31T22:21:53.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2017-07-31T22:21:53.000Z", "cksum": 15210, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--filter-strict.json b/test/fixtures/parse/trailing-slash-corner-case--filter-strict.json index 3739da97..151be717 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--filter-strict.json +++ b/test/fixtures/parse/trailing-slash-corner-case--filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 
+72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--filter.json b/test/fixtures/parse/trailing-slash-corner-case--filter.json index 3739da97..151be717 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--filter.json +++ b/test/fixtures/parse/trailing-slash-corner-case--filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter-strict.json b/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter-strict.json index 3739da97..151be717 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter-strict.json +++ b/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter-strict.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, 
"mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter.json b/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter.json index 3739da97..151be717 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter.json +++ b/test/fixtures/parse/trailing-slash-corner-case--meta-250-filter.json @@ -2,8 +2,6 @@ [ "ignoredEntry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": true, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - 
"globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--meta-250-strict.json b/test/fixtures/parse/trailing-slash-corner-case--meta-250-strict.json index 14200298..c5110f0c 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--meta-250-strict.json +++ b/test/fixtures/parse/trailing-slash-corner-case--meta-250-strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--meta-250.json b/test/fixtures/parse/trailing-slash-corner-case--meta-250.json index 14200298..c5110f0c 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--meta-250.json +++ b/test/fixtures/parse/trailing-slash-corner-case--meta-250.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": 
"2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case--strict.json b/test/fixtures/parse/trailing-slash-corner-case--strict.json index 14200298..c5110f0c 100644 --- a/test/fixtures/parse/trailing-slash-corner-case--strict.json +++ b/test/fixtures/parse/trailing-slash-corner-case--strict.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - 
"atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/trailing-slash-corner-case.json b/test/fixtures/parse/trailing-slash-corner-case.json index 14200298..c5110f0c 100644 --- a/test/fixtures/parse/trailing-slash-corner-case.json +++ b/test/fixtures/parse/trailing-slash-corner-case.json @@ -2,8 +2,6 @@ [ "entry", { - "extended": null, - "globalExtended": null, "type": "Directory", "meta": false, "ignore": false, @@ -11,13 +9,8 @@ "mode": 493, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 0, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -29,12 +22,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13612, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -48,7 +37,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/a-truly-unlucky-file-beyond-130-byte-path-length.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -56,13 +44,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-19T00:03:11.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -74,12 +57,8 @@ "mtime": "2018-06-19T00:03:11.000Z", "cksum": 13611, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -93,7 +72,6 @@ "extended": { "path": "99-byte-dirname-ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc/some-unlucky-file.txt" }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -101,13 +79,8 @@ "mode": 420, "uid": 501, "gid": 20, - "uname": null, - "gname": null, "size": 560, "mtime": "2018-06-18T23:49:44.000Z", - "atime": null, - "ctime": null, - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -119,12 +92,8 @@ "mtime": "2018-06-18T23:49:44.000Z", "cksum": 13602, "linkpath": "", - "uname": null, - "gname": null, "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--filter-strict.json b/test/fixtures/parse/utf8--filter-strict.json index b8c5bac5..7dc9969f 100644 --- a/test/fixtures/parse/utf8--filter-strict.json +++ b/test/fixtures/parse/utf8--filter-strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + 
"devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--filter.json b/test/fixtures/parse/utf8--filter.json index b8c5bac5..7dc9969f 100644 --- a/test/fixtures/parse/utf8--filter.json +++ b/test/fixtures/parse/utf8--filter.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": 
"2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--meta-250-filter-strict.json b/test/fixtures/parse/utf8--meta-250-filter-strict.json index b8c5bac5..7dc9969f 100644 --- a/test/fixtures/parse/utf8--meta-250-filter-strict.json +++ b/test/fixtures/parse/utf8--meta-250-filter-strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - 
"devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--meta-250-filter.json b/test/fixtures/parse/utf8--meta-250-filter.json index b8c5bac5..7dc9969f 100644 --- a/test/fixtures/parse/utf8--meta-250-filter.json +++ b/test/fixtures/parse/utf8--meta-250-filter.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": true, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--meta-250-strict.json b/test/fixtures/parse/utf8--meta-250-strict.json index 611f052a..4ee5a61a 100644 --- a/test/fixtures/parse/utf8--meta-250-strict.json +++ b/test/fixtures/parse/utf8--meta-250-strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", 
"atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--meta-250.json b/test/fixtures/parse/utf8--meta-250.json index 611f052a..4ee5a61a 100644 --- a/test/fixtures/parse/utf8--meta-250.json +++ b/test/fixtures/parse/utf8--meta-250.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": 
"2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8--strict.json b/test/fixtures/parse/utf8--strict.json index 611f052a..4ee5a61a 100644 --- a/test/fixtures/parse/utf8--strict.json +++ b/test/fixtures/parse/utf8--strict.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -157,8 +116,6 @@ "size": 2, "mtime": 
"2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/fixtures/parse/utf8.json b/test/fixtures/parse/utf8.json index 611f052a..4ee5a61a 100644 --- a/test/fixtures/parse/utf8.json +++ b/test/fixtures/parse/utf8.json @@ -8,23 +8,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:51:42.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836217, "nlink": 1, + "path": "Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -37,8 +26,6 @@ "size": 2, "mtime": "2017-04-10T16:51:42.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:51:42.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -53,9 +40,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -68,23 +53,12 @@ { "extended": { "atime": "2017-04-10T17:06:33.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T17:05:56.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "🌟.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836716, "nlink": 1, + "path": "🌟.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -97,8 +71,6 @@ "size": 106, "mtime": "2017-04-10T17:05:55.000Z", "atime": "2017-04-10T17:06:33.000Z", - "ctime": "2017-04-10T17:05:56.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -113,9 +85,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], @@ -128,23 +98,12 @@ { "extended": { "atime": "2017-04-10T17:02:38.000Z", - "charset": null, - "comment": null, - "ctime": "2017-04-10T16:58:47.000Z", - "gid": null, - "gname": null, - "linkpath": null, - "mtime": null, - "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", - "size": null, - "uid": null, - "uname": null, "dev": 16777220, "ino": 9836396, "nlink": 1, + "path": "long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt", "global": false }, - "globalExtended": null, "type": "File", "meta": false, "ignore": false, @@ -157,8 +116,6 @@ "size": 2, "mtime": "2017-04-10T16:58:47.000Z", "atime": "2017-04-10T17:02:38.000Z", - "ctime": "2017-04-10T16:58:47.000Z", - "linkpath": "", "header": { "cksumValid": true, "needPax": false, @@ -173,9 +130,7 @@ "uname": "isaacs", "gname": "staff", "devmaj": 0, - "devmin": 0, - "atime": null, - "ctime": null + "devmin": 0 } } ], diff --git a/test/get-write-flag.js b/test/get-write-flag.js index 81c2f547..63c4a2bf 100644 --- a/test/get-write-flag.js +++ b/test/get-write-flag.js @@ -1,67 +1,73 @@ -const t = require('tap') +import fs from 'fs' +import t from 'tap' +import { fileURLToPath } from 'url' +import { getWriteFlag } from '../dist/esm/get-write-flag.js' + +const __filename = fileURLToPath(import.meta.url) // run three scenarios // unix (no fmap) // win32 (without fmap support) // win32 (with fmap support) -const fs = require('fs') const hasFmap = !!fs.constants.UV_FS_O_FILEMAP -const 
platform = process.platform
+const { platform } = process
 
 const UV_FS_O_FILEMAP = 0x20000000
 
 switch (process.argv[2]) {
   case 'win32-fmap': {
-    if (!hasFmap) {
-      global.__FAKE_TESTING_FS__ = {
-        constants: {
-          ...fs.constants,
-          ...{ UV_FS_O_FILEMAP },
-        },
-      }
-    }
     const { O_CREAT, O_TRUNC, O_WRONLY } = fs.constants
-    if (platform !== 'win32') {
-      process.env.__FAKE_PLATFORM__ = 'win32'
-    }
-    const getFlag = require('../lib/get-write-flag.js')
-    t.equal(getFlag(1), UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY)
-    t.equal(getFlag(512 * 1024 + 1), 'w')
+    t.equal(
+      getWriteFlag(1),
+      UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY,
+    )
+    t.equal(getWriteFlag(512 * 1024 + 1), 'w')
     break
   }
 
   case 'win32-nofmap': {
-    if (hasFmap) {
-      global.__FAKE_TESTING_FS__ = {
-        constants: {
-          ...fs.constants,
-          ...{ UV_FS_O_FILEMAP: 0 },
-        },
-      }
-    }
-    if (platform !== 'win32') {
-      process.env.__FAKE_PLATFORM__ = 'win32'
-    }
-    const getFlag = require('../lib/get-write-flag.js')
-    t.equal(getFlag(1), 'w')
-    t.equal(getFlag(512 * 1024 + 1), 'w')
+    t.equal(getWriteFlag(1), 'w')
+    t.equal(getWriteFlag(512 * 1024 + 1), 'w')
     break
   }
 
   case 'unix': {
-    if (platform === 'win32') {
-      process.env.__FAKE_PLATFORM__ = 'darwin'
-    }
-    const getFlag = require('../lib/get-write-flag.js')
-    t.equal(getFlag(1), 'w')
-    t.equal(getFlag(512 * 1024 + 1), 'w')
+    t.equal(getWriteFlag(1), 'w')
+    t.equal(getWriteFlag(512 * 1024 + 1), 'w')
     break
   }
 
   default: {
     const node = process.execPath
-    t.spawn(node, [__filename, 'win32-fmap'])
-    t.spawn(node, [__filename, 'win32-nofmap'])
-    t.spawn(node, [__filename, 'unix'])
+    t.spawn(node, [__filename, 'win32-fmap'], {
+      env: {
+        ...process.env,
+        ...(platform === 'win32'
+          ? {}
+          : {
+              __FAKE_FS_O_FILENAME__: String(UV_FS_O_FILEMAP),
+              __FAKE_PLATFORM__: 'win32',
+            }),
+      },
+    })
+    t.spawn(node, [__filename, 'win32-nofmap'], {
+      env: {
+        ...process.env,
+        ...(platform === 'win32'
+          ? {}
+          : {
+              __FAKE_FS_O_FILENAME__: '0',
+              __FAKE_PLATFORM__: 'win32',
+            }),
+      },
+    })
+    t.spawn(node, [__filename, 'unix'], {
+      env: {
+        ...process.env,
+        ...(platform === 'win32'
+          ? { __FAKE_PLATFORM__: 'linux' }
+          : {}),
+      },
+    })
   }
 }
diff --git a/test/header.js b/test/header.js
index 1a17eb83..8d3eb06e 100644
--- a/test/header.js
+++ b/test/header.js
@@ -1,32 +1,32 @@
-'use strict'
-const t = require('tap')
-const Header = require('../lib/header.js')
+import t from 'tap'
+import { Header } from '../dist/esm/header.js'
 
 t.test('ustar format', t => {
   const buf = Buffer.from(
     '666f6f2e74787400000000000000000000000000000000000000000000000000' +
-    '0000000000000000000000000000000000000000000000000000000000000000' +
-    '0000000000000000000000000000000000000000000000000000000000000000' +
-    '0000000030303037353520003035373736312000303030303234200037373737' +
-    '3737373737373700313236373735363735343000303133303531200030000000' +
-    '0000000000000000000000000000000000000000000000000000000000000000' +
-    '0000000000000000000000000000000000000000000000000000000000000000' +
-    '0000000000000000000000000000000000000000000000000000000000000000' +
-    '0075737461720030306973616163730000000000000000000000000000000000' +
-    '0000000000000000007374616666000000000000000000000000000000000000' +
-    '0000000000000000003030303030302000303030303030200000000000000000' +
-    '0000000000000000000000000000000000000000000000000000000000000000' +
-    '0000000000000000000000000000000000000000000000000000000000000000' +
-    '0000000000000000000000000000000000000000000000000000000000000000' +
-    '0000000000000000000000000000000000000000000000000000000000000000' +
-    '0000000000000000000000000000000000000000000000000000000000000000',
-    'hex')
+      '0000000000000000000000000000000000000000000000000000000000000000' +
+      '0000000000000000000000000000000000000000000000000000000000000000' +
+      '0000000030303037353520003035373736312000303030303234200037373737' +
+      '3737373737373700313236373735363735343000303133303531200030000000' +
+      '0000000000000000000000000000000000000000000000000000000000000000' +
+      '0000000000000000000000000000000000000000000000000000000000000000' +
+      '0000000000000000000000000000000000000000000000000000000000000000' +
+      '0075737461720030306973616163730000000000000000000000000000000000' +
+      '0000000000000000007374616666000000000000000000000000000000000000' +
+      '0000000000000000003030303030302000303030303030200000000000000000' +
+      '0000000000000000000000000000000000000000000000000000000000000000' +
+      '0000000000000000000000000000000000000000000000000000000000000000' +
+      '0000000000000000000000000000000000000000000000000000000000000000' +
+      '0000000000000000000000000000000000000000000000000000000000000000' +
+      '0000000000000000000000000000000000000000000000000000000000000000',
+    'hex',
+  )
 
   const h = new Header({
     path: 'foo.txt',
   })
   const slab = Buffer.alloc(1024)
 
-  h.set({
+  Object.assign(h, {
     mode: 0o755,
     uid: 24561,
     gid: 20,
@@ -38,9 +38,11 @@
   })
 
   h.encode(slab)
-  t.equal(slab.slice(0, 512).toString('hex'), buf.toString('hex'))
-  t.equal(slab.toString('hex'), buf.toString('hex') +
-    (new Array(1025).join('0')))
+  t.equal(slab.subarray(0, 512).toString('hex'), buf.toString('hex'))
+  t.equal(
+    slab.toString('hex'),
+    buf.toString('hex') + new Array(1025).join('0'),
+  )
 
   const h2 = new Header(buf)
 
@@ -64,30 +66,31 @@
 t.test('xstar format', t => {
   const buf = Buffer.from(
     '666f6f2e74787400000000000000000000000000000000000000000000000000' +
-    '0000000000000000000000000000000000000000000000000000000000000000' +
-    '0000000000000000000000000000000000000000000000000000000000000000' +
-    '0000000030303037353520003035373736312000303030303234200030303030' +
-
'3030303134342000313236373735363735343000303135313331200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030306973616163730000000000000000000000000000000000' + - '0000000000000000007374616666000000000000000000000000000000000000' + - '0000000000000000003030303030302000303030303030200000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000031323637' + - '3735363735343000313236373735363735343000000000000000000000000000' + - // just some junk - '420420420420420420420420420420420420420420420420420420420420', - 'hex') + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303037353520003035373736312000303030303234200030303030' + + '3030303134342000313236373735363735343000303135313331200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030306973616163730000000000000000000000000000000000' + + '0000000000000000007374616666000000000000000000000000000000000000' + + '0000000000000000003030303030302000303030303030200000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000031323637' + + '3735363735343000313236373735363735343000000000000000000000000000' + + // just some junk + '420420420420420420420420420420420420420420420420420420420420', + 'hex', + ) const h = new Header({ path: 'foo.txt', }) - h.set({ + Object.assign(h, { mode: 0o755, uid: 24561, gid: 20, @@ -102,7 +105,7 @@ t.test('xstar format', t => { h.encode() const slab = h.block - t.equal(slab.toString('hex'), buf.slice(0, 512).toString('hex')) + t.equal(slab.toString('hex'), buf.subarray(0, 512).toString('hex')) const h2 = new Header(buf) @@ -131,25 +134,27 @@ t.test('prefix handling', t => { t.test('no times', t => { const buf = Buffer.from( '666f6f2e74787400000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303037353520003035373736312000303030303234200030303030' + - '3030303134342000313236373735363735343000303337323734200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030306973616163730000000000000000000000000000000000' + - '0000000000000000007374616666000000000000000000000000000000000000' + - '00000000000000000030303030303020003030303030302000722f652f612f6c' + - '2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f79' + - '2f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f72' + - 
'2f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f61' + - '2f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d' + - '2f702f612f742f68000000000000000000000000000000000000000000000000', - 'hex') + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303037353520003035373736312000303030303234200030303030' + + '3030303134342000313236373735363735343000303337323734200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030306973616163730000000000000000000000000000000000' + + '0000000000000000007374616666000000000000000000000000000000000000' + + '00000000000000000030303030303020003030303030302000722f652f612f6c' + + '2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f79' + + '2f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f72' + + '2f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f61' + + '2f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d' + + '2f702f612f742f68000000000000000000000000000000000000000000000000', + 'hex', + ) const h = new Header({ - path: 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + + path: + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/foo.txt', mode: 0o755, @@ -166,14 +171,17 @@ t.test('prefix handling', t => { const b2 = Buffer.alloc(512) h.encode(b2, 0) - t.equal(b2.toString().replace(/\0+/g, ' '), - buf.toString().replace(/\0+/g, ' ')) + t.equal( + b2.toString().replace(/\0+/g, ' '), + buf.toString().replace(/\0+/g, ' '), + ) t.equal(b2.toString('hex'), buf.toString('hex')) const h2 = new Header(buf) t.match(h2, { - path: 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + + path: + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/foo.txt', mode: 0o755, @@ -192,10 +200,13 @@ t.test('prefix handling', t => { }) t.equal(b2.toString().replace(/\0.*$/, ''), 'foo.txt') - t.equal(b2.slice(345).toString().replace(/\0.*$/, ''), 'r/e/a/l/l/y/-' + - '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + - '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + - '/d/e/e/p/-/p/a/t/h') + t.equal( + b2.subarray(345).toString().replace(/\0.*$/, ''), + 'r/e/a/l/l/y/-' + + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + + '/d/e/e/p/-/p/a/t/h', + ) t.end() }) @@ -203,27 +214,29 @@ t.test('prefix handling', t => { t.test('a/c times, use shorter prefix field', t => { const buf = Buffer.from( '652f702f2d2f702f612f742f682f666f6f2e7478740000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303037353520003035373736312000303030303234200030303030' + - '3030303134342000313236373735363735343000303431353030200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030306973616163730000000000000000000000000000000000' + - 
'0000000000000000007374616666000000000000000000000000000000000000' + - '00000000000000000030303030303020003030303030302000722f652f612f6c' + - '2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f79' + - '2f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f72' + - '2f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f61' + - '2f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f642f65000031323637' + - '3735363735343000313236373735363735343000000000000000000000000000', - 'hex') + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303037353520003035373736312000303030303234200030303030' + + '3030303134342000313236373735363735343000303431353030200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030306973616163730000000000000000000000000000000000' + + '0000000000000000007374616666000000000000000000000000000000000000' + + '00000000000000000030303030303020003030303030302000722f652f612f6c' + + '2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f79' + + '2f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f72' + + '2f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f61' + + '2f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f642f65000031323637' + + '3735363735343000313236373735363735343000000000000000000000000000', + 'hex', + ) const h = new Header() - h.path = 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + - 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + - '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/foo.txt' + h.path = + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/foo.txt' h.mode = 0o755 h.uid = 24561 h.gid = 20 @@ -241,33 +254,44 @@ t.test('prefix handling', t => { const b3 = Buffer.alloc(1024) h.encode(b3, 100) - t.equal(b2.toString('hex'), b3.slice(100, 612).toString('hex')) + t.equal(b2.toString('hex'), b3.subarray(100, 612).toString('hex')) const h2 = new Header(b3, 100) - t.match(h2, { - path: 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + - 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + - '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/foo.txt', - mode: 0o755, - uid: 24561, - gid: 20, - size: 100, - mtime: new Date('2016-04-01T22:00Z'), - ctime: new Date('2016-04-01T22:00Z'), - atime: new Date('2016-04-01T22:00Z'), - type: 'File', - uname: 'isaacs', - gname: 'staff', - cksumValid: true, - cksum: 17216, - needPax: false, - }, 'header from buffer') - - t.equal(b2.toString().replace(/\0.*$/, ''), 'e/p/-/p/a/t/h/foo.txt') - t.equal(b2.slice(345).toString().replace(/\0.*$/, ''), 'r/e/a/l/l/y/-' + - '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + - '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e') + t.match( + h2, + { + path: + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/foo.txt', + mode: 0o755, + uid: 24561, + gid: 20, + size: 100, + mtime: new Date('2016-04-01T22:00Z'), + ctime: new Date('2016-04-01T22:00Z'), + atime: new Date('2016-04-01T22:00Z'), + type: 'File', + uname: 'isaacs', + gname: 'staff', + cksumValid: true, + cksum: 17216, + needPax: false, + }, + 'header from 
buffer', + ) + + t.equal( + b2.toString().replace(/\0.*$/, ''), + 'e/p/-/p/a/t/h/foo.txt', + ) + t.equal( + b2.subarray(345).toString().replace(/\0.*$/, ''), + 'r/e/a/l/l/y/-' + + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-' + + '/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e', + ) t.end() }) @@ -275,27 +299,30 @@ t.test('prefix handling', t => { t.test('hella long basename', t => { const buf = Buffer.from( '6c6f6e672d66696c652d6c6f6e672d66696c652d6c6f6e672d66696c652d6c6f' + - '6e672d66696c652d6c6f6e672d66696c652d6c6f6e672d66696c652d6c6f6e67' + - '2d66696c652d6c6f6e672d66696c652d6c6f6e672d66696c652d6c6f6e672d66' + - '696c650030303037353520003035373736312000303030303234200030303030' + - '3030303134342000313236373735363735343000303630313431200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030306973616163730000000000000000000000000000000000' + - '0000000000000000007374616666000000000000000000000000000000000000' + - '00000000000000000030303030303020003030303030302000722f652f612f6c' + - '2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f79' + - '2f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f72' + - '2f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f61' + - '2f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d' + - '2f702f612f742f68000000000000000000000000000000000000000000000000', - 'hex') + '6e672d66696c652d6c6f6e672d66696c652d6c6f6e672d66696c652d6c6f6e67' + + '2d66696c652d6c6f6e672d66696c652d6c6f6e672d66696c652d6c6f6e672d66' + + '696c650030303037353520003035373736312000303030303234200030303030' + + '3030303134342000313236373735363735343000303630313431200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030306973616163730000000000000000000000000000000000' + + '0000000000000000007374616666000000000000000000000000000000000000' + + '00000000000000000030303030303020003030303030302000722f652f612f6c' + + '2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f79' + + '2f2d2f722f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f72' + + '2f652f612f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f722f652f61' + + '2f6c2f6c2f792f2d2f722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d' + + '2f702f612f742f68000000000000000000000000000000000000000000000000', + 'hex', + ) const h = new Header({ - path: 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + + path: + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/' + - (new Array(20).join('long-file-')) + 'long-file.txt', + new Array(20).join('long-file-') + + 'long-file.txt', mode: 0o755, uid: 24561, gid: 20, @@ -318,7 +345,8 @@ t.test('prefix handling', t => { t.match(h2, { cksumValid: true, cksum: 24673, - path: 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + + path: + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + 'r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/r/e/a/l/l/y/-/' + 'r/e/a/l/l/y/-/d/e/e/p/-/p/a/t/h/long-file-long-file-long-' + 'file-long-file-long-file-long-file-long-file-long-file-long-' + @@ -332,26 +360,30 @@ t.test('prefix handling', t => { t.test('long 
basename, long dirname', t => { const buf = Buffer.from( '6c6f6e672d6469726e616d652d6c6f6e672d6469726e616d652d6c6f6e672d64' + - '69726e616d652d6c6f6e672d6469726e616d652d6c6f6e672d6469726e616d65' + - '2d6c6f6e672d6469726e616d652d6c6f6e672d6469726e616d652d6c6f6e672d' + - '6469720030303037353520003035373736312000303030303234200030303030' + - '3030303134342000313236373735363735343000303334323035200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030306973616163730000000000000000000000000000000000' + - '0000000000000000007374616666000000000000000000000000000000000000' + - '0000000000000000003030303030302000303030303030200000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000', - 'hex') + '69726e616d652d6c6f6e672d6469726e616d652d6c6f6e672d6469726e616d65' + + '2d6c6f6e672d6469726e616d652d6c6f6e672d6469726e616d652d6c6f6e672d' + + '6469720030303037353520003035373736312000303030303234200030303030' + + '3030303134342000313236373735363735343000303334323035200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030306973616163730000000000000000000000000000000000' + + '0000000000000000007374616666000000000000000000000000000000000000' + + '0000000000000000003030303030302000303030303030200000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ) const h = new Header({ - path: (new Array(30).join('long-dirname-')) + 'long-dirname/' + - (new Array(20).join('long-file-')) + 'long-file.txt', + path: + new Array(30).join('long-dirname-') + + 'long-dirname/' + + new Array(20).join('long-file-') + + 'long-file.txt', mode: 0o755, uid: 24561, gid: 20, @@ -376,7 +408,8 @@ t.test('prefix handling', t => { const h2 = new Header(b2) t.match(h2, { - path: 'long-dirname-long-dirname-long-dirname-long-dirname-' + + path: + 'long-dirname-long-dirname-long-dirname-long-dirname-' + 'long-dirname-long-dirname-long-dirname-long-dir', cksum: 14469, cksumValid: true, @@ -388,11 +421,15 @@ t.test('prefix handling', t => { }) t.test('throwers', t => { - t.throws(_ => new Header(Buffer.alloc(100)), - new Error('need 512 bytes for header')) + t.throws( + _ => new Header(Buffer.alloc(100)), + new Error('need 512 bytes for header'), + ) - t.throws(_ => new Header({}).encode(Buffer.alloc(100)), - new Error('need 512 bytes for header')) + t.throws( + _ => new Header({}).encode(Buffer.alloc(100)), + new Error('need 512 bytes for header'), + ) t.end() }) @@ -404,68 +441,72 @@ t.test('null block', t => { needPax: false, path: '', type: 'File', - mode: null, - uid: null, - gid: null, - size: null, - mtime: null, - cksum: null, + 
mode: undefined, + uid: undefined, + gid: undefined, + size: undefined, + mtime: undefined, + cksum: undefined, linkpath: '', - uname: null, - gname: null, + uname: undefined, + gname: undefined, devmaj: 0, devmin: 0, - atime: null, - ctime: null, + atime: undefined, + ctime: undefined, nullBlock: true, }) t.end() }) t.test('unknown type', t => { - const h = new Header(Buffer.from( - '666f6f2e74787400000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303037353520003035373736312000303030303234200030303030' + - '303030313434200031323637373536373534300030303630373620005a000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000', - 'hex')) - - t.equal(h.type, 'Z') - t.equal(h.typeKey, 'Z') + const h = new Header( + Buffer.from( + '666f6f2e74787400000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303037353520003035373736312000303030303234200030303030' + + '303030313434200031323637373536373534300030303630373620005a000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ), + ) + + t.equal(h.type, 'Unsupported') + t.equal(h.typeKey, 'Unsupported') t.end() }) t.test('dir as file with trailing /', t => { const b = Buffer.from( '782f792f00000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000030303030' + - '3030303030302000000000000000000000000000303034363136200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - 
'0075737461720030300000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000003030303030302000303030303030200000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000', - 'hex') + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000030303030' + + '3030303030302000000000000000000000000000303034363136200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030300000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000003030303030302000303030303030200000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ) const h = new Header(b) t.equal(h.type, 'Directory') b[156] = '0'.charCodeAt(0) @@ -478,24 +519,28 @@ t.test('null numeric values do not get written', t => { const b = Buffer.alloc(512) const h = new Header() h.encode(b, 0) - t.equal( - b.toString('hex'), - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000303033303737200030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030300000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000003030303030302000303030303030200000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000') + t.same( + b, + Buffer.from( + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000303033303737200030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + 
'0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030300000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000003030303030302000303030303030200000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ), + ) const h2 = new Header(b) t.match(h2, { type: 'File', @@ -503,19 +548,19 @@ t.test('null numeric values do not get written', t => { needPax: false, nullBlock: false, path: '', - mode: null, - uid: null, - gid: null, - size: null, - mtime: null, + mode: undefined, + uid: undefined, + gid: undefined, + size: undefined, + mtime: undefined, cksum: 1599, linkpath: '', uname: '', gname: '', devmaj: 0, devmin: 0, - atime: null, - ctime: null, + atime: undefined, + ctime: undefined, }) t.end() }) @@ -535,22 +580,23 @@ t.test('big numbers', t => { t.test('dir with long body', t => { const b = Buffer.from( '7061636b6167652f76656e646f72000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303037353520003030303030302000303030303030200030303030' + - '3030313030303020313330363133303232343120303132303236200035000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461720030300000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000003030303030302000303030303030200000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000', - 'hex') + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303037353520003030303030302000303030303030200030303030' + + '3030313030303020313330363133303232343120303132303236200035000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461720030300000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000003030303030302000303030303030200000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ) const h 
= new Header(b) t.equal(h.type, 'Directory') t.equal(h.size, 0) @@ -558,13 +604,18 @@ t.test('dir with long body', t => { }) t.test('null block, global extended header', t => { - const h = new Header(Buffer.alloc(512), 0, { - undef: undefined, - blerg: 'bloo', - }, { - path: '/global.path', - foo: 'global foo', - }) + const h = new Header( + Buffer.alloc(512), + 0, + { + undef: undefined, + blerg: 'bloo', + }, + { + path: '/global.path', + foo: 'global foo', + }, + ) t.match(h, { cksumValid: false, needPax: false, @@ -593,22 +644,26 @@ t.test('null block, global extended header', t => { t.test('gnutar-generated 10gb file size', t => { const b = Buffer.from( '313067622e696d67000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000030303030363634003030303137353000303030313735300080000000' + - '0000000280000000313334373434303132303500303131313437002030000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0075737461722020006973616163730000000000000000000000000000000000' + - '0000000000000000006973616163730000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000', 'hex') + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000030303030363634003030303137353000303030313735300080000000' + + '0000000280000000313334373434303132303500303131313437002030000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0075737461722020006973616163730000000000000000000000000000000000' + + '0000000000000000006973616163730000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + 'hex', + ) const h = new Header(b) t.equal(h.size, 1024 * 1024 * 1024 * 10, 'should be 10gb file') + // cannot set type to something invalid + t.throws(() => h.type = 'Z') t.end() }) diff --git a/test/high-level-opt.js b/test/high-level-opt.js deleted file mode 100644 index 7a82ef3f..00000000 --- a/test/high-level-opt.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -const t = require('tap') -const hlo = require('../lib/high-level-opt.js') - -t.same(hlo(), {}) - -t.same(hlo({ - C: 'dir', - f: 'file', - z: 'zip', - P: 'preserve', - U: 'unlink', - 'strip-components': 99, - foo: 'bar', -}), { - cwd: 'dir', - file: 'file', - gzip: 'zip', - preservePaths: 'preserve', - unlink: 
'unlink', - strip: 99, - foo: 'bar', -}) - -t.same(hlo({ - C: 'dir', - f: 'file', - z: 'zip', - P: 'preserve', - U: 'unlink', - stripComponents: 99, - foo: 'bar', -}), { - cwd: 'dir', - file: 'file', - gzip: 'zip', - preservePaths: 'preserve', - unlink: 'unlink', - strip: 99, - foo: 'bar', -}) diff --git a/test/index.js b/test/index.js index 548a1dee..147f821f 100644 --- a/test/index.js +++ b/test/index.js @@ -1,5 +1,6 @@ -const t = require('tap') -const tar = require('../') +import t from 'tap' +import * as tar from '../dist/esm/index.js' + t.match(tar, { create: Function, c: Function, @@ -13,7 +14,7 @@ t.match(tar, { x: Function, Pack: Function, Unpack: Function, - Parse: Function, + Parser: Function, ReadEntry: Function, WriteEntry: Function, Header: Function, @@ -67,7 +68,7 @@ t.match(tar, { ]), }, }) -t.match(tar.Pack.Sync, Function) -t.match(tar.WriteEntry.Sync, Function) -t.match(tar.WriteEntry.Tar, Function) +t.match(tar.PackSync, Function) +t.match(tar.WriteEntrySync, Function) +t.match(tar.WriteEntryTar, Function) t.match(tar.Pax.parse, Function) diff --git a/test/large-numbers.js b/test/large-numbers.js index 055493e9..f2b80467 100644 --- a/test/large-numbers.js +++ b/test/large-numbers.js @@ -1,8 +1,5 @@ -'use strict' -const large = require('../lib/large-numbers.js') -const encode = large.encode -const parse = large.parse -const t = require('tap') +import t from 'tap' +import { encode, parse } from '../dist/esm/large-numbers.js' t.test('parse', t => { const cases = new Map([ diff --git a/test/list.js b/test/list.js index 26f59d9f..1533dc6d 100644 --- a/test/list.js +++ b/test/list.js @@ -1,15 +1,22 @@ -'use strict' -const t = require('tap') -const list = require('../lib/list.js') -const path = require('path') -const fs = require('fs') -const mutateFS = require('mutate-fs') +import fs, { readFileSync } from 'fs' +import mutateFS from 'mutate-fs' +import { dirname, resolve } from 'path' +import t from 'tap' +import { fileURLToPath } from 'url' +import { list } from '../dist/esm/list.js' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) + +const lp = JSON.parse( + readFileSync(__dirname + '/fixtures/parse/long-paths.json', 'utf8'), +) t.test('basic', t => { - const file = path.resolve(__dirname, 'fixtures/tars/long-paths.tar') - const expect = require('./fixtures/parse/long-paths.json').filter( - e => Array.isArray(e) && e[0] === 'entry' - ).map(e => e[1].path) + const file = resolve(__dirname, 'fixtures/tars/long-paths.tar') + const expect = lp + .filter(e => Array.isArray(e) && e[0] === 'entry') + .map(e => e[1].path) const check = (actual, t) => { t.same(actual, expect) @@ -30,30 +37,33 @@ t.test('basic', t => { return check(actual, t) }) - t.test('async promise', t => { + t.test('async promise', async t => { const actual = [] const onentry = entry => actual.push(entry.path) - return list({ - file: file, - onentry: onentry, - maxReadSize: maxReadSize, - }).then(_ => check(actual, t)) + return await list({ + file, + onentry, + maxReadSize, + }).then(() => check(actual, t)) }) t.test('async cb', t => { const actual = [] const onentry = entry => actual.push(entry.path) - list({ - file: file, - onentry: onentry, - maxReadSize: maxReadSize, - }, er => { - if (er) { - throw er - } - check(actual, t) - t.end() - }) + list( + { + file: file, + onentry: onentry, + maxReadSize: maxReadSize, + }, + er => { + if (er) { + throw er + } + check(actual, t) + t.end() + }, + ) }) t.end() }) @@ -79,7 +89,7 @@ t.test('basic', t => { t.end() }) - t.test('no 
onentry function', t => list({ file: file })) + t.test('no onentry function', () => list({ file: file })) t.test('limit to specific files', t => { const fileList = [ @@ -98,35 +108,43 @@ t.test('basic', t => { 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', ] - t.test('no filter function', t => { + t.test('no filter function', async t => { const check = _ => t.same(actual, expect) const actual = [] - return list({ - file: file, - onentry: entry => actual.push(entry.path), - }, fileList).then(check) + return list( + { + file: file, + onentry: entry => actual.push(entry.path), + }, + fileList, + ).then(check) }) t.test('no filter function, stream', t => { const check = _ => t.same(actual, expect) const actual = [] const onentry = entry => actual.push(entry.path) - fs.createReadStream(file).pipe(list(fileList) - .on('entry', onentry) - .on('end', _ => { - check() - t.end() - })) + fs.createReadStream(file).pipe( + list(fileList) + .on('entry', onentry) + .on('end', _ => { + check() + t.end() + }), + ) }) - t.test('filter function', t => { + t.test('filter function', async t => { const check = _ => t.same(actual, expect.slice(0, 1)) const actual = [] - return list({ - file: file, - filter: path => path === expect[0], - onentry: entry => actual.push(entry.path), - }, fileList).then(check) + return list( + { + file: file, + filter: path => path === expect[0], + onentry: entry => actual.push(entry.path), + }, + fileList, + ).then(check) }) return t.test('list is unmunged', t => { @@ -142,10 +160,14 @@ t.test('basic', t => { }) t.test('bad args', t => { - t.throws(_ => list({ file: __filename, sync: true }, _ => _), - new TypeError('callback not supported for sync tar functions')) - t.throws(_ => list(_ => _), - new TypeError('callback only supported with file option')) + t.throws( + _ => list({ file: __filename, sync: true }, _ => _), + new TypeError('callback not supported for sync tar functions'), + ) + t.throws( + _ => list(_ => _), + new TypeError('callback only supported with file option'), + ) t.end() }) @@ -172,11 +194,15 @@ t.test('read fail', t => { const poop = new Error('poop') t.teardown(mutateFS.fail('read', poop)) t.plan(1) - t.throws(_ => list({ - file: __filename, - sync: true, - maxReadSize: 10, - }), poop) + t.throws( + _ => + list({ + file: __filename, + sync: true, + maxReadSize: 10, + }), + poop, + ) }) t.test('cb', t => { const poop = new Error('poop') @@ -194,7 +220,7 @@ t.test('read fail', t => { }) t.test('noResume option', t => { - const file = path.resolve(__dirname, 'fixtures/tars/file.tar') + const file = resolve(__dirname, 'fixtures/tars/file.tar') t.test('sync', t => { let e list({ @@ -214,16 +240,18 @@ t.test('noResume option', t => { e.on('end', _ => t.end()) }) - t.test('async', t => list({ - file: file, - onentry: entry => { - process.nextTick(_ => { - t.notOk(entry.flowing) - entry.resume() - }) - }, - noResume: true, - })) + t.test('async', t => + list({ + file: file, + onentry: entry => { + process.nextTick(_ => { + t.notOk(entry.flowing) + entry.resume() + }) + }, + noResume: true, + }), + ) t.end() }) diff --git a/test/load-all.js b/test/load-all.js index 524a3f4e..111a5fb6 100644 --- a/test/load-all.js +++ b/test/load-all.js @@ -1,10 +1,16 @@ -'use strict' // just load all the files so we can't cheat coverage by avoiding something -require('../') -const fs = require('fs') -const path = require('path') -const lib = path.resolve(__dirname, '../lib') -fs.readdirSync(lib) - .filter(f => /\.js$/.test(f)) - .forEach(f => require('../lib/' + f)) 
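// A hypothetical minimal sketch (not part of this patch): require() is
// synchronous, so the CJS loop above could pull in every lib file in a
// single pass, but an ESM port has to collect import() promises and
// await them, which is what the replacement loader below does. All
// names and paths here are illustrative only.
import fs from 'fs'
const dir = new URL('../dist/esm/', import.meta.url)
await Promise.all(
  fs
    .readdirSync(dir)
    .filter(f => f.endsWith('.js'))
    .map(f => import(new URL(f, dir).href)),
)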
-require('tap').pass('all lib files loaded') +import fs from 'fs' +import t from 'tap' +import path, { dirname } from 'path' +import { fileURLToPath } from 'url' +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) +const lib = path.resolve(__dirname, '../dist/esm') +await Promise.all( + fs + .readdirSync(lib) + .filter(f => /\.js$/.test(f)) + .map(f => import('../dist/esm/' + f)), +) + +t.pass('all lib files loaded') diff --git a/test/make-tar.js b/test/make-tar.js deleted file mode 100644 index 668d2164..00000000 --- a/test/make-tar.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict' -// a little utility to create virtual tar data -if (module === require.main) { - return require('tap').pass('this is fine') -} - -const Header = require('../lib/header.js') -module.exports = chunks => { - let dataLen = 0 - return Buffer.concat(chunks.map(chunk => { - if (Buffer.isBuffer(chunk)) { - dataLen += chunk.length - return chunk - } - const size = Math.max(typeof chunk === 'string' - ? 512 * Math.ceil(chunk.length / 512) - : 512) - dataLen += size - const buf = Buffer.alloc(size) - if (typeof chunk === 'string') { - buf.write(chunk) - } else { - new Header(chunk).encode(buf, 0) - } - return buf - }), dataLen) -} diff --git a/test/map.js b/test/map.js index fcd4e47f..33d3251b 100644 --- a/test/map.js +++ b/test/map.js @@ -1,7 +1,9 @@ -const t = require('tap') -const map = require('../map.js') -t.equal(map('test/index.js'), 'index.js') -t.same(map('test/unpack.js'), ['lib/unpack.js', 'lib/mkdir.js']) +import t from 'tap' +import map from '../map.js' +import { fileURLToPath } from 'url' +const __filename = fileURLToPath(import.meta.url) +t.equal(map('test/index.js'), 'src/index.ts') +t.same(map('test/unpack.js'), ['src/unpack.ts', 'src/mkdir.ts']) t.same(map('test/load-all.js'), []) t.equal(map(__filename), 'map.js') -t.equal(map('test/asdf'), 'lib/asdf') +t.equal(map('test/asdf'), 'src/asdf') diff --git a/test/mode-fix.js b/test/mode-fix.js index 779124b1..6c5b54a6 100644 --- a/test/mode-fix.js +++ b/test/mode-fix.js @@ -1,16 +1,15 @@ -'use strict' -const t = require('tap') -const mf = require('../lib/mode-fix.js') +import t from 'tap' +import { modeFix } from '../dist/esm/mode-fix.js' -t.equal(mf(0o10644, false), 0o644) -t.equal(mf(0o10644, true), 0o755) -t.equal(mf(0o10604, true), 0o705) -t.equal(mf(0o10600, true), 0o700) -t.equal(mf(0o10066, true), 0o077) +t.equal(modeFix(0o10644, false), 0o644) +t.equal(modeFix(0o10644, true), 0o755) +t.equal(modeFix(0o10604, true), 0o705) +t.equal(modeFix(0o10600, true), 0o700) +t.equal(modeFix(0o10066, true), 0o077) -t.equal(mf(0o10664, false, true), 0o644) -t.equal(mf(0o10066, false, true), 0o644) -t.equal(mf(0o10666, true, true), 0o755) -t.equal(mf(0o10604, true, true), 0o705) -t.equal(mf(0o10600, true, true), 0o700) -t.equal(mf(0o10066, true, true), 0o755) +t.equal(modeFix(0o10664, false, true), 0o644) +t.equal(modeFix(0o10066, false, true), 0o644) +t.equal(modeFix(0o10666, true, true), 0o755) +t.equal(modeFix(0o10604, true, true), 0o705) +t.equal(modeFix(0o10600, true, true), 0o700) +t.equal(modeFix(0o10066, true, true), 0o755) diff --git a/test/normalize-unicode.js b/test/normalize-unicode.js index 0d34f38c..969ee6ca 100644 --- a/test/normalize-unicode.js +++ b/test/normalize-unicode.js @@ -1,18 +1,32 @@ +import t from 'tap' + process.env.TESTING_TAR_FAKE_PLATFORM = 'win32' -const t = require('tap') -const normalize = require('../lib/normalize-unicode.js') -const stripSlash = require('../lib/strip-trailing-slashes.js') 
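// Why the rewritten test below reaches for import() rather than static
// import declarations: static ESM imports are hoisted and evaluated
// before any statement in this file runs, so the fake-platform env var
// would be set too late for modules that read it at load time. A
// minimal sketch of the pattern, assuming only that the module checks
// the env var during evaluation:
process.env.TESTING_TAR_FAKE_PLATFORM = 'win32'
const { normalizeUnicode } = await import(
  '../dist/esm/normalize-unicode.js'
)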
-const normPath = require('../lib/normalize-windows-path.js') + +const [ + { normalizeUnicode }, + { stripTrailingSlashes }, + { normalizeWindowsPath }, +] = await Promise.all([ + import('../dist/esm/normalize-unicode.js'), + import('../dist/esm/strip-trailing-slashes.js'), + import('../dist/esm/normalize-windows-path.js'), +]) // café const cafe1 = Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString() // cafe with a ` -const cafe2 = Buffer.from([0x63, 0x61, 0x66, 0x65, 0xcc, 0x81]).toString() +const cafe2 = Buffer.from([ + 0x63, 0x61, 0x66, 0x65, 0xcc, 0x81, +]).toString() -t.equal(normalize(cafe1), normalize(cafe2), 'matching unicodes') -t.equal(normalize(cafe1), normalize(cafe2), 'cached') -t.equal(normalize('foo'), 'foo', 'non-unicode string') +t.equal( + normalizeUnicode(cafe1), + normalizeUnicode(cafe2), + 'matching unicodes', +) +t.equal(normalizeUnicode(cafe1), normalizeUnicode(cafe2), 'cached') +t.equal(normalizeUnicode('foo'), 'foo', 'non-unicode string') t.test('normalize with strip slashes', t => { const paths = [ @@ -28,8 +42,12 @@ t.test('normalize with strip slashes', t => { for (const path of paths) { t.test(JSON.stringify(path), t => { - const a = normalize(stripSlash(normPath(path))) - const b = stripSlash(normPath(normalize(path))) + const a = normalizeUnicode( + stripTrailingSlashes(normalizeWindowsPath(path)), + ) + const b = stripTrailingSlashes( + normalizeWindowsPath(normalizeUnicode(path)), + ) t.matchSnapshot(a, 'normalized') t.equal(a, b, 'order should not matter') t.end() diff --git a/test/normalize-windows-path.js b/test/normalize-windows-path.js index e9c705ab..8fbaa647 100644 --- a/test/normalize-windows-path.js +++ b/test/normalize-windows-path.js @@ -1,28 +1,38 @@ -const t = require('tap') +import t from 'tap' const realPlatform = process.platform const fakePlatform = realPlatform === 'win32' ? 
'posix' : 'win32' -t.test('posix', t => { +t.test('posix', async t => { if (realPlatform === 'win32') { process.env.TESTING_TAR_FAKE_PLATFORM = fakePlatform } else { delete process.env.TESTING_TAR_FAKE_PLATFORM } - const normPath = t.mock('../lib/normalize-windows-path.js') - t.equal(normPath('/some/path/back\\slashes'), '/some/path/back\\slashes') - t.equal(normPath('c:\\foo\\bar'), 'c:\\foo\\bar') + const { normalizeWindowsPath } = await t.mockImport( + '../dist/esm/normalize-windows-path.js', + ) + t.equal( + normalizeWindowsPath('/some/path/back\\slashes'), + '/some/path/back\\slashes', + ) + t.equal(normalizeWindowsPath('c:\\foo\\bar'), 'c:\\foo\\bar') t.end() }) -t.test('win32', t => { +t.test('win32', async t => { if (realPlatform !== 'win32') { process.env.TESTING_TAR_FAKE_PLATFORM = fakePlatform } else { delete process.env.TESTING_TAR_FAKE_PLATFORM } - const normPath = t.mock('../lib/normalize-windows-path.js') - t.equal(normPath('/some/path/back\\slashes'), '/some/path/back/slashes') - t.equal(normPath('c:\\foo\\bar'), 'c:/foo/bar') + const { normalizeWindowsPath } = await t.mockImport( + '../dist/esm/normalize-windows-path.js', + ) + t.equal( + normalizeWindowsPath('/some/path/back\\slashes'), + '/some/path/back/slashes', + ) + t.equal(normalizeWindowsPath('c:\\foo\\bar'), 'c:/foo/bar') t.end() }) diff --git a/test/options.js b/test/options.js new file mode 100644 index 00000000..5e10df5b --- /dev/null +++ b/test/options.js @@ -0,0 +1,65 @@ +import t from 'tap' +import { + dealias, + isSync, + isSyncFile, + isFile, +} from '../dist/esm/options.js' + +t.same(dealias(), {}) +t.same(dealias(false), {}) + +t.same( + dealias({ + C: 'dir', + f: 'file', + z: 'zip', + P: 'preserve', + U: 'unlink', + 'strip-components': 99, + foo: 'bar', + }), + { + cwd: 'dir', + file: 'file', + gzip: 'zip', + preservePaths: 'preserve', + unlink: 'unlink', + strip: 99, + foo: 'bar', + }, +) + +t.same( + dealias({ + C: 'dir', + f: 'file', + z: 'zip', + P: 'preserve', + U: 'unlink', + stripComponents: 99, + foo: 'bar', + }), + { + cwd: 'dir', + file: 'file', + gzip: 'zip', + preservePaths: 'preserve', + unlink: 'unlink', + strip: 99, + foo: 'bar', + }, +) + +t.equal(isSyncFile(dealias({ sync: true, f: 'x' })), true) +t.equal(isSyncFile(dealias({ file: 'x' })), false) +t.equal(isSyncFile(dealias({ sync: true })), false) +t.equal(isSyncFile(dealias({})), false) +t.equal(isSync(dealias({ sync: true, f: 'x' })), true) +t.equal(isSync(dealias({ file: 'x' })), false) +t.equal(isSync(dealias({ sync: true })), true) +t.equal(isSync(dealias({})), false) +t.equal(isFile(dealias({ sync: true, f: 'x' })), true) +t.equal(isFile(dealias({ file: 'x' })), true) +t.equal(isFile(dealias({ sync: true })), false) +t.equal(isFile(dealias({})), false) diff --git a/test/pack.js b/test/pack.js index a4f8bfbe..dae2fba9 100644 --- a/test/pack.js +++ b/test/pack.js @@ -1,37 +1,45 @@ -'use strict' -const t = require('tap') -const Pack = require('../lib/pack.js') -const PackSync = Pack.Sync -const fs = require('fs') -const path = require('path') +import t from 'tap' +import { Pack, PackSync } from '../dist/esm/pack.js' +import fs from 'fs' +import path from 'path' +import { fileURLToPath } from 'url' + +import { Header } from '../dist/esm/header.js' +import zlib from 'zlib' +import * as miniz from 'minizlib' +import mutateFS from 'mutate-fs' +import { Minipass } from 'minipass' +import EE from 'events' +import { rimraf } from 'rimraf' +import { mkdirp } from 'mkdirp' +import { ReadEntry } from '../dist/esm/read-entry.js' +import { 
normalizeWindowsPath as normPath } from '../dist/esm/normalize-windows-path.js' + +const { default: chmodr } = await import('chmodr') + +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) + const fixtures = path.resolve(__dirname, 'fixtures') const files = path.resolve(fixtures, 'files') const tars = path.resolve(fixtures, 'tars') -const chmodr = require('chmodr') -const Header = require('../lib/header.js') -const zlib = require('zlib') -const miniz = require('minizlib') -const mutateFS = require('mutate-fs') -const { Minipass } = require('minipass') + process.env.USER = 'isaacs' -const EE = require('events').EventEmitter -const rimraf = require('rimraf') -const mkdirp = require('mkdirp') -const ReadEntry = require('../lib/read-entry.js') const isWindows = process.platform === 'win32' -const normPath = require('../lib/normalize-windows-path.js') const ctime = new Date('2017-05-10T01:03:12.000Z') const atime = new Date('2017-04-17T00:00:00.000Z') const mtime = new Date('2016-04-01T19:00:00.000Z') -t.teardown(mutateFS.statMutate((er, st) => { - if (st) { - st.ctime = ctime - st.atime = atime - st.mtime = mtime - } -})) +t.teardown( + mutateFS.statMutate((_er, st) => { + if (st) { + st.ctime = ctime + st.atime = atime + st.mtime = mtime + } + }), +) t.test('set up', t => { const one = fs.statSync(files + '/hardlink-1') @@ -54,7 +62,7 @@ t.test('pack a file', t => { .on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 2048) - t.match(data.slice(512).toString(), /^a\0{511}\0{1024}$/) + t.match(data.subarray(512).toString(), /^a\0{511}\0{1024}$/) const h = new Header(data) const expect = { cksumValid: true, @@ -84,7 +92,7 @@ t.test('pack a file', t => { throw new Error('no data!') } - t.equal(sync.slice(512).toString(), data.slice(512).toString()) + t.equal(sync.subarray(512).toString(), data.subarray(512).toString()) const hs = new Header(sync) t.match(hs, expect) t.end() @@ -93,13 +101,13 @@ t.test('pack a file', t => { t.test('pack a file with a prefix', t => { const out = [] - new Pack({ cwd: files, prefix: 'package/' }) + new Pack({ mtime, cwd: files, prefix: 'package/' }) .end('.dotfile') .on('data', c => out.push(c)) .on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 2048) - t.match(data.slice(512).toString(), /^.\n\0{510}\0{1024}$/) + t.match(data.subarray(512).toString(), /^.\n\0{510}\0{1024}$/) const h = new Header(data) const expect = { cksumValid: true, @@ -121,8 +129,10 @@ t.test('pack a file with a prefix', t => { } t.match(h, expect) const sync = new PackSync({ cwd: files, prefix: 'package' }) - .add('.dotfile').end().read() - t.equal(sync.slice(512).toString(), data.slice(512).toString()) + .add('.dotfile') + .end() + .read() + t.equal(sync.subarray(512).toString(), data.subarray(512).toString()) const hs = new Header(sync) t.match(hs, expect) t.end() @@ -163,15 +173,21 @@ t.test('portable pack a dir', t => { } t.match(h, expect) t.equal(data.length, 2048) - t.match(data.slice(1024).toString(), /^\0{1024}$/) + t.match(data.subarray(1024).toString(), /^\0{1024}$/) - const syncgz = new PackSync({ cwd: files, portable: true, gzip: true }) - .add('dir').end().read() + const syncgz = new PackSync({ + cwd: files, + portable: true, + gzip: true, + }) + .add('dir') + .end() + .read() t.equal(syncgz[9], 255, 'gzip OS flag set to "unknown"') const sync = new miniz.Gunzip().end(zipped).read() - t.equal(sync.slice(512).toString(), data.slice(512).toString()) + t.equal(sync.subarray(512).toString(), 
data.subarray(512).toString()) const hs = new Header(sync) t.match(hs, expect) @@ -193,8 +209,8 @@ t.test('portable pack a dir', t => { ctime: null, nullBlock: false, } - t.match(new Header(data.slice(512)), expect2) - t.match(new Header(sync.slice(512)), expect2) + t.match(new Header(data.subarray(512)), expect2) + t.match(new Header(sync.subarray(512)), expect2) t.end() }) }) @@ -235,11 +251,13 @@ t.test('use process cwd if cwd not specified', t => { } t.match(h, expect) t.equal(data.length, 2048) - t.match(data.slice(1024).toString(), /^\0{1024}$/) + t.match(data.subarray(1024).toString(), /^\0{1024}$/) const sync = new PackSync({ cwd: files }) - .add('dir').end().read() - t.equal(sync.slice(512).toString(), data.slice(512).toString()) + .add('dir') + .end() + .read() + t.equal(sync.subarray(512).toString(), data.subarray(512).toString()) const hs = new Header(sync) t.match(hs, expect) @@ -261,15 +279,15 @@ t.test('use process cwd if cwd not specified', t => { ctime: ctime, nullBlock: false, } - t.match(new Header(data.slice(512)), expect2) - t.match(new Header(sync.slice(512)), expect2) + t.match(new Header(data.subarray(512)), expect2) + t.match(new Header(sync.subarray(512)), expect2) t.end() }) }) t.test('filter', t => { const out = [] - const filter = (path, stat) => stat.isDirectory() + const filter = (_path, stat) => stat.isDirectory() // only include directories, so dir/x should not appear new Pack({ cwd: files, filter: filter }) @@ -301,11 +319,13 @@ t.test('filter', t => { } t.match(h, expect) t.equal(data.length, 1536) - t.match(data.slice(512).toString(), /^\0{1024}$/) + t.match(data.subarray(512).toString(), /^\0{1024}$/) const sync = new PackSync({ cwd: files, filter: filter }) - .add('dir').end().read() - t.equal(sync.slice(512).toString(), data.slice(512).toString()) + .add('dir') + .end() + .read() + t.equal(sync.subarray(512).toString(), data.subarray(512).toString()) const hs = new Header(sync) t.match(hs, expect) t.end() @@ -314,7 +334,7 @@ t.test('filter', t => { t.test('add the same dir twice (exercise cache code)', t => { const out = [] - const filter = (path, stat) => stat.isDirectory() + const filter = (_path, stat) => stat.isDirectory() // only include directories, so dir/x should not appear const pack = new Pack({ cwd: files, filter: filter }) @@ -346,10 +366,10 @@ t.test('add the same dir twice (exercise cache code)', t => { nullBlock: false, } t.match(h, expect) - const h2 = new Header(data.slice(512)) + const h2 = new Header(data.subarray(512)) t.match(h2, expect) t.equal(data.length, 2048) - t.match(data.slice(1024).toString(), /^\0{1024}$/) + t.match(data.subarray(1024).toString(), /^\0{1024}$/) const sync = new PackSync({ cwd: files, @@ -358,11 +378,17 @@ t.test('add the same dir twice (exercise cache code)', t => { readdirCache: pack.readdirCache, statCache: pack.statCache, }) - .add('dir').add('dir').end().read() - t.equal(sync.slice(1024).toString(), data.slice(1024).toString()) + .add('dir') + .add('dir') + .end() + .read() + t.equal( + sync.subarray(1024).toString(), + data.subarray(1024).toString(), + ) const hs = new Header(sync) t.match(hs, expect) - const hs2 = new Header(sync.slice(512)) + const hs2 = new Header(sync.subarray(512)) t.match(hs2, expect) t.end() }) @@ -384,7 +410,10 @@ t.test('if brotli is truthy, make it an object', t => { t.test('throws if both gzip and brotli are truthy', t => { const opt = { gzip: true, brotli: true } - t.throws(_ => new Pack(opt), new TypeError('gzip and brotli are mutually exclusive')) + t.throws( + _ => 
new Pack(opt), + new TypeError('gzip and brotli are mutually exclusive'), + ) t.end() }) @@ -404,14 +433,16 @@ t.test('gzip, also a very deep path', t => { const data = zlib.unzipSync(zipped) const entries = [] for (var i = 0; i < data.length; i += 512) { - const slice = data.slice(i, i + 512) + const slice = data.subarray(i, i + 512) const h = new Header(slice) if (h.nullBlock) { entries.push('null block') } else if (h.cksumValid) { entries.push([h.type, h.path]) } else if (entries[entries.length - 1][0] === 'File') { - entries[entries.length - 1].push(slice.toString().replace(/\0.*$/, '')) + entries[entries.length - 1].push( + slice.toString().replace(/\0.*$/, ''), + ) } } @@ -436,27 +467,72 @@ t.test('gzip, also a very deep path', t => { ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', 'short\n'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'], - ['ExtendedHeader', 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222'], - ['ExtendedHeader', 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', + 'short\n', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111', + ], + [ + 'ExtendedHeader', + 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 
'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222', + ], + [ + 'ExtendedHeader', + 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], ['ExtendedHeader', 'PaxHeader/Ω.txt'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', 'Ω'], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + 'Ω', + ], 'null block', 'null block', ] let ok = true entries.forEach((entry, i) => { - ok = ok && + ok = + ok && t.equal(entry[0], expect[i][0]) && t.equal(entry[1], expect[i][1]) && (!entry[2] || t.equal(entry[2], expect[i][2])) @@ -483,14 +559,16 @@ t.test('brotli, also a very deep path', t => { const data = zlib.brotliDecompressSync(zipped) const entries = [] for (var i = 0; i < data.length; i += 512) { - const slice = data.slice(i, i + 512) + const slice = data.subarray(i, i + 512) const h = new Header(slice) if (h.nullBlock) { entries.push('null block') } else if (h.cksumValid) { entries.push([h.type, h.path]) } else if (entries[entries.length - 1][0] === 'File') { - entries[entries.length - 1].push(slice.toString().replace(/\0.*$/, '')) + entries[entries.length - 1].push( + slice.toString().replace(/\0.*$/, ''), + ) } } @@ -515,30 +593,75 @@ t.test('brotli, also a very deep path', t => { ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', 'short\n'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'], - ['ExtendedHeader', 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222'], - ['ExtendedHeader', 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', 
'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', + 'short\n', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111', + ], + [ + 'ExtendedHeader', + 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222', + ], + [ + 'ExtendedHeader', + 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], ['ExtendedHeader', 'PaxHeader/Ω.txt'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', 'Ω'], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + 'Ω', + ], 'null block', 'null block', ] let ok = true entries.forEach((entry, i) => { - ok = ok && - t.equal(entry[0], expect[i][0]) && - t.equal(entry[1], expect[i][1]) && - (!entry[2] || t.equal(entry[2], expect[i][2])) + ok = + ok && + t.equal(entry[0], expect[i][0]) && + t.equal(entry[1], expect[i][1]) && + (!entry[2] || t.equal(entry[2], expect[i][2])) }) t.end() @@ -549,7 +672,8 @@ t.test('very deep gzip path, sync', t => { const pack = new PackSync({ cwd: files, gzip: true, - }).add('dir') + }) + .add('dir') .add('long-path') .end() @@ -562,14 +686,16 @@ t.test('very deep gzip path, sync', t => { const data = zlib.unzipSync(zipped) const entries = [] for (var i = 0; i < data.length; i += 512) { - const slice = data.slice(i, i + 512) + const slice = data.subarray(i, i + 512) const h = new Header(slice) if (h.nullBlock) { entries.push('null block') } else if (h.cksumValid) { entries.push([h.type, h.path]) } else if (entries[entries.length - 1][0] === 'File') { - entries[entries.length - 1].push(slice.toString().replace(/\0.*$/, '')) + entries[entries.length - 1].push( + slice.toString().replace(/\0.*$/, ''), + ) } } @@ -596,25 +722,64 @@ t.test('very deep gzip path, sync', t => { ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/'], - ['Directory', 
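// The sync variants above push the same fixture through PackSync, whose
// add() and end() do their work (and emit 'data') synchronously, so the
// archive can be collected without awaiting an 'end' event. A sketch of
// that usage, assuming `files` is the fixture cwd:
import { PackSync } from '../dist/esm/pack.js'
const chunks = []
const pack = new PackSync({ cwd: files, gzip: true })
pack.on('data', c => chunks.push(c))
pack.add('dir').add('long-path').end()
const archive = Buffer.concat(chunks) // complete .tgz bytes, synchronously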
'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', 'short\n'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'], - ['ExtendedHeader', 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222'], - ['ExtendedHeader', 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', + 'short\n', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111', + ], + [ + 'ExtendedHeader', + 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222', + ], + [ + 'ExtendedHeader', + 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], ['ExtendedHeader', 'PaxHeader/Ω.txt'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', 'Ω'], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + 'Ω', + ], 'null block', 'null block', ] let ok = true entries.forEach((entry, i) => { - ok = ok && + ok = + ok && t.equal(entry[0], expect[i][0]) && t.equal(entry[1], expect[i][1]) && (!entry[2] || t.equal(entry[2], expect[i][2])) @@ -628,7 +793,8 @@ t.test('very deep brotli path, sync', t => { const pack = new PackSync({ cwd: files, brotli: true, - }).add('dir') + }) + .add('dir') .add('long-path') .end() @@ 
-641,14 +807,16 @@ t.test('very deep brotli path, sync', t => { const data = zlib.brotliDecompressSync(zipped) const entries = [] for (var i = 0; i < data.length; i += 512) { - const slice = data.slice(i, i + 512) + const slice = data.subarray(i, i + 512) const h = new Header(slice) if (h.nullBlock) { entries.push('null block') } else if (h.cksumValid) { entries.push([h.type, h.path]) } else if (entries[entries.length - 1][0] === 'File') { - entries[entries.length - 1].push(slice.toString().replace(/\0.*$/, '')) + entries[entries.length - 1].push( + slice.toString().replace(/\0.*$/, ''), + ) } } @@ -675,28 +843,67 @@ t.test('very deep brotli path, sync', t => { ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/'], ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/'], - ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', 'short\n'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'], - ['ExtendedHeader', 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222'], - ['ExtendedHeader', 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', + ], + [ + 'Directory', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', + 'short\n', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111', + ], + [ + 'ExtendedHeader', + 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222', + ], + [ + 'ExtendedHeader', + 
'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc', + ], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ], ['ExtendedHeader', 'PaxHeader/Ω.txt'], - ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', 'Ω'], + [ + 'File', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', + 'Ω', + ], 'null block', 'null block', ] let ok = true entries.forEach((entry, i) => { - ok = ok && + ok = + ok && t.equal(entry[0], expect[i][0]) && - t.equal(entry[1], expect[i][1]) && - (!entry[2] || t.equal(entry[2], expect[i][2])) + t.equal(entry[1], expect[i][1]) && + (!entry[2] || t.equal(entry[2], expect[i][2])) }) t.end() @@ -711,8 +918,10 @@ t.test('write after end', t => { t.test('emit error when stat fail', t => { t.teardown(mutateFS.statFail(new Error('xyz'))) - t.throws(_ => new PackSync({ cwd: files }).add('one-byte.txt'), - new Error('xyz')) + t.throws( + _ => new PackSync({ cwd: files }).add('one-byte.txt'), + new Error('xyz'), + ) new Pack({ cwd: files }).add('one-byte.txt').on('error', e => { t.match(e, { message: 'xyz' }) @@ -722,7 +931,10 @@ t.test('emit error when stat fail', t => { t.test('readdir fail', t => { t.teardown(mutateFS.fail('readdir', new Error('xyz'))) - t.throws(_ => new PackSync({ cwd: files }).add('dir'), new Error('xyz')) + t.throws( + _ => new PackSync({ cwd: files }).add('dir'), + new Error('xyz'), + ) new Pack({ cwd: files }).add('dir').on('error', e => { t.match(e, { message: 'xyz' }) @@ -764,7 +976,7 @@ t.test('pipe into a slow reader', t => { } t.match(h, expect) t.equal(data.length, 21504) - t.match(data.slice(data.length - 1024).toString(), /^\0{1024}$/) + t.match(data.subarray(data.length - 1024).toString(), /^\0{1024}$/) t.end() }) }) @@ -772,11 +984,13 @@ t.test('pipe into a slow reader', t => { t.test('pipe into a slow gzip reader', t => { const out = [] const mp2 = new miniz.Unzip() - const p = new Pack({ cwd: files, gzip: true }).add('long-path').end() + const p = new Pack({ cwd: files, gzip: true }) + .add('long-path') + .end() p.pause() class SlowStream extends EE { - write (chunk) { + write(chunk) { mp2.write(chunk) setTimeout(_ => { this.emit('drain') @@ -785,7 +999,7 @@ t.test('pipe into a slow gzip reader', t => { return false } - end (chunk) { + end(chunk) { return mp2.end(chunk) } } @@ -823,7 +1037,7 @@ t.test('pipe into a slow gzip reader', t => { } t.match(h, expect) t.equal(data.length, 21504) - t.match(data.slice(data.length - 1024).toString(), /^\0{1024}$/) + t.match(data.subarray(data.length - 1024).toString(), /^\0{1024}$/) t.end() }) }) @@ -835,12 +1049,12 @@ t.test('ignores mid-queue', t => { let didFirst = false const p = new Pack({ cwd: tars, - filter: (p, st) => { + filter: (p, _st) => { if (p === './') { return true } if (!didFirst) { - return didFirst = true + return (didFirst = true) } return false }, @@ -852,8 +1066,8 @@ t.test('ignores mid-queue', t => { p.on('data', c => out.push(c)) p.on('end', _ => { const data = Buffer.concat(out) - t.equal(data.slice(0, 100).toString().replace(/\0.*$/, ''), './') - const file = data.slice(512, 612).toString().replace(/\0.*$/, '') + t.equal(data.subarray(0, 100).toString().replace(/\0.*$/, ''), './') + const file = data.subarray(512, 612).toString().replace(/\0.*$/, '') 
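// The SlowStream above is the whole point of the slow-reader tests: a
// minimal destination whose write() always reports backpressure and only
// emits 'drain' on a timer, forcing Pack to pause and buffer correctly.
// The shape of that stub, sketched on its own:
import EE from 'events'
class SlowSink extends EE {
  write(chunk) {
    setTimeout(() => this.emit('drain')) // release one tick later
    return false // always signal backpressure to the source
  }
  end(chunk) {
    return this.write(chunk)
  }
}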
t.not(files.indexOf(file), -1) t.end() }) @@ -869,17 +1083,21 @@ t.test('warnings', t => { const p = new Pack({ cwd: files, onwarn: (c, m, p) => warnings.push([c, m, p]), - }).end(f).on('data', c => out.push(c)) + }) + .end(f) + .on('data', c => out.push(c)) const out = [] p.on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 2048) - t.match(warnings, [[ - 'TAR_ENTRY_INFO', - /stripping .* from absolute path/, - { path: normPath(f) }, - ]]) + t.match(warnings, [ + [ + 'TAR_ENTRY_INFO', + /stripping .* from absolute path/, + { path: normPath(f) }, + ], + ]) t.match(new Header(data), { path: normPath(f).replace(/^(\/|[a-z]:\/)/i, ''), @@ -900,7 +1118,9 @@ t.test('warnings', t => { strict: strict, preservePaths: true, onwarn: (c, m, p) => warnings.push([c, m, p]), - }).end(f).on('data', c => out.push(c)) + }) + .end(f) + .on('data', c => out.push(c)) p.on('end', _ => { const data = Buffer.concat(out) t.equal(warnings.length, 0) @@ -918,13 +1138,15 @@ t.test('warnings', t => { new Pack({ strict: true, cwd: files, - }).end(f).on('error', e => { - t.match(e, { - message: /stripping .* from absolute path/, - path: normPath(f), - }) - t.end() }) + .end(f) + .on('error', e => { + t.match(e, { + message: /stripping .* from absolute path/, + path: normPath(f), + }) + t.end() + }) }) t.end() @@ -963,7 +1185,7 @@ t.test('no dir recurse', t => { }) t.test('sync', t => { - const p = new Pack.Sync({ + const p = new PackSync({ cwd: dir, noDirRecurse: true, }) @@ -975,48 +1197,57 @@ t.test('no dir recurse', t => { t.end() }) -t.test('follow', { skip: isWindows && 'file symlinks not available' }, t => { - const check = (out, t) => { - const data = Buffer.concat(out) - t.equal(data.length, 2048) - t.match(new Header(data, 0), { - type: 'File', - cksumValid: true, - needPax: false, - path: 'symlink', - mode: isWindows ? 0o666 : 0o644, - size: 26, - }) - t.match(data.slice(512).toString(), /this link is like diamond\n\0+$/) - t.end() - } +t.test( + 'follow', + { skip: isWindows && 'file symlinks not available' }, + t => { + const check = (out, t) => { + const data = Buffer.concat(out) + t.equal(data.length, 2048) + t.match(new Header(data, 0), { + type: 'File', + cksumValid: true, + needPax: false, + path: 'symlink', + mode: isWindows ? 
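// The contract exercised by the warnings tests above: a non-strict Pack
// reports recoverable problems through onwarn (TAR_ENTRY_INFO here, for
// stripping the root from an absolute path) and keeps packing, while
// strict mode raises the same condition as an 'error'. Sketch, assuming
// `f` is an absolute path and `files` the fixture cwd:
import { Pack } from '../dist/esm/pack.js'
const warnings = []
new Pack({
  cwd: files,
  onwarn: (code, message, data) => warnings.push([code, message, data]),
}).end(f) // warns, then packs the path with its root stripped
new Pack({ cwd: files, strict: true })
  .on('error', er => console.error(er.message)) // same condition, now fatal
  .end(f)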
0o666 : 0o644, + size: 26, + }) + t.match( + data.subarray(512).toString(), + /this link is like diamond\n\0+$/, + ) + t.end() + } - t.test('async', t => { - const out = [] - const p = new Pack({ cwd: files, follow: true }) - p.on('data', c => out.push(c)) - p.on('end', _ => check(out, t)) - p.end('symlink') - }) + t.test('async', t => { + const out = [] + const p = new Pack({ cwd: files, follow: true }) + p.on('data', c => out.push(c)) + p.on('end', _ => check(out, t)) + p.end('symlink') + }) - t.test('sync', t => { - const out = [] - const p = new Pack.Sync({ cwd: files, follow: true }) - p.on('data', c => out.push(c)) - p.end('symlink') - check(out, t) - }) + t.test('sync', t => { + const out = [] + const p = new PackSync({ cwd: files, follow: true }) + p.on('data', c => out.push(c)) + p.end('symlink') + check(out, t) + }) - t.end() -}) + t.end() + }, +) t.test('pack ReadEntries', t => { t.test('basic', t => { - const readEntry = new ReadEntry(new Header({ - path: 'x', - type: 'File', - size: 1, - })) + const readEntry = new ReadEntry( + new Header({ + path: 'x', + type: 'File', + size: 1, + }), + ) const p = new Pack() p.end(readEntry) const out = [] @@ -1024,9 +1255,9 @@ t.test('pack ReadEntries', t => { p.on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 2048) - t.match(data.slice(1024).toString(), /^\0+$/) - t.equal(data.slice(0, 100).toString().replace(/\0.*$/, ''), 'x') - t.equal(data.slice(512, 514).toString(), 'x\0') + t.match(data.subarray(1024).toString(), /^\0+$/) + t.equal(data.subarray(0, 100).toString().replace(/\0.*$/, ''), 'x') + t.equal(data.subarray(512, 514).toString(), 'x\0') t.end() }) const buf = Buffer.alloc(512) @@ -1035,11 +1266,13 @@ t.test('pack ReadEntries', t => { }) t.test('prefix', t => { - const readEntry = new ReadEntry(new Header({ - path: 'x', - type: 'File', - size: 1, - })) + const readEntry = new ReadEntry( + new Header({ + path: 'x', + type: 'File', + size: 1, + }), + ) const p = new Pack({ prefix: 'y' }) p.end(readEntry) const out = [] @@ -1047,9 +1280,12 @@ t.test('pack ReadEntries', t => { p.on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 2048) - t.match(data.slice(1024).toString(), /^\0+$/) - t.equal(data.slice(0, 100).toString().replace(/\0.*$/, ''), 'y/x') - t.equal(data.slice(512, 514).toString(), 'x\0') + t.match(data.subarray(1024).toString(), /^\0+$/) + t.equal( + data.subarray(0, 100).toString().replace(/\0.*$/, ''), + 'y/x', + ) + t.equal(data.subarray(512, 514).toString(), 'x\0') t.end() }) const buf = Buffer.alloc(512) @@ -1058,21 +1294,27 @@ t.test('pack ReadEntries', t => { }) t.test('filter out', t => { - const re1 = new ReadEntry(new Header({ - path: 'a', - type: 'File', - size: 1, - })) - const re2 = new ReadEntry(new Header({ - path: 'x', - type: 'File', - size: 1, - })) - const re3 = new ReadEntry(new Header({ - path: 'y', - type: 'File', - size: 1, - })) + const re1 = new ReadEntry( + new Header({ + path: 'a', + type: 'File', + size: 1, + }), + ) + const re2 = new ReadEntry( + new Header({ + path: 'x', + type: 'File', + size: 1, + }), + ) + const re3 = new ReadEntry( + new Header({ + path: 'y', + type: 'File', + size: 1, + }), + ) const p = new Pack({ filter: p => p === 'x' }) p.add(re1) p.add(re2) @@ -1082,9 +1324,9 @@ t.test('pack ReadEntries', t => { p.on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 2048) - t.match(data.slice(1024).toString(), /^\0+$/) - t.equal(data.slice(0, 100).toString().replace(/\0.*$/, ''), 'x') - t.equal(data.slice(512, 514).toString(), 
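// What the 'pack ReadEntries' tests establish: Pack accepts an
// already-parsed ReadEntry (not just a filesystem path), passing its
// header and 512-byte body blocks straight through, and prepending
// `prefix` to the stored path when one is set. Compact sketch, values
// illustrative:
import { Pack } from '../dist/esm/pack.js'
import { ReadEntry } from '../dist/esm/read-entry.js'
import { Header } from '../dist/esm/header.js'
const entry = new ReadEntry(
  new Header({ path: 'x', type: 'File', size: 1 }),
)
const p = new Pack({ prefix: 'y' }) // stored path becomes 'y/x'
p.end(entry)
const body = Buffer.alloc(512)
body.write('x')
entry.end(body) // one full block carrying the 1-byte file body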
'x\0') + t.match(data.subarray(1024).toString(), /^\0+$/) + t.equal(data.subarray(0, 100).toString().replace(/\0.*$/, ''), 'x') + t.equal(data.subarray(512, 514).toString(), 'x\0') t.end() }) { @@ -1119,7 +1361,7 @@ t.test('filter out everything', t => { t.test('sync', t => { const out = [] - const p = new Pack.Sync({ cwd: files, filter: filter }) + const p = new PackSync({ cwd: files, filter: filter }) p.on('data', c => out.push(c)) p.end('./') check(out, t) @@ -1127,7 +1369,7 @@ t.test('filter out everything', t => { t.test('async', t => { const out = [] - const p = new Pack.Sync({ cwd: files, filter: filter }) + const p = new PackSync({ cwd: files, filter: filter }) p.on('data', c => out.push(c)) p.on('end', _ => check(out, t)) p.end('./') @@ -1149,41 +1391,53 @@ t.test('fs.open fails', t => { t.test('sync', t => { t.plan(1) - t.throws(_ => - new Pack.Sync({ cwd: files }).end('one-byte.txt'), poop) + t.throws( + _ => new PackSync({ cwd: files }).end('one-byte.txt'), + poop, + ) }) t.end() }) -const write = opts => new Promise((resolve, reject) => { - const p = new Pack() - let totalSize = 0 - p.on('data', d => totalSize += d.length) - p.once('error', reject) - p.once('end', () => resolve(totalSize)) - - const file1 = new ReadEntry(new Header({ - path: 'file1.txt', - size: 5, - })) - if (opts.before) { - file1.end('file1') - p.add(file1) - } else { - p.add(file1) - file1.end('file1') - } +const write = opts => + new Promise((resolve, reject) => { + const p = new Pack() + let totalSize = 0 + p.on('data', d => (totalSize += d.length)) + p.once('error', reject) + p.once('end', () => resolve(totalSize)) + + const file1 = new ReadEntry( + new Header({ + path: 'file1.txt', + size: 5, + type: 'File', + }), + ) + if (opts.before) { + file1.end('file1') + p.add(file1) + } else { + p.add(file1) + file1.end('file1') + } - p.end() -}) + p.end() + }) t.test('padding works regardless of arite/add order', t => Promise.all([ write({ before: true }), write({ before: false }), ]).then(res => - t.equal(res[0], res[1], 'length is the same regardless of write/add order'))) + t.equal( + res[0], + res[1], + 'length is the same regardless of write/add order', + ), + ), +) t.test('prefix and subdirs', t => { const dir = path.resolve(fixtures, 'pack-prefix-subdirs') @@ -1214,7 +1468,8 @@ t.test('prefix and subdirs', t => { const check = (out, t) => { const data = Buffer.concat(out) expect.forEach((e, i) => - t.equal(e, data.slice(i * 512, i * 512 + e.length).toString())) + t.equal(e, data.subarray(i * 512, i * 512 + e.length).toString()), + ) t.end() } @@ -1235,8 +1490,8 @@ t.test('prefix and subdirs', t => { }) return t.test('sync', t => { - t.test('.', t => runTest(t, '.', Pack.Sync)) - return t.test('./', t => runTest(t, './', Pack.Sync)) + t.test('.', t => runTest(t, '.', PackSync)) + return t.test('./', t => runTest(t, './', PackSync)) }) }) @@ -1291,9 +1546,9 @@ t.test('prefix and hard links', t => { const data = Buffer.concat(out) expect.forEach((e, i) => { if (typeof e === 'string') { - t.equal(data.slice(i * 512, i * 512 + e.length).toString(), e) + t.equal(data.subarray(i * 512, i * 512 + e.length).toString(), e) } else { - t.match(new Header(data.slice(i * 512, (i + 1) * 512)), e) + t.match(new Header(data.subarray(i * 512, (i + 1) * 512)), e) } }) t.end() @@ -1330,8 +1585,8 @@ t.test('prefix and hard links', t => { }) t.test('sync', t => { - t.test('.', t => runTest(t, '.', Pack.Sync)) - return t.test('./', t => runTest(t, './', Pack.Sync)) + t.test('.', t => runTest(t, '.', PackSync)) + return 
t.test('./', t => runTest(t, './', PackSync)) }) t.end() diff --git a/test/parse.js b/test/parse.js index 2cc68782..ebee05e6 100644 --- a/test/parse.js +++ b/test/parse.js @@ -1,24 +1,30 @@ -'use strict' -const t = require('tap') -const Parse = require('../lib/parse.js') - -const makeTar = require('./make-tar.js') -const fs = require('fs') -const path = require('path') +import t from 'tap' +import { Parser } from '../dist/esm/parse.js' +import { makeTar } from './fixtures/make-tar.js' +import fs, { readFileSync } from 'fs' +import path, { dirname } from 'path' +import zlib from 'zlib' +import { Minipass } from 'minipass' +import { Header } from '../dist/esm/header.js' +import EE from 'events' +import { fileURLToPath } from 'url' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) const tardir = path.resolve(__dirname, 'fixtures/tars') -const zlib = require('zlib') -const { Minipass } = require('minipass') -const Header = require('../lib/header.js') -const EE = require('events').EventEmitter t.test('fixture tests', t => { class ByteStream extends Minipass { - write (chunk) { + write(chunk) { for (let i = 0; i < chunk.length - 1; i++) { - super.write(chunk.slice(i, i + 1)) + super.write(chunk.subarray(i, i + 1)) } - return super.write(chunk.slice(chunk.length - 1, chunk.length)) + const ret = super.write(chunk.subarray(chunk.length - 1, chunk.length)) + if (ret === false) { + throw new Error('BS write return false') + } + return ret } } @@ -26,21 +32,28 @@ t.test('fixture tests', t => { let ok = true let cursor = 0 p.on('entry', entry => { - ok = ok && t.match(['entry', entry], expect[cursor++], entry.path) + ok = + ok && t.match(['entry', entry], expect[cursor++], entry.path) if (slow) { - setTimeout(_ => entry.resume()) + setTimeout(() => entry.resume()) } else { entry.resume() } }) p.on('ignoredEntry', entry => { - ok = ok && t.match(['ignoredEntry', entry], expect[cursor++], - 'ignored: ' + entry.path) + ok = + ok && + t.match( + ['ignoredEntry', entry], + expect[cursor++], + 'ignored: ' + entry.path, + ) }) - p.on('warn', (c, message, data) => { - ok = ok && t.match(['warn', c, message], expect[cursor++], 'warn') + p.on('warn', (c, message, _data) => { + ok = + ok && t.match(['warn', c, message], expect[cursor++], 'warn') }) - p.on('nullBlock', _ => { + p.on('nullBlock', () => { ok = ok && t.match(['nullBlock'], expect[cursor++], 'null') }) p.on('error', er => { @@ -49,204 +62,261 @@ t.test('fixture tests', t => { p.on('meta', meta => { ok = ok && t.match(['meta', meta], expect[cursor++], 'meta') }) - p.on('eof', _ => { + p.on('eof', () => { ok = ok && t.match(['eof'], expect[cursor++], 'eof') }) - p.on('end', _ => { + p.on('end', () => { ok = ok && t.match(['end'], expect[cursor++], 'end') t.end() }) } t.jobs = 4 - const path = require('path') const parsedir = path.resolve(__dirname, 'fixtures/parse') const files = fs.readdirSync(tardir) - const maxMetaOpt = [250, null] + const maxMetaOpt = [250, undefined] const filterOpt = [true, false] const strictOpt = [true, false] const runTest = (file, maxMeta, filter, strict) => { const tardata = fs.readFileSync(file) const base = path.basename(file, '.tar') - t.test('file=' + base + '.tar' + - ' maxmeta=' + maxMeta + - ' filter=' + filter + - ' strict=' + strict, t => { - const o = - (maxMeta ? '-meta-' + maxMeta : '') + - (filter ? '-filter' : '') + - (strict ? '-strict' : '') - const tail = (o ? 
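// The import rewrite above is the recurring ESM migration move in this
// patch: require() of lib/*.js becomes named imports from dist/esm/*.js,
// and because ES modules have no __dirname, it is rebuilt from
// import.meta.url. The boilerplate, exactly as these tests use it:
import { fileURLToPath } from 'url'
import { dirname } from 'path'
const __filename = fileURLToPath(import.meta.url)
const __dirname = dirname(__filename)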
'-' + o : '') + '.json' - const eventsFile = parsedir + '/' + base + tail - const expect = require(eventsFile) - - t.test('uncompressed one byte at a time', t => { - const bs = new ByteStream() - const opt = (maxMeta || filter || strict) ? { - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - } : null - const bp = new Parse(opt) - trackEvents(t, expect, bp) - bs.pipe(bp) - bs.end(tardata) - }) - - t.test('uncompressed all at once', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, + t.test( + 'file=' + + base + + '.tar' + + ' maxmeta=' + + maxMeta + + ' filter=' + + filter + + ' strict=' + + strict, + t => { + const o = + (maxMeta ? '-meta-' + maxMeta : '') + + (filter ? '-filter' : '') + + (strict ? '-strict' : '') + const tail = (o ? '-' + o : '') + '.json' + const eventsFile = parsedir + '/' + base + tail + const expect = JSON.parse(readFileSync(eventsFile, 'utf8')) + + t.test('uncompressed one byte at a time', t => { + const bs = new ByteStream() + bs.on('data', c => { + if (!Buffer.isBuffer(c)) throw new Error('wat1') + if (c.length !== 1) throw new Error('wat2') + }) + const opt = + maxMeta || filter || strict + ? { + maxMetaEntrySize: maxMeta, + filter: filter + ? (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + } + : undefined + const p = new Parser(opt) + trackEvents(t, expect, p) + bs.pipe(p) + bs.write(tardata) + bs.end() }) - trackEvents(t, expect, p) - p.end(tardata) - }) - - t.test('uncompressed one byte at a time, filename .tbr', t => { - const bs = new ByteStream() - const opt = (maxMeta || filter || strict) ? { - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - file: 'example.tbr', - } : null - const bp = new Parse(opt) - trackEvents(t, expect, bp) - bs.pipe(bp) - bs.end(tardata) - }) - - t.test('uncompressed all at once, filename .tar.br', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - file: 'example.tar.br', + + t.test('uncompressed all at once', t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: filter + ? (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + }) + trackEvents(t, expect, p) + p.end(tardata) }) - trackEvents(t, expect, p) - p.end(tardata) - }) - - t.test('gzipped all at once', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, + + t.test( + 'uncompressed one byte at a time, filename .tbr', + t => { + const bs = new ByteStream() + const opt = + maxMeta || filter || strict + ? { + maxMetaEntrySize: maxMeta, + filter: filter + ? (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + file: 'example.tbr', + } + : undefined + const bp = new Parser(opt) + trackEvents(t, expect, bp) + bs.pipe(bp) + bs.end(tardata) + }, + ) + + t.test('uncompressed all at once, filename .tar.br', t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: filter + ? 
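// A related ESM detail from the hunk above: require(eventsFile) could
// load the .json expectation fixtures directly, but a bare import cannot,
// so they are now read and parsed explicitly (eventsFile as computed
// just above):
import { readFileSync } from 'fs'
const expect = JSON.parse(readFileSync(eventsFile, 'utf8'))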
(_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + file: 'example.tar.br', + }) + trackEvents(t, expect, p) + p.end(tardata) }) - trackEvents(t, expect, p) - p.end(zlib.gzipSync(tardata)) - }) - - t.test('gzipped all at once, filename .tbr', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - file: 'example.tbr', + + t.test('gzipped all at once', t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: filter + ? (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + }) + trackEvents(t, expect, p) + p.end(zlib.gzipSync(tardata)) }) - trackEvents(t, expect, p) - p.end(zlib.gzipSync(tardata)) - }) - - t.test('gzipped byte at a time', t => { - const bs = new ByteStream() - const bp = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, + + t.test('gzipped all at once, filename .tbr', t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: filter + ? (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + file: 'example.tbr', + }) + trackEvents(t, expect, p) + p.end(zlib.gzipSync(tardata)) }) - trackEvents(t, expect, bp) - bs.pipe(bp) - bs.end(zlib.gzipSync(tardata)) - }) - - t.test('compress with brotli based on filename .tar.br', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - file: 'example.tar.br', + + t.test('gzipped byte at a time', t => { + const bs = new ByteStream() + const bp = new Parser({ + maxMetaEntrySize: maxMeta, + filter: filter + ? (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + }) + trackEvents(t, expect, bp) + bs.pipe(bp) + bs.end(zlib.gzipSync(tardata)) }) - trackEvents(t, expect, p) - p.end(zlib.brotliCompressSync(tardata)) - }) - - t.test('compress with brotli based on filename .tbr', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - file: 'example.tbr', + + t.test( + 'compress with brotli based on filename .tar.br', + t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: filter + ? (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + file: 'example.tar.br', + }) + trackEvents(t, expect, p) + p.end(zlib.brotliCompressSync(tardata)) + }, + ) + + t.test('compress with brotli based on filename .tbr', t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: filter + ? (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + file: 'example.tbr', + }) + trackEvents(t, expect, p) + p.end(zlib.brotliCompressSync(tardata)) }) - trackEvents(t, expect, p) - p.end(zlib.brotliCompressSync(tardata)) - }) - - t.test('compress with brotli all at once', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - brotli: {}, + + t.test('compress with brotli all at once', t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: filter + ? 
(_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + brotli: {}, + }) + trackEvents(t, expect, p) + p.end(zlib.brotliCompressSync(tardata)) }) - trackEvents(t, expect, p) - p.end(zlib.brotliCompressSync(tardata)) - }) - - t.test('compress with brotli byte at a time', t => { - const bs = new ByteStream() - const bp = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - brotli: {}, + + t.test('compress with brotli byte at a time', t => { + const bs = new ByteStream() + const bp = new Parser({ + maxMetaEntrySize: maxMeta, + filter: filter + ? (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + brotli: {}, + }) + trackEvents(t, expect, bp) + bs.pipe(bp) + bs.end(zlib.brotliCompressSync(tardata)) }) - trackEvents(t, expect, bp) - bs.pipe(bp) - bs.end(zlib.brotliCompressSync(tardata)) - }) - - t.test('compress with brotli .tbr byte at a time', t => { - const bs = new ByteStream() - const bp = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, - file: 'example.tbr', + + t.test('compress with brotli .tbr byte at a time', t => { + const bs = new ByteStream() + const bp = new Parser({ + maxMetaEntrySize: maxMeta, + filter: filter + ? (_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + file: 'example.tbr', + }) + trackEvents(t, expect, bp) + bs.pipe(bp) + bs.end(zlib.brotliCompressSync(tardata)) }) - trackEvents(t, expect, bp) - bs.pipe(bp) - bs.end(zlib.brotliCompressSync(tardata)) - }) - - t.test('async chunks', t => { - const p = new Parse({ - maxMetaEntrySize: maxMeta, - filter: filter ? (path, entry) => entry.size % 2 !== 0 : null, - strict: strict, + + t.test('async chunks', t => { + const p = new Parser({ + maxMetaEntrySize: maxMeta, + filter: filter + ? 
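// The option matrix above pins down how Parser chooses a decompressor:
// gzip announces itself with magic bytes and is detected from the stream,
// while brotli has no magic and must be opted into, either directly via
// the `brotli` option or inferred from a .tbr / .tar.br `file` name hint.
// Sketch, assuming `tardata` is a raw tar buffer:
import { Parser } from '../dist/esm/parse.js'
import zlib from 'zlib'
new Parser().end(zlib.gzipSync(tardata)) // gzip: detected automatically
new Parser({ brotli: {} }).end(zlib.brotliCompressSync(tardata)) // explicit
new Parser({ file: 'example.tbr' }) // filename hint implies brotli
  .end(zlib.brotliCompressSync(tardata))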
(_path, entry) => entry.size % 2 !== 0 + : undefined, + strict: strict, + }) + trackEvents(t, expect, p, true) + p.write(tardata.subarray(0, Math.floor(tardata.length / 2))) + process.nextTick(() => + p.end(tardata.subarray(Math.floor(tardata.length / 2))), + ) }) - trackEvents(t, expect, p, true) - p.write(tardata.slice(0, Math.floor(tardata.length / 2))) - process.nextTick(_ => p.end(tardata.slice(Math.floor(tardata.length / 2)))) - }) - t.end() - }) + t.end() + }, + ) } files - .map(f => path.resolve(tardir, f)).forEach(file => + .map(f => path.resolve(tardir, f)) + .forEach(file => maxMetaOpt.forEach(maxMeta => strictOpt.forEach(strict => filterOpt.forEach(filter => - runTest(file, maxMeta, filter, strict))))) + runTest(file, maxMeta, filter, strict), + ), + ), + ), + ) t.end() }) t.test('strict warn with an error emits that error', t => { t.plan(1) - const p = new Parse({ + const p = new Parser({ strict: true, }) p.on('error', emitted => t.equal(emitted, er)) @@ -256,8 +326,8 @@ t.test('strict warn with an error emits that error', t => { t.test('onwarn gets added to the warn event', t => { t.plan(1) - const p = new Parse({ - onwarn (code, message) { + const p = new Parser({ + onwarn(_code, message) { t.equal(message, 'this is fine') }, }) @@ -266,7 +336,7 @@ t.test('onwarn gets added to the warn event', t => { t.test('onentry gets added to entry event', t => { t.plan(1) - const p = new Parse({ + const p = new Parser({ onentry: entry => t.equal(entry, 'yes hello this is dog'), }) p.emit('entry', 'yes hello this is dog') @@ -400,36 +470,56 @@ t.test('drain event timings', t => { ].map(chunks => makeTar(chunks)) const expect = [ - 'one', 'two', 'three', - 'four', 'five', 'six', 'seven', 'eight', - 'four', 'five', 'six', 'seven', 'eight', + 'one', + 'two', + 'three', + 'four', + 'five', + 'six', + 'seven', + 'eight', + 'four', + 'five', + 'six', + 'seven', + 'eight', 'nine', - 'one', 'two', 'three', - 'four', 'five', 'six', 'seven', 'eight', - 'four', 'five', 'six', 'seven', 'eight', + 'one', + 'two', + 'three', + 'four', + 'five', + 'six', + 'seven', + 'eight', + 'four', + 'five', + 'six', + 'seven', + 'eight', 'nine', ] class SlowStream extends EE { - write () { - setTimeout(_ => this.emit('drain')) + write() { + setTimeout(() => this.emit('drain')) return false } - end () { + end() { return this.write() } } let currentEntry const autoPipe = true - const p = new Parse({ + const p = new Parser({ ondone, onentry: entry => { t.equal(entry.path, expect.shift()) currentEntry = entry if (autoPipe) { - setTimeout(_ => entry.pipe(new SlowStream())) + setTimeout(() => entry.pipe(new SlowStream())) } }, }) @@ -441,7 +531,7 @@ t.test('drain event timings', t => { }) let interval - const go = _ => { + const go = () => { const d = data.shift() if (d === undefined) { return p.end() @@ -454,19 +544,21 @@ t.test('drain event timings', t => { } const hunklen = Math.floor(d.length / 2) - const hunks = [ - d.slice(0, hunklen), - d.slice(hunklen), - ] + const hunks = [d.subarray(0, hunklen), d.subarray(hunklen)] p.write(hunks[0]) if (currentEntry && !paused) { - console.error('has current entry') currentEntry.pause() paused = true } - if (!t.equal(p.write(hunks[1]), false, 'write should return false: ' + d)) { + if ( + !t.equal( + p.write(hunks[1]), + false, + 'write should return false: ' + d, + ) + ) { return t.end() } @@ -478,7 +570,7 @@ t.test('drain event timings', t => { } p.once('drain', go) - p.on('end', _ => { + p.on('end', () => { clearInterval(interval) t.ok(sawOndone) t.end() @@ -542,18 +634,18 
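// The 'async chunks' test above checks that Parser tolerates input split
// at an arbitrary byte boundary across event-loop turns; the drain test
// that follows checks that write() returns false while a consumer applies
// backpressure. The split-write half, sketched with an assumed `tardata`:
import { Parser } from '../dist/esm/parse.js'
const p = new Parser({ onentry: entry => entry.resume() })
const half = Math.floor(tardata.length / 2)
p.write(tardata.subarray(0, half))
process.nextTick(() => p.end(tardata.subarray(half)))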
@@ t.test('consume while consuming', t => { ]) const runTest = (t, size) => { - const p = new Parse() - const first = data.slice(0, size) - const rest = data.slice(size) - p.once('entry', entry => { + const p = new Parser() + const first = data.subarray(0, size) + const rest = data.subarray(size) + p.once('entry', _entry => { for (let pos = 0; pos < rest.length; pos += size) { - p.write(rest.slice(pos, pos + size)) + p.write(rest.subarray(pos, pos + size)) } p.end() }) .on('entry', entry => entry.resume()) - .on('end', _ => t.end()) + .on('end', () => t.end()) .write(first) } @@ -579,7 +671,9 @@ t.test('truncated input', t => { t.test('truncated at block boundary', t => { const warnings = [] - const p = new Parse({ onwarn: (c, message) => warnings.push(message) }) + const p = new Parser({ + onwarn: (_c, message) => warnings.push(message), + }) p.end(data) t.same(warnings, [ 'Truncated input (needed 512 more bytes, only 0 available)', @@ -589,7 +683,9 @@ t.test('truncated input', t => { t.test('truncated mid-block', t => { const warnings = [] - const p = new Parse({ onwarn: (c, message) => warnings.push(message) }) + const p = new Parser({ + onwarn: (_c, message) => warnings.push(message), + }) p.write(data) p.end(Buffer.from('not a full block')) t.same(warnings, [ @@ -617,33 +713,37 @@ t.test('truncated gzip input', t => { '', ]) const tgz = zlib.gzipSync(raw) - const split = Math.floor(tgz.length * 2 / 3) - const trunc = tgz.slice(0, split) + const split = Math.floor((tgz.length * 2) / 3) + const trunc = tgz.subarray(0, split) const skipEarlyEnd = process.version.match(/^v4\./) - t.test('early end', { - skip: skipEarlyEnd ? 'not a zlib error on v4' : false, - }, t => { - const warnings = [] - const p = new Parse() - p.on('error', er => warnings.push(er.message)) - let aborted = false - p.on('abort', _ => aborted = true) - p.end(trunc) - t.equal(aborted, true, 'aborted writing') - t.same(warnings, ['zlib: unexpected end of file']) - t.end() - }) + t.test( + 'early end', + { + skip: skipEarlyEnd ? 
'not a zlib error on v4' : false, + }, + t => { + const warnings = [] + const p = new Parser() + p.on('error', er => warnings.push(er.message)) + let aborted = false + p.on('abort', () => (aborted = true)) + p.end(trunc) + t.equal(aborted, true, 'aborted writing') + t.same(warnings, ['zlib: unexpected end of file']) + t.end() + }, + ) t.test('just wrong', t => { const warnings = [] - const p = new Parse() + const p = new Parser() p.on('error', er => warnings.push(er.message)) let aborted = false - p.on('abort', _ => aborted = true) + p.on('abort', () => (aborted = true)) p.write(trunc) p.write(trunc) - p.write(tgz.slice(split)) + p.write(tgz.subarray(split)) p.end() t.equal(aborted, true, 'aborted writing') t.match(warnings, [/^zlib: /]) @@ -655,40 +755,42 @@ t.test('truncated gzip input', t => { t.test('end while consuming', t => { // https://github.com/npm/node-tar/issues/157 - const data = zlib.gzipSync(makeTar([ - { - path: 'package/package.json', - type: 'File', - size: 130, - }, - new Array(131).join('x'), - { - path: 'package/node_modules/@c/d/node_modules/e/package.json', - type: 'File', - size: 30, - }, - new Array(31).join('e'), - { - path: 'package/node_modules/@c/d/package.json', - type: 'File', - size: 33, - }, - new Array(34).join('d'), - { - path: 'package/node_modules/a/package.json', - type: 'File', - size: 59, - }, - new Array(60).join('a'), - { - path: 'package/node_modules/b/package.json', - type: 'File', - size: 30, - }, - new Array(31).join('b'), - '', - '', - ])) + const data = zlib.gzipSync( + makeTar([ + { + path: 'package/package.json', + type: 'File', + size: 130, + }, + new Array(131).join('x'), + { + path: 'package/node_modules/@c/d/node_modules/e/package.json', + type: 'File', + size: 30, + }, + new Array(31).join('e'), + { + path: 'package/node_modules/@c/d/package.json', + type: 'File', + size: 33, + }, + new Array(34).join('d'), + { + path: 'package/node_modules/a/package.json', + type: 'File', + size: 59, + }, + new Array(60).join('a'), + { + path: 'package/node_modules/b/package.json', + type: 'File', + size: 30, + }, + new Array(31).join('b'), + '', + '', + ]), + ) const actual = [] const expect = [ @@ -700,7 +802,7 @@ t.test('end while consuming', t => { ] const mp = new Minipass() - const p = new Parse({ + const p = new Parser({ onentry: entry => { actual.push(entry.path) entry.resume() @@ -716,7 +818,7 @@ t.test('end while consuming', t => { }) t.test('bad archives', t => { - const p = new Parse() + const p = new Parser() const warnings = [] p.on('warn', (code, msg, data) => { warnings.push([code, msg, data]) @@ -735,8 +837,8 @@ t.test('bad archives', t => { }) t.test('header that throws', t => { - const p = new Parse() - p.on('warn', (c, m, d) => { + const p = new Parser() + p.on('warn', (_c, m, d) => { t.equal(m, 'invalid base256 encoding') t.match(d, { code: 'TAR_ENTRY_INVALID', @@ -753,14 +855,19 @@ t.test('header that throws', t => { }) h.encode() const buf = h.block - const bad = Buffer.from([0x81, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]) + const bad = Buffer.from([ + 0x81, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + ]) bad.copy(buf, 100) - t.throws(() => new Header(buf), 'the header with that buffer throws') + t.throws( + () => new Header(buf), + 'the header with that buffer throws', + ) p.write(buf) }) t.test('warnings that are not so bad', t => { - const p = new Parse() + const p = new Parser() const warnings = [] p.on('warn', (code, m, d) => { warnings.push([code, m, d]) @@ -768,7 +875,7 @@ t.test('warnings that are not so bad', t => { }) 
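// Failure-mode summary for the truncation tests above: a short plain tar
// merely warns ('Truncated input ...') and finishes, while a corrupted
// gzip stream is fatal, emitting 'error' and 'abort'. The non-strict warn
// side, sketched with an assumed truncated buffer:
import { Parser } from '../dist/esm/parse.js'
const warnings = []
const p = new Parser({
  onwarn: (code, message) => warnings.push(message),
})
p.end(truncatedTar) // -> 'Truncated input (needed 512 more bytes, ...)'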
// the parser doesn't actually decide what's "ok" or "supported", // it just parses. So we have to set it ourselves like unpack does - p.once('entry', entry => entry.invalid = true) + p.once('entry', entry => (entry.invalid = true)) p.on('entry', entry => entry.resume()) const data = makeTar([ { diff --git a/test/path-reservations.js b/test/path-reservations.js index 9a1d7a77..6d9f7452 100644 --- a/test/path-reservations.js +++ b/test/path-reservations.js @@ -1,19 +1,25 @@ -const t = require('tap') +import t from 'tap' + +import { posix, win32 } from 'node:path' // load up the posix and windows versions of the reserver if (process.platform === 'win32') { process.env.TESTING_TAR_FAKE_PLATFORM = 'posix' } -const { reserve } = t.mock('../lib/path-reservations.js', { - path: require('path').posix, -})() + + +const { PathReservations } = await t.mockImport('../dist/esm/path-reservations.js', { + path: posix, +}) + delete process.env.TESTING_TAR_FAKE_PLATFORM if (process.platform !== 'win32') { process.env.TESTING_TAR_FAKE_PLATFORM = 'win32' } -const { reserve: winReserve } = t.mock('../lib/path-reservations.js', { - path: require('path').win32, -})() + +const { PathReservations: WinPathReservations } = await t.mockImport('../dist/esm/path-reservations.js', { + path: win32, +}) t.test('basic race', t => { // simulate the race conditions we care about @@ -62,11 +68,13 @@ t.test('basic race', t => { t.end() } - t.ok(reserve(['a/b/c/d'], file), 'file starts right away') - t.notOk(reserve(['a/B/c////D', 'a/b/e'], link), 'link waits') - t.notOk(reserve(['a/b/e/f'], dir), 'dir waits') - t.notOk(reserve(['a/b'], dir2), 'dir2 waits') - t.notOk(reserve(['a/b/x'], dir3), 'dir3 waits') + const r = new PathReservations() + + t.ok(r.reserve(['a/b/c/d'], file), 'file starts right away') + t.notOk(r.reserve(['a/B/c////D', 'a/b/e'], link), 'link waits') + t.notOk(r.reserve(['a/b/e/f'], dir), 'dir waits') + t.notOk(r.reserve(['a/b'], dir2), 'dir2 waits') + t.notOk(r.reserve(['a/b/x'], dir3), 'dir3 waits') }) t.test('unicode shenanigans', t => { @@ -89,8 +97,9 @@ t.test('unicode shenanigans', t => { } const cafePath1 = `c/a/f/${e1}` const cafePath2 = `c/a/f/${e2}` - t.ok(reserve([cafePath1], cafe1)) - t.notOk(reserve([cafePath2], cafe2)) + const r = new PathReservations() + t.ok(r.reserve([cafePath1], cafe1)) + t.notOk(r.reserve([cafePath2], cafe2)) }) t.test('absolute paths and trailing slash', t => { @@ -128,14 +137,15 @@ t.test('absolute paths and trailing slash', t => { t.end() } } - t.ok(reserve(['/p/a/t/h'], a1)) - t.notOk(reserve(['/p/a/t/h/'], a2)) - t.ok(reserve(['p/a/t/h'], r1)) - t.notOk(reserve(['p/a/t/h/'], r2)) + const r = new PathReservations() + t.ok(r.reserve(['/p/a/t/h'], a1)) + t.notOk(r.reserve(['/p/a/t/h/'], a2)) + t.ok(r.reserve(['p/a/t/h'], r1)) + t.notOk(r.reserve(['p/a/t/h/'], r2)) }) t.test('on windows, everything collides with everything', t => { - const reserve = winReserve + const r = new WinPathReservations() let called1 = false let called2 = false const f1 = done => { @@ -151,6 +161,6 @@ t.test('on windows, everything collides with everything', t => { done() t.end() } - t.equal(reserve(['some/path'], f1), true) - t.equal(reserve(['other/path'], f2), false) + t.equal(r.reserve(['some/path'], f1), true) + t.equal(r.reserve(['other/path'], f2), false) }) diff --git a/test/pax.js b/test/pax.js index cef9fc51..eb7f9260 100644 --- a/test/pax.js +++ b/test/pax.js @@ -1,6 +1,5 @@ -'use strict' -const t = require('tap') -const Pax = require('../lib/pax.js') +import t from 'tap' +import { Pax 
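// API change worth noting in the hunk above: path-reservations no longer
// exports a factory returning { reserve }; each scenario constructs a
// PathReservations instance. reserve() returns true when the callback can
// run immediately and false when it must wait behind overlapping paths,
// and every callback gets a done() to release its claim:
import { PathReservations } from '../dist/esm/path-reservations.js'
const r = new PathReservations()
r.reserve(['a/b/c/d'], done => {
  // ... do the work that touches a/b/c/d ...
  done() // release, letting queued reservations proceed
})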
} from '../dist/esm/pax.js' t.test('create a pax', t => { const p = new Pax({ @@ -18,8 +17,6 @@ t.test('create a pax', t => { nlink: 1, }) - // console.log(p.encode().toString('hex').split('').reduce((s,c)=>{if(s[s.length-1].length<64)s[s.length-1]+=c;else s.push(c);return s},[''])) - const buf = Buffer.from( // pax entry header '5061784865616465722f666f6f2e747874000000000000000000000000000000' + @@ -59,13 +56,13 @@ t.test('create a pax', t => { 'hex') const actual = p.encode() - t.equal(actual.toString('hex'), buf.toString('hex')) + t.match(actual, buf) t.end() }) t.test('null pax', t => { const p = new Pax({}) - t.equal(p.encode(), null) + t.same(p.encode(), Buffer.allocUnsafe(0)) t.end() }) @@ -74,8 +71,6 @@ t.test('tiny pax', t => { // an error? const p = new Pax({ path: 'ab' }, true) const actual = p.encode() - // console.log(actual.toString('hex').split('').reduce((s,c)=>{if(s[s.length-1].length<64)s[s.length-1]+=c;else s.push(c);return s},[''])) - // return Promise.resolve() const buf = Buffer.from( // header @@ -120,104 +115,112 @@ t.test('tiny pax', t => { '0000000000000000000000000000000000000000000000000000000000000000', 'hex') - t.equal(actual.toString('hex'), buf.toString('hex')) + t.same(actual, buf) t.end() }) t.test('parse', t => { - t.same(Pax.parse('11 path=ab\n', { uid: 24561 }, true), { - atime: null, - charset: null, - comment: null, - ctime: null, - gid: null, - gname: null, - linkpath: null, - mtime: null, + const p = Pax.parse('11 path=ab\n', { uid: 24561 }, true) + t.same(p, Object.assign(Object.create(Pax.prototype), { + atime: undefined, + mode: undefined, + charset: undefined, + comment: undefined, + ctime: undefined, + gid: undefined, + gname: undefined, + linkpath: undefined, + mtime: undefined, path: 'ab', - size: null, + size: undefined, uid: 24561, - uname: null, - dev: null, - ino: null, - nlink: null, + uname: undefined, + dev: undefined, + ino: undefined, + nlink: undefined, global: true, - }) + })) - t.same(Pax.parse('11 path=ab\n', null, false), { - atime: null, - charset: null, - comment: null, - ctime: null, - gid: null, - gname: null, - linkpath: null, - mtime: null, + t.same(Pax.parse('11 path=ab\n'), Object.assign(Object.create(Pax.prototype), { + atime: undefined, + mtime: undefined, + ctime: undefined, + charset: undefined, + comment: undefined, + gid: undefined, + gname: undefined, + uname: undefined, + linkpath: undefined, path: 'ab', - size: null, - uid: null, - uname: null, - dev: null, - ino: null, - nlink: null, + size: undefined, + mode: undefined, + uid: undefined, + uname: undefined, + dev: undefined, + ino: undefined, + nlink: undefined, global: false, - }) + })) - t.same(Pax.parse('9 gid=20\n9 path=x\n', null, false), { - atime: null, - charset: null, - comment: null, - ctime: null, + t.same(Pax.parse('9 gid=20\n9 path=x\n'), { + atime: undefined, + mtime: undefined, + ctime: undefined, + charset: undefined, + comment: undefined, gid: 20, - gname: null, - linkpath: null, - mtime: null, + gname: undefined, + linkpath: undefined, + mtime: undefined, path: 'x', - size: null, - uid: null, - uname: null, - dev: null, - ino: null, - nlink: null, + size: undefined, + uid: undefined, + uname: undefined, + dev: undefined, + ino: undefined, + nlink: undefined, + mode: undefined, global: false, }) - t.same(Pax.parse('9 gid=20\n9 path=x\n', null, false), { - atime: null, - charset: null, - comment: null, - ctime: null, + t.same(Pax.parse('9 gid=20\n9 path=x\n'), { + atime: undefined, + charset: undefined, + comment: undefined, + ctime: undefined, 
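// Two behavior changes are asserted in the pax tests here: Pax#encode()
// now always yields a Buffer (zero-length when there is nothing to
// encode, where it previously returned null), and Pax.parse() returns a
// Pax instance whose absent fields are undefined rather than null.
// Sketch:
import { Pax } from '../dist/esm/pax.js'
new Pax({}).encode().length // 0: empty buffer, not null
const p = Pax.parse('11 path=ab\n', { uid: 24561 }, true)
p.path // 'ab'
p.size // undefined: the record carried no size field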
gid: 20, - gname: null, - linkpath: null, - mtime: null, + gname: undefined, + linkpath: undefined, + mtime: undefined, path: 'x', - size: null, - uid: null, - uname: null, - dev: null, - ino: null, - nlink: null, + size: undefined, + uid: undefined, + uname: undefined, + dev: undefined, + ino: undefined, + mode: undefined, + nlink: undefined, global: false, }) - t.same(Pax.parse('20 mtime=1491436800\n', null, false), { - atime: null, - charset: null, - comment: null, - ctime: null, - gid: null, - gname: null, - linkpath: null, + t.same(Pax.parse('20 mtime=1491436800\n'), { + atime: undefined, + charset: undefined, + comment: undefined, + ctime: undefined, + gid: undefined, + gname: undefined, + linkpath: undefined, mtime: new Date('2017-04-06'), - path: null, - size: null, - uid: null, - uname: null, - dev: null, - ino: null, - nlink: null, + path: undefined, + size: undefined, + uid: undefined, + uname: undefined, + dev: undefined, + ino: undefined, + nlink: undefined, + mode: undefined, global: false, }) @@ -230,22 +233,23 @@ t.test('parse', t => { const noKey = '10 =pathx\n' - t.same(Pax.parse(breaky + '9 gid=20\n10 path=x\n' + noKey, null, false), { - atime: null, - charset: null, - comment: null, - ctime: null, + t.same(Pax.parse(breaky + '9 gid=20\n10 path=x\n' + noKey), { + atime: undefined, + charset: undefined, + comment: undefined, + ctime: undefined, gid: 20, - gname: null, - linkpath: null, - mtime: null, + gname: undefined, + linkpath: undefined, + mtime: undefined, path: 'x', - size: null, - uid: null, - uname: null, - dev: null, - ino: null, - nlink: null, + size: undefined, + uid: undefined, + uname: undefined, + dev: undefined, + ino: undefined, + nlink: undefined, + mode: undefined, global: false, }) diff --git a/test/read-entry.js b/test/read-entry.js index 4e12e87a..6d690390 100644 --- a/test/read-entry.js +++ b/test/read-entry.js @@ -1,7 +1,6 @@ -'use strict' -const t = require('tap') -const ReadEntry = require('../lib/read-entry.js') -const Header = require('../lib/header.js') +import t from 'tap' +import { ReadEntry } from '../dist/esm/read-entry.js' +import { Header } from '../dist/esm/header.js' t.test('create read entry', t => { const h = new Header({ @@ -19,7 +18,11 @@ t.test('create read entry', t => { }) h.encode() - const entry = new ReadEntry(h, { x: 'y', path: 'foo.txt' }, { z: 0, a: null, b: undefined }) + const entry = new ReadEntry( + h, + { x: 'y', path: 'foo.txt' }, + { z: 0, a: null, b: undefined }, + ) t.ok(entry.header.cksumValid, 'header checksum should be valid') @@ -67,8 +70,8 @@ t.test('create read entry', t => { let data = '' let ended = false - entry.on('data', c => data += c) - entry.on('end', _ => ended = true) + entry.on('data', c => (data += c)) + entry.on('end', _ => (ended = true)) const body = Buffer.alloc(512) body.write(new Array(101).join('z'), 0) @@ -81,6 +84,85 @@ t.test('create read entry', t => { t.end() }) +t.test('entry with extended linkpath', t => { + const h = new Header({ + path: 'oof.txt', + mode: 0o755, + uid: 24561, + gid: 20, + size: 0, + mtime: new Date('2016-04-01T22:00Z'), + ctime: new Date('2016-04-01T22:00Z'), + atime: new Date('2016-04-01T22:00Z'), + type: 'SymbolicLink', + uname: 'isaacs', + gname: 'staff', + }) + h.encode() + + const entry = new ReadEntry( + h, + { x: 'y', linkpath: 'bar.txt', path: 'foo.txt' }, + { z: 0, a: null, b: undefined }, + ) + + t.ok(entry.header.cksumValid, 'header checksum should be valid') + + t.match(entry, { + extended: { x: 'y', path: 'foo.txt', linkpath: 'bar.txt' }, + 
globalExtended: { z: 0, a: null, b: undefined }, + header: { + cksumValid: true, + needPax: false, + path: 'oof.txt', + mode: 0o755, + uid: 24561, + gid: 20, + size: 0, + mtime: new Date('2016-04-01T22:00:00.000Z'), + typeKey: '2', + type: 'SymbolicLink', + linkpath: null, + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + atime: new Date('2016-04-01T22:00:00.000Z'), + ctime: new Date('2016-04-01T22:00:00.000Z'), + }, + blockRemain: 0, + remain: 0, + type: 'SymbolicLink', + meta: false, + ignore: false, + path: 'foo.txt', + mode: 0o755, + uid: 24561, + gid: 20, + uname: 'isaacs', + gname: 'staff', + size: 0, + mtime: new Date('2016-04-01T22:00:00.000Z'), + atime: new Date('2016-04-01T22:00:00.000Z'), + ctime: new Date('2016-04-01T22:00:00.000Z'), + linkpath: 'bar.txt', + x: 'y', + z: 0, + }) + + let data = '' + entry.on('data', c => (data += c)) + + const body = Buffer.alloc(512) + body.write(new Array(101).join('z'), 0) + t.throws(() => entry.write(body)) + entry.end() + + t.equal(data, '') + + t.end() +}) + t.test('meta entry', t => { const h = new Header({ path: 'PaxHeader/foo.txt', @@ -102,11 +184,11 @@ t.test('meta entry', t => { let actual = '' const entry = new ReadEntry(h) - entry.on('data', c => actual += c) + entry.on('data', c => (actual += c)) - entry.write(body.slice(0, 1)) - entry.write(body.slice(1, 25)) - entry.write(body.slice(25)) + entry.write(body.subarray(0, 1)) + entry.write(body.subarray(1, 25)) + entry.write(body.subarray(25)) t.throws(_ => entry.write(Buffer.alloc(1024))) t.equal(actual, expect) @@ -128,6 +210,8 @@ t.test('unknown entry type', t => { gname: 'staff', }) h.encode() + // this triggers its type to be Unsupported, which means that any + // data written to it will be thrown away. h.block.write('9', 156, 1, 'ascii') const body = Buffer.alloc(512) @@ -138,12 +222,12 @@ t.test('unknown entry type', t => { const entry = new ReadEntry(new Header(h.block)) - entry.on('data', c => actual += c) + entry.on('data', c => (actual += c)) - entry.write(body.slice(0, 1)) - entry.write(body.slice(1, 25)) - entry.write(body.slice(25)) - t.throws(_ => entry.write(Buffer.alloc(1024))) + entry.write(body.subarray(0, 1)) + entry.write(body.subarray(1, 25)) + entry.write(body.subarray(25)) + t.throws(() => entry.write(Buffer.alloc(1024))) t.equal(actual, expect) t.match(entry, { ignore: true }) @@ -209,8 +293,8 @@ t.test('entry without mode', t => { let data = '' let ended = false - entry.on('data', c => data += c) - entry.on('end', _ => ended = true) + entry.on('data', c => (data += c)) + entry.on('end', _ => (ended = true)) const body = Buffer.alloc(512) body.write(new Array(101).join('z'), 0) diff --git a/test/replace.js b/test/replace.js index 75c97027..22af74d7 100644 --- a/test/replace.js +++ b/test/replace.js @@ -1,25 +1,26 @@ -'use strict' -const t = require('tap') -const r = require('../lib/replace.js') -const path = require('path') -const fs = require('fs') -const mutateFS = require('mutate-fs') -const list = require('../lib/list.js') -const { resolve } = require('path') - +import t from 'tap' +import { replace as r } from '../dist/esm/replace.js' +import path, {dirname, resolve } from 'path' +import fs from 'fs' +import mutateFS from 'mutate-fs' +import { list } from '../dist/esm/list.js' +import {fileURLToPath} from 'url' +import zlib from 'zlib' +import { spawn } from 'child_process' + +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) const fixtures = path.resolve(__dirname, 'fixtures') const tars = 
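// How the 'unknown entry type' test in the hunk above works: after
// encoding a normal header, it stomps the type byte (offset 156) with an
// unrecognized code, which Header reports as Unsupported; ReadEntry then
// sets ignore and discards any body written to it. The relevant moves,
// sketched:
import { Header } from '../dist/esm/header.js'
import { ReadEntry } from '../dist/esm/read-entry.js'
const h = new Header({ path: 'junk.txt', type: 'File', size: 512 })
h.encode()
h.block.write('9', 156, 1, 'ascii') // '9' is not a known type code
const entry = new ReadEntry(new Header(h.block))
console.log(entry.ignore) // true: blocks are consumed but never emitted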
path.resolve(fixtures, 'tars')
-const zlib = require('zlib')
-const spawn = require('child_process').spawn
 
 const data = fs.readFileSync(tars + '/body-byte-counts.tar')
-const dataNoNulls = data.slice(0, data.length - 1024)
+const dataNoNulls = data.subarray(0, data.length - 1024)
 const fixtureDef = {
   'body-byte-counts.tar': data,
   'no-null-eof.tar': dataNoNulls,
-  'truncated-head.tar': Buffer.concat([dataNoNulls, data.slice(0, 500)]),
-  'truncated-body.tar': Buffer.concat([dataNoNulls, data.slice(0, 700)]),
+  'truncated-head.tar': Buffer.concat([dataNoNulls, data.subarray(0, 500)]),
+  'truncated-body.tar': Buffer.concat([dataNoNulls, data.subarray(0, 700)]),
   'zero.tar': Buffer.from(''),
   'empty.tar': Buffer.alloc(512),
   'compressed.tgz': zlib.gzipSync(data),
@@ -318,7 +319,7 @@ t.test('mtime cache', async t => {
     path.basename(__filename),
   ])
   const mtc = {}
-  mtimeCache.forEach((v, k) => mtc[k] = mtimeCache.get(k).toISOString())
+  mtimeCache.forEach((_v, k) => (mtc[k] = mtimeCache.get(k).toISOString()))
   t.same(mtc, {
     '1024-bytes.txt': '2017-04-10T16:57:47.000Z',
     '512-bytes.txt': '2017-04-10T17:08:55.000Z',
diff --git a/test/strip-absolute-path.js b/test/strip-absolute-path.js
index 3e871a9f..59529d6f 100644
--- a/test/strip-absolute-path.js
+++ b/test/strip-absolute-path.js
@@ -1,5 +1,7 @@
-const t = require('tap')
-const stripAbsolutePath = require('../lib/strip-absolute-path.js')
+import t from 'tap'
+import { stripAbsolutePath } from '../dist/esm/strip-absolute-path.js'
+import realPath from 'node:path'
+
 const cwd = process.cwd()
 
 t.test('basic', t => {
@@ -9,34 +11,46 @@ t.test('basic', t => {
     'c:///a/b/c': ['c:///', 'a/b/c'],
     '\\\\foo\\bar\\baz': ['\\\\foo\\bar\\', 'baz'],
     '//foo//bar//baz': ['//', 'foo//bar//baz'],
-    'c:\\c:\\c:\\c:\\\\d:\\e/f/g': ['c:\\c:\\c:\\c:\\\\d:\\', 'e/f/g'],
+    'c:\\c:\\c:\\c:\\\\d:\\e/f/g': [
+      'c:\\c:\\c:\\c:\\\\d:\\',
+      'e/f/g',
+    ],
   }
   for (const [input, [root, stripped]] of Object.entries(cases)) {
-    t.strictSame(stripAbsolutePath(input, cwd), [root, stripped], input)
+    t.strictSame(
+      stripAbsolutePath(input, cwd),
+      [root, stripped],
+      input,
+    )
   }
   t.end()
 })
 
-t.test('drive-local paths', t => {
+t.test('drive-local paths', async t => {
   const env = process.env
-  t.teardown(() => process.env = env)
+  t.teardown(() => (process.env = env))
   const cwd = 'D:\\safety\\land'
-  const realPath = require('path')
   // be windowsy
   const path = {
     ...realPath.win32,
     win32: realPath.win32,
     posix: realPath.posix,
   }
-  const stripAbsolutePath = t.mock('../lib/strip-absolute-path.js', { path })
+  const { stripAbsolutePath } = await t.mockImport(
+    '../dist/esm/strip-absolute-path.js',
+    { path },
+  )
   const cases = {
     '/': ['/', ''],
     '////': ['////', ''],
     'c:///a/b/c': ['c:///', 'a/b/c'],
     '\\\\foo\\bar\\baz': ['\\\\foo\\bar\\', 'baz'],
     '//foo//bar//baz': ['//', 'foo//bar//baz'],
-    'c:\\c:\\c:\\c:\\\\d:\\e/f/g': ['c:\\c:\\c:\\c:\\\\d:\\', 'e/f/g'],
+    'c:\\c:\\c:\\c:\\\\d:\\e/f/g': [
+      'c:\\c:\\c:\\c:\\\\d:\\',
+      'e/f/g',
+    ],
     'c:..\\system\\explorer.exe': ['c:', '..\\system\\explorer.exe'],
     'd:..\\..\\unsafe\\land': ['d:', '..\\..\\unsafe\\land'],
     'c:foo': ['c:', 'foo'],
@@ -45,7 +59,13 @@ t.test('drive-local paths', t => {
     '\\\\?\\X:\\y\\z': ['\\\\?\\X:\\', 'y\\z'],
   }
   for (const [input, [root, stripped]] of Object.entries(cases)) {
-    if (!t.strictSame(stripAbsolutePath(input, cwd), [root, stripped], input)) {
+    if (
+      !t.strictSame(
+        stripAbsolutePath(input, cwd),
+        [root, stripped],
+        input,
+      )
+    ) {
       break
     }
   }
diff --git a/test/strip-trailing-slashes.js 
b/test/strip-trailing-slashes.js index ce0695f8..97f8a16f 100644 --- a/test/strip-trailing-slashes.js +++ b/test/strip-trailing-slashes.js @@ -1,8 +1,8 @@ -const t = require('tap') -const stripSlash = require('../lib/strip-trailing-slashes.js') +import t from 'tap' +import { stripTrailingSlashes } from '../dist/esm/strip-trailing-slashes.js' const short = '///a///b///c///' const long = short.repeat(10) + '/'.repeat(1000000) -t.equal(stripSlash('no slash'), 'no slash') -t.equal(stripSlash(short), '///a///b///c') -t.equal(stripSlash(long), short.repeat(9) + '///a///b///c') +t.equal(stripTrailingSlashes('no slash'), 'no slash') +t.equal(stripTrailingSlashes(short), '///a///b///c') +t.equal(stripTrailingSlashes(long), short.repeat(9) + '///a///b///c') diff --git a/test/symlink-error.js b/test/symlink-error.js new file mode 100644 index 00000000..92a71bd3 --- /dev/null +++ b/test/symlink-error.js @@ -0,0 +1,11 @@ +import t from 'tap' +import { SymlinkError } from '../dist/esm/symlink-error.js' + +t.match(new SymlinkError('symlink', 'path'), { + name: 'SymlinkError', + path: 'path', + symlink: 'symlink', + syscall: 'symlink', + code: 'TAR_SYMLINK_ERROR', + message: 'TAR_SYMLINK_ERROR: Cannot extract through symbolic link', +}) diff --git a/test/types.js b/test/types.js index c2ca5f67..93425d39 100644 --- a/test/types.js +++ b/test/types.js @@ -1,6 +1,8 @@ -'use strict' -// not much to test here, just 2 maps. -const t = require('tap') -const types = require('../lib/types.js') +import t from 'tap' +import * as types from '../dist/esm/types.js' t.equal(types.name.get('0'), 'File') t.equal(types.code.get('File'), '0') +t.equal(types.isCode('0'), true) +t.equal(types.isCode('Z'), false) +t.equal(types.isName('TapeVolumeHeader'), true) +t.equal(types.isName('Unsupported'), false) diff --git a/test/unpack.js b/test/unpack.js index 2f1d3026..51ba1220 100644 --- a/test/unpack.js +++ b/test/unpack.js @@ -1,35 +1,38 @@ -'use strict' - process.umask(0o022) -const Unpack = require('../lib/unpack.js') -const UnpackSync = Unpack.Sync -const t = require('tap') -const { Minipass } = require('minipass') +import { Unpack, UnpackSync } from '../dist/esm/unpack.js' + +import fs from 'fs' +import { Minipass } from 'minipass' +import * as z from 'minizlib' +import path from 'path' +import { rimraf } from 'rimraf' +import t from 'tap' +import { fileURLToPath } from 'url' +import { Header } from '../dist/esm/header.js' +import { makeTar } from './fixtures/make-tar.js' -const makeTar = require('./make-tar.js') -const Header = require('../lib/header.js') -const z = require('minizlib') -const fs = require('fs') -const path = require('path') +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) const fixtures = path.resolve(__dirname, 'fixtures') const tars = path.resolve(fixtures, 'tars') const parses = path.resolve(fixtures, 'parse') const unpackdir = path.resolve(fixtures, 'unpack') -const { promisify } = require('util') -const rimraf = promisify(require('rimraf')) -const mkdirp = require('mkdirp') -const mutateFS = require('mutate-fs') -const eos = require('end-of-stream') -const normPath = require('../lib/normalize-windows-path.js') -const ReadEntry = require('../lib/read-entry.js') + +import eos from 'end-of-stream' +import { mkdirp } from 'mkdirp' +import mutateFS from 'mutate-fs' +import { normalizeWindowsPath as normPath } from '../dist/esm/normalize-windows-path.js' + +import { ReadEntry } from '../dist/esm/read-entry.js' // On Windows in particular, the "really deep folder 
path" file // often tends to cause problems, which don't indicate a failure // of this library, it's just what happens on Windows with super // long file paths. const isWindows = process.platform === 'win32' -const isLongFile = f => f.match(/r.e.a.l.l.y.-.d.e.e.p.-.f.o.l.d.e.r.-.p.a.t.h/) +const isLongFile = f => + f.match(/r.e.a.l.l.y.-.d.e.e.p.-.f.o.l.d.e.r.-.p.a.t.h/) t.teardown(_ => rimraf(unpackdir)) @@ -56,7 +59,8 @@ t.test('basic file unpack tests', t => { 'utf8.tar': { '🌟.txt': '🌟✧✩⭐︎✪✫✬✭✮⚝✯✰✵✶✷✸✹❂⭑⭒★☆✡☪✴︎✦✡️🔯✴️🌠\n', 'Ω.txt': 'Ω', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': 'Ω', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': + 'Ω', }, 'file.tar': { 'one-byte.txt': 'a', @@ -65,17 +69,26 @@ t.test('basic file unpack tests', t => { 'one-byte.txt': 'a', }, 'long-pax.tar': { - '120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', }, 'long-paths.tar': { - '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - '120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - '170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt': 'short\n', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': 'Ω', + '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + '120-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 
'170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt': + 'short\n', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': + 'Ω', }, } @@ -199,8 +212,10 @@ t.test('links!', t => { t.test('async', t => { const unpack = new Unpack({ cwd: dir }) let finished = false - unpack.on('finish', _ => finished = true) - unpack.on('close', _ => t.ok(finished, 'emitted finish before close')) + unpack.on('finish', _ => (finished = true)) + unpack.on('close', _ => + t.ok(finished, 'emitted finish before close'), + ) unpack.on('close', _ => check(t)) unpack.end(data) }) @@ -220,8 +235,10 @@ t.test('links!', t => { t.test('async strip', t => { const unpack = new Unpack({ cwd: dir, strip: 1 }) let finished = false - unpack.on('finish', _ => finished = true) - unpack.on('close', _ => t.ok(finished, 'emitted finish before close')) + unpack.on('finish', _ => (finished = true)) + unpack.on('close', _ => + t.ok(finished, 'emitted finish before close'), + ) unpack.on('close', _ => checkForStrip(t)) unpack.end(stripData) }) @@ -235,8 +252,10 @@ t.test('links!', t => { t.test('async strip 3', t => { const unpack = new Unpack({ cwd: dir, strip: 3 }) let finished = false - unpack.on('finish', _ => finished = true) - unpack.on('close', _ => t.ok(finished, 'emitted finish before close')) + unpack.on('finish', _ => (finished = true)) + unpack.on('close', _ => + t.ok(finished, 'emitted finish before close'), + ) unpack.on('close', _ => checkForStrip3(t)) unpack.end(stripData) }) @@ -277,9 +296,10 @@ t.test('links without cleanup (exercise clobbering code)', t => { t.test('async', t => { const unpack = new Unpack({ cwd: dir }) let prefinished = false - unpack.on('prefinish', _ => prefinished = true) + unpack.on('prefinish', _ => (prefinished = true)) unpack.on('finish', _ => - t.ok(prefinished, 'emitted prefinish before finish')) + t.ok(prefinished, 'emitted prefinish before finish'), + ) unpack.on('close', _ => check(t)) unpack.end(data) }) @@ -321,9 +341,12 @@ t.test('nested dir dupe', t => { t.teardown(_ => rimraf(dir)) const expect = { 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt': 'short\n', - 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 
'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': 'Ω', } @@ -347,274 +370,346 @@ t.test('nested dir dupe', t => { zip.end(data) }) -t.test('symlink in dir path', { - skip: isWindows && 'symlinks not fully supported', -}, t => { - const dir = path.resolve(unpackdir, 'symlink-junk') +t.test( + 'symlink in dir path', + { + skip: isWindows && 'symlinks not fully supported', + }, + t => { + const dir = path.resolve(unpackdir, 'symlink-junk') - t.teardown(_ => rimraf(dir)) - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) + t.teardown(_ => rimraf(dir)) + t.beforeEach(async () => { + await rimraf(dir) + await mkdirp(dir) + }) - const data = makeTar([ - { - path: 'd/i', - type: 'Directory', - }, - { - path: 'd/i/r/dir', - type: 'Directory', - mode: 0o751, - mtime: new Date('2011-03-27T22:16:31.000Z'), - }, - { - path: 'd/i/r/file', - type: 'File', - size: 1, - atime: new Date('1979-07-01T19:10:00.000Z'), - ctime: new Date('2011-03-27T22:16:31.000Z'), - }, - 'a', - { - path: 'd/i/r/link', - type: 'Link', - linkpath: 'd/i/r/file', - atime: new Date('1979-07-01T19:10:00.000Z'), - ctime: new Date('2011-03-27T22:16:31.000Z'), - mtime: new Date('2011-03-27T22:16:31.000Z'), - }, - { - path: 'd/i/r/symlink', - type: 'SymbolicLink', - linkpath: './dir', - atime: new Date('1979-07-01T19:10:00.000Z'), - ctime: new Date('2011-03-27T22:16:31.000Z'), - mtime: new Date('2011-03-27T22:16:31.000Z'), - }, - { - path: 'd/i/r/symlink/x', - type: 'File', - size: 0, - atime: new Date('1979-07-01T19:10:00.000Z'), - ctime: new Date('2011-03-27T22:16:31.000Z'), - mtime: new Date('2011-03-27T22:16:31.000Z'), - }, - '', - '', - ]) + const data = makeTar([ + { + path: 'd/i', + type: 'Directory', + }, + { + path: 'd/i/r/dir', + type: 'Directory', + mode: 0o751, + mtime: new Date('2011-03-27T22:16:31.000Z'), + }, + { + path: 'd/i/r/file', + type: 'File', + size: 1, + atime: new Date('1979-07-01T19:10:00.000Z'), + ctime: new Date('2011-03-27T22:16:31.000Z'), + }, + 'a', + { + path: 'd/i/r/link', + type: 'Link', + linkpath: 'd/i/r/file', + atime: new Date('1979-07-01T19:10:00.000Z'), + ctime: new Date('2011-03-27T22:16:31.000Z'), + mtime: new 
Date('2011-03-27T22:16:31.000Z'), + }, + { + path: 'd/i/r/symlink', + type: 'SymbolicLink', + linkpath: './dir', + atime: new Date('1979-07-01T19:10:00.000Z'), + ctime: new Date('2011-03-27T22:16:31.000Z'), + mtime: new Date('2011-03-27T22:16:31.000Z'), + }, + { + path: 'd/i/r/symlink/x', + type: 'File', + size: 0, + atime: new Date('1979-07-01T19:10:00.000Z'), + ctime: new Date('2011-03-27T22:16:31.000Z'), + mtime: new Date('2011-03-27T22:16:31.000Z'), + }, + '', + '', + ]) - t.test('no clobbering', t => { - const warnings = [] - const u = new Unpack({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), + t.test('no clobbering', t => { + const warnings = [] + const u = new Unpack({ + cwd: dir, + onwarn: (c, w, d) => warnings.push([c, w, d]), + }) + u.on('close', _ => { + t.equal( + fs.lstatSync(dir + '/d/i').mode & 0o7777, + isWindows ? 0o666 : 0o755, + ) + t.equal( + fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, + isWindows ? 0o666 : 0o751, + ) + t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') + if (!isWindows) { + t.ok( + fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + 'got symlink', + ) + t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + } + t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') + if (!isWindows) { + t.equal( + warnings[0][1], + 'TAR_SYMLINK_ERROR: Cannot extract through symbolic link', + ) + t.match(warnings[0][2], { + name: 'SymlinkError', + code: 'TAR_SYMLINK_ERROR', + tarCode: 'TAR_ENTRY_ERROR', + path: dir + '/d/i/r/symlink/', + symlink: dir + '/d/i/r/symlink', + }) + } + t.equal(warnings.length, 1) + t.end() + }) + u.end(data) }) - u.on('close', _ => { - t.equal(fs.lstatSync(dir + '/d/i').mode & 0o7777, isWindows ? 0o666 : 0o755) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, isWindows ? 0o666 : 0o751) + + t.test('no clobbering, sync', t => { + const warnings = [] + const u = new UnpackSync({ + cwd: dir, + onwarn: (c, w, d) => warnings.push([c, w, d]), + }) + u.end(data) + t.equal( + fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, + isWindows ? 0o666 : 0o751, + ) t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') if (!isWindows) { - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') + t.ok( + fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + 'got symlink', + ) t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) } - t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') - if (!isWindows) { - t.equal(warnings[0][1], 'Cannot extract through symbolic link') - t.match(warnings[0][2], { - name: 'SylinkError', - path: dir + '/d/i/r/symlink/', - symlink: dir + '/d/i/r/symlink', - }) - } t.equal(warnings.length, 1) + t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') + t.equal( + warnings[0][1], + 'TAR_SYMLINK_ERROR: Cannot extract through symbolic link', + ) + t.match(warnings[0][2], { + name: 'SymlinkError', + path: dir + '/d/i/r/symlink/', + symlink: dir + '/d/i/r/symlink', + }) t.end() }) - u.end(data) - }) - t.test('no clobbering, sync', t => { - const warnings = [] - const u = new UnpackSync({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), - }) - u.end(data) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, isWindows ? 
0o666 : 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - if (!isWindows) { - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) - } - t.equal(warnings.length, 1) - t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') - t.equal(warnings[0][1], 'Cannot extract through symbolic link') - t.match(warnings[0][2], { - name: 'SylinkError', - path: dir + '/d/i/r/symlink/', - symlink: dir + '/d/i/r/symlink', + t.test('extract through symlink', t => { + const warnings = [] + const u = new Unpack({ + cwd: dir, + onwarn: (c, w, d) => warnings.push([c, w, d]), + preservePaths: true, + }) + u.on('close', _ => { + t.same(warnings, []) + t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') + t.ok( + fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + 'got symlink', + ) + t.ok( + fs.lstatSync(dir + '/d/i/r/dir/x').isFile(), + 'x thru link', + ) + t.ok( + fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), + 'x thru link', + ) + t.end() + }) + u.end(data) }) - t.end() - }) - t.test('extract through symlink', t => { - const warnings = [] - const u = new Unpack({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), - preservePaths: true, - }) - u.on('close', _ => { + t.test('extract through symlink sync', t => { + const warnings = [] + const u = new UnpackSync({ + cwd: dir, + onwarn: (c, w, d) => warnings.push([c, w, d]), + preservePaths: true, + }) + u.end(data) t.same(warnings, []) t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') + t.ok( + fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + 'got symlink', + ) t.ok(fs.lstatSync(dir + '/d/i/r/dir/x').isFile(), 'x thru link') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), 'x thru link') + t.ok( + fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), + 'x thru link', + ) t.end() }) - u.end(data) - }) - t.test('extract through symlink sync', t => { - const warnings = [] - const u = new UnpackSync({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), - preservePaths: true, + t.test('clobber through symlink', t => { + const warnings = [] + const u = new Unpack({ + cwd: dir, + onwarn: (c, w, d) => warnings.push([c, w, d]), + unlink: true, + }) + u.on('close', _ => { + t.same(warnings, []) + t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') + t.notOk( + fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + 'no link', + ) + t.ok( + fs.lstatSync(dir + '/d/i/r/symlink').isDirectory(), + 'sym is dir', + ) + t.ok( + fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), + 'x thru link', + ) + t.end() + }) + u.end(data) }) - u.end(data) - t.same(warnings, []) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') - t.ok(fs.lstatSync(dir + '/d/i/r/dir/x').isFile(), 'x thru link') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), 'x thru link') - t.end() - }) - t.test('clobber through symlink', t => { - const warnings = [] - const u = new Unpack({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), - unlink: true, + t.test('clobber through symlink with busted unlink', t => { + const poop = new 
Error('poop') + // for some reason, resetting fs.unlink in the teardown was breaking + const reset = mutateFS.fail('unlink', poop) + const warnings = [] + const u = new Unpack({ + cwd: dir, + onwarn: (c, w, d) => warnings.push([c, w, d]), + unlink: true, + }) + u.on('close', _ => { + t.same(warnings, [['TAR_ENTRY_ERROR', 'poop', poop]]) + reset() + t.end() + }) + u.end(data) }) - u.on('close', _ => { - t.same(warnings, []) + + t.test('clobber through symlink sync', t => { + const warnings = [] + const u = new UnpackSync({ + cwd: dir, + onwarn: (c, w, d) => warnings.push([c, w, d]), + unlink: true, + }) + u.end(data) t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.notOk(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'no link') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isDirectory(), 'sym is dir') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), 'x thru link') + t.notOk( + fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + 'no link', + ) + t.ok( + fs.lstatSync(dir + '/d/i/r/symlink').isDirectory(), + 'sym is dir', + ) + t.ok( + fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), + 'x thru link', + ) t.end() }) - u.end(data) - }) - t.test('clobber through symlink with busted unlink', t => { - const poop = new Error('poop') - // for some reason, resetting fs.unlink in the teardown was breaking - const reset = mutateFS.fail('unlink', poop) - const warnings = [] - const u = new Unpack({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), - unlink: true, - }) - u.on('close', _ => { - t.same(warnings, [['TAR_ENTRY_ERROR', 'poop', poop]]) - reset() - t.end() - }) - u.end(data) - }) - - t.test('clobber through symlink sync', t => { - const warnings = [] - const u = new UnpackSync({ - cwd: dir, - onwarn: (c, w, d) => warnings.push([c, w, d]), - unlink: true, + t.test('clobber dirs', t => { + mkdirp.sync(dir + '/d/i/r/dir') + mkdirp.sync(dir + '/d/i/r/file') + mkdirp.sync(dir + '/d/i/r/link') + mkdirp.sync(dir + '/d/i/r/symlink') + const warnings = [] + const u = new Unpack({ + cwd: dir, + onwarn: (c, w, d) => { + warnings.push([c, w, d]) + }, + }) + u.on('close', _ => { + t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') + t.ok( + fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + 'got symlink', + ) + t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + t.equal(warnings.length, 1) + t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') + t.equal( + warnings[0][1], + 'TAR_SYMLINK_ERROR: Cannot extract through symbolic link', + ) + t.match(warnings[0][2], { + name: 'SymlinkError', + path: dir + '/d/i/r/symlink/', + symlink: dir + '/d/i/r/symlink', + }) + t.end() + }) + u.end(data) }) - u.end(data) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.notOk(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'no link') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isDirectory(), 'sym is dir') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), 'x thru link') - t.end() - }) - t.test('clobber dirs', t => { - mkdirp.sync(dir + '/d/i/r/dir') - mkdirp.sync(dir + '/d/i/r/file') - mkdirp.sync(dir + '/d/i/r/link') - mkdirp.sync(dir + '/d/i/r/symlink') - const warnings = [] - const u = new Unpack({ - cwd: dir, - onwarn: (c, w, d) => { - warnings.push([c, w, d]) - }, - }) - u.on('close', _ => { + t.test('clobber dirs sync', t => { + mkdirp.sync(dir + 
'/d/i/r/dir') + mkdirp.sync(dir + '/d/i/r/file') + mkdirp.sync(dir + '/d/i/r/link') + mkdirp.sync(dir + '/d/i/r/symlink') + const warnings = [] + const u = new UnpackSync({ + cwd: dir, + onwarn: (c, w, d) => { + warnings.push([c, w, d]) + }, + }) + u.end(data) t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') + t.ok( + fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + 'got symlink', + ) t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) t.equal(warnings.length, 1) t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') - t.equal(warnings[0][1], 'Cannot extract through symbolic link') + t.equal( + warnings[0][1], + 'TAR_SYMLINK_ERROR: Cannot extract through symbolic link', + ) t.match(warnings[0][2], { - name: 'SylinkError', + name: 'SymlinkError', path: dir + '/d/i/r/symlink/', symlink: dir + '/d/i/r/symlink', }) t.end() }) - u.end(data) - }) - t.test('clobber dirs sync', t => { - mkdirp.sync(dir + '/d/i/r/dir') - mkdirp.sync(dir + '/d/i/r/file') - mkdirp.sync(dir + '/d/i/r/link') - mkdirp.sync(dir + '/d/i/r/symlink') - const warnings = [] - const u = new UnpackSync({ - cwd: dir, - onwarn: (c, w, d) => { - warnings.push([c, w, d]) - }, - }) - u.end(data) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') - t.ok(fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink') - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) - t.equal(warnings.length, 1) - t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') - t.equal(warnings[0][1], 'Cannot extract through symbolic link') - t.match(warnings[0][2], { - name: 'SylinkError', - path: dir + '/d/i/r/symlink/', - symlink: dir + '/d/i/r/symlink', - }) t.end() - }) - - t.end() -}) + }, +) t.test('unsupported entries', t => { const dir = path.resolve(unpackdir, 'unsupported-entries') mkdirp.sync(dir) t.teardown(_ => rimraf(dir)) - const unknown = new Header({ path: 'qux', type: 'File', size: 4 }) - unknown.type = 'Z' + const unknown = new Header({ path: 'qux', size: 4 }) unknown.encode() + unknown.block?.write('Z', 156) const data = makeTar([ { path: 'dev/random', @@ -639,15 +734,33 @@ t.test('unsupported entries', t => { t.test('basic, warns', t => { const warnings = [] - const u = new Unpack({ cwd: dir, onwarn: (c, w, d) => warnings.push([c, w, d]) }) + const u = new Unpack({ + cwd: dir, + onwarn: (c, w, d) => warnings.push([c, w, d]), + }) const c = 'TAR_ENTRY_UNSUPPORTED' const expect = [ - [c, 'unsupported entry type: CharacterDevice', { - entry: { path: 'dev/random' } }], - [c, 'unsupported entry type: BlockDevice', { - entry: { path: 'dev/hd0' } }], - [c, 'unsupported entry type: FIFO', { - entry: { path: 'dev/fifo0' } }], + [ + c, + 'unsupported entry type: CharacterDevice', + { + entry: { path: 'dev/random' }, + }, + ], + [ + c, + 'unsupported entry type: BlockDevice', + { + entry: { path: 'dev/hd0' }, + }, + ], + [ + c, + 'unsupported entry type: FIFO', + { + entry: { path: 'dev/fifo0' }, + }, + ], ] u.on('close', _ => { t.equal(fs.readdirSync(dir).length, 0) @@ -752,7 +865,9 @@ t.test('file in dir path', t => { t.plan(2) t.test('async', t => { - new Unpack({ cwd: dir, unlink: true }).on('close', _ => check(t)).end(data) + new Unpack({ cwd: dir, unlink: true }) + .on('close', _ => check(t)) + .end(data) }) t.test('sync', t => { @@ -782,11 +897,19 @@ t.test('set umask option', t => { new Unpack({ umask: 0o027, cwd: dir, 
- }).on('close', _ => { - t.equal(fs.statSync(dir + '/d/i/r').mode & 0o7777, isWindows ? 0o666 : 0o750) - t.equal(fs.statSync(dir + '/d/i/r/dir').mode & 0o7777, isWindows ? 0o666 : 0o751) - t.end() - }).end(data) + }) + .on('close', _ => { + t.equal( + fs.statSync(dir + '/d/i/r').mode & 0o7777, + isWindows ? 0o666 : 0o750, + ) + t.equal( + fs.statSync(dir + '/d/i/r/dir').mode & 0o7777, + isWindows ? 0o666 : 0o751, + ) + t.end() + }) + .end(data) }) t.test('absolute paths', t => { @@ -823,11 +946,16 @@ t.test('absolute paths', t => { t.test('warn and correct', t => { const check = t => { const r = normPath(root) - t.match(warnings, [[ - `stripping ${r}${r}${r}${r} from absolute path`, - { path: normPath(absolute), code: 'TAR_ENTRY_INFO' }, - ]]) - t.ok(fs.lstatSync(path.resolve(dir, relative)).isFile(), 'is file') + t.match(warnings, [ + [ + `stripping ${r}${r}${r}${r} from absolute path`, + { path: normPath(absolute), code: 'TAR_ENTRY_INFO' }, + ], + ]) + t.ok( + fs.lstatSync(path.resolve(dir, relative)).isFile(), + 'is file', + ) t.end() } @@ -837,15 +965,17 @@ t.test('absolute paths', t => { warnings.length = 0 new Unpack({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', _ => check(t)) + .end(data) }) t.test('sync', t => { warnings.length = 0 new UnpackSync({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t) }) @@ -883,8 +1013,10 @@ t.test('absolute paths', t => { new Unpack({ preservePaths: true, cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', _ => check(t)) + .end(data) }) t.test('sync', t => { @@ -892,7 +1024,7 @@ t.test('absolute paths', t => { new UnpackSync({ preservePaths: true, cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t) }) @@ -931,10 +1063,12 @@ t.test('.. paths', t => { t.test('warn and skip', t => { const check = t => { - t.match(warnings, [[ - 'path contains \'..\'', - { path: dotted, code: 'TAR_ENTRY_ERROR' }, - ]]) + t.match(warnings, [ + [ + "path contains '..'", + { path: dotted, code: 'TAR_ENTRY_ERROR' }, + ], + ]) t.throws(_ => fs.lstatSync(resolved)) t.end() } @@ -946,8 +1080,10 @@ t.test('.. paths', t => { new Unpack({ fmode: fmode, cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', _ => check(t)) + .end(data) }) t.test('sync', t => { @@ -955,7 +1091,7 @@ t.test('.. paths', t => { new UnpackSync({ fmode: fmode, cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t) }) @@ -967,7 +1103,10 @@ t.test('.. paths', t => { const check = t => { t.same(warnings, []) t.ok(fs.lstatSync(resolved).isFile(), 'is file') - t.equal(fs.lstatSync(resolved).mode & 0o777, isWindows ? 0o666 : fmode) + t.equal( + fs.lstatSync(resolved).mode & 0o777, + isWindows ? 0o666 : fmode, + ) t.end() } @@ -979,8 +1118,10 @@ t.test('.. paths', t => { fmode: fmode, preservePaths: true, cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', _ => check(t)) + .end(data) }) t.test('sync', t => { @@ -989,7 +1130,7 @@ t.test('.. 
paths', t => { fmode: fmode, preservePaths: true, cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t) }) @@ -1004,36 +1145,36 @@ t.test('fail all stats', t => { const poop = new Error('poop') poop.code = 'EPOOP' const dir = normPath(path.join(unpackdir, 'stat-fail')) - const { - stat, - fstat, - lstat, - statSync, - fstatSync, - lstatSync, - } = fs - const unmutate = () => Object.assign(fs, { - stat, - fstat, - lstat, - statSync, - fstatSync, - lstatSync, - }) + const { stat, fstat, lstat, statSync, fstatSync, lstatSync } = fs + const unmutate = () => + Object.assign(fs, { + stat, + fstat, + lstat, + statSync, + fstatSync, + lstatSync, + }) const mutate = () => { - fs.stat = fs.lstat = fs.fstat = (...args) => { - // don't fail statting the cwd, or we get different errors - if (normPath(args[0]) === dir) { - return lstat(dir, args.pop()) - } - process.nextTick(() => args.pop()(poop)) - } - fs.statSync = fs.lstatSync = fs.fstatSync = (...args) => { - if (normPath(args[0]) === dir) { - return lstatSync(dir) - } - throw poop - } + fs.stat = + fs.lstat = + fs.fstat = + (...args) => { + // don't fail statting the cwd, or we get different errors + if (normPath(args[0]) === dir) { + return lstat(dir, args.pop()) + } + process.nextTick(() => args.pop()(poop)) + } + fs.statSync = + fs.lstatSync = + fs.fstatSync = + (...args) => { + if (normPath(args[0]) === dir) { + return lstatSync(dir) + } + throw poop + } } const warnings = [] @@ -1105,8 +1246,10 @@ t.test('fail all stats', t => { ] new Unpack({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t, expect)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', _ => check(t, expect)) + .end(data) }) t.test('sync', t => { @@ -1130,7 +1273,7 @@ t.test('fail all stats', t => { ] new UnpackSync({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t, expect) }) @@ -1186,15 +1329,17 @@ t.test('fail symlink', t => { const expect = [['poop', poop]] new Unpack({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t, expect)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', _ => check(t, expect)) + .end(data) }) t.test('sync', t => { const expect = [['poop', poop]] new UnpackSync({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t, expect) }) @@ -1249,15 +1394,17 @@ t.test('fail chmod', t => { const expect = [['poop', poop]] new Unpack({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t, expect)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', _ => check(t, expect)) + .end(data) }) t.test('sync', t => { const expect = [['poop', poop]] new UnpackSync({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t, expect) }) @@ -1294,14 +1441,16 @@ t.test('fail mkdir', t => { '', ]) - const expect = [[ - 'ENOENT: no such file or directory', - { - code: 'ENOENT', - syscall: 'lstat', - path: normPath(path.resolve(dir, 'dir')), - }, - ]] + const expect = [ + [ + 'ENOENT: no such file or directory', + { + code: 'ENOENT', + syscall: 'lstat', + path: normPath(path.resolve(dir, 'dir')), + }, + ], + ] const check = t => { t.match(warnings, expect) @@ -1312,7 +1461,7 @@ t.test('fail mkdir', t => { t.test('sync', t 
=> { new UnpackSync({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t) }) @@ -1320,8 +1469,10 @@ t.test('fail mkdir', t => { t.test('async', t => { new Unpack({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', _ => check(t)) + .end(data) }) t.end() @@ -1368,14 +1519,16 @@ t.test('fail write', t => { t.test('async', t => { new Unpack({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), - }).on('close', _ => check(t)).end(data) + onwarn: (_c, w, d) => warnings.push([w, d]), + }) + .on('close', _ => check(t)) + .end(data) }) t.test('sync', t => { new UnpackSync({ cwd: dir, - onwarn: (c, w, d) => warnings.push([w, d]), + onwarn: (_c, w, d) => warnings.push([w, d]), }).end(data) check(t) }) @@ -1421,7 +1574,9 @@ t.test('skip existing', t => { new Unpack({ cwd: dir, keep: true, - }).on('close', _ => check(t)).end(data) + }) + .on('close', _ => check(t)) + .end(data) }) t.test('sync', t => { @@ -1473,7 +1628,9 @@ t.test('skip newer', t => { new Unpack({ cwd: dir, newer: true, - }).on('close', _ => check(t)).end(data) + }) + .on('close', _ => check(t)) + .end(data) }) t.test('sync', t => { @@ -1537,7 +1694,9 @@ t.test('no mtime', t => { new Unpack({ cwd: dir, noMtime: true, - }).on('close', _ => check(t)).end(data) + }) + .on('close', _ => check(t)) + .end(data) }) t.test('sync', t => { @@ -1563,7 +1722,8 @@ t.test('unpack big enough to pause/drain', t => { }) u.on('ignoredEntry', entry => - t.fail('should not get ignored entry: ' + entry.path)) + t.fail('should not get ignored entry: ' + entry.path), + ) u.on('close', _ => { t.pass('extraction finished') @@ -1584,7 +1744,9 @@ t.test('set owner', t => { const getgid = process.getgid process.getuid = _ => myUid process.getgid = _ => myGid - t.teardown(_ => (process.getuid = getuid, process.getgid = getgid)) + t.teardown( + _ => ((process.getuid = getuid), (process.getgid = getgid)), + ) // can't actually do this because it requires root, but we can // verify that chown gets called. 
@@ -1688,10 +1850,10 @@ t.test('set owner', t => { mkdirp.sync(dir) t.teardown(_ => rimraf(dir)) let warned = false - const u = new Unpack.Sync({ + const u = new UnpackSync({ cwd: dir, preserveOwner: true, - onwarn: (c, m, er) => { + onwarn: (_c, _m, er) => { if (!warned) { warned = true t.equal(er, poop) @@ -1709,7 +1871,7 @@ t.test('set owner', t => { const u = new Unpack({ cwd: dir, preserveOwner: true, - onwarn: (c, m, er) => { + onwarn: (_c, _m, er) => { if (!warned) { warned = true t.equal(er, poop) @@ -1732,10 +1894,13 @@ t.test('set owner', t => { const fchownSync = fs.fchownSync const lchownSync = fs.lchownSync let called = 0 - fs.fchown = fs.chown = fs.lchown = (path, owner, group, cb) => { - called++ - cb() - } + fs.fchown = + fs.chown = + fs.lchown = + (_path, _owner, _group, cb) => { + called++ + cb() + } fs.chownSync = fs.lchownSync = fs.fchownSync = _ => called++ t.teardown(_ => { @@ -1751,7 +1916,7 @@ t.test('set owner', t => { mkdirp.sync(dir) t.teardown(_ => rimraf(dir)) called = 0 - const u = new Unpack.Sync({ cwd: dir, preserveOwner: true }) + const u = new UnpackSync({ cwd: dir, preserveOwner: true }) u.end(data) t.ok(called >= 5, 'called chowns') t.end() @@ -1797,13 +1962,15 @@ t.test('set owner', t => { t.not(fileStat.gid, 813708013) const dirStat2 = fs.statSync(dir + '/foo/different-uid-nogid') t.not(dirStat2.uid, 2456124561) - const fileStat2 = fs.statSync(dir + '/foo/different-uid-nogid/bar') + const fileStat2 = fs.statSync( + dir + '/foo/different-uid-nogid/bar', + ) t.not(fileStat2.uid, 2456124561) t.end() } t.test('sync', t => { - const u = new Unpack.Sync({ cwd: dir, preserveOwner: false }) + const u = new UnpackSync({ cwd: dir, preserveOwner: false }) u.end(data) check(t) }) @@ -1842,13 +2009,16 @@ t.test('unpack when dir is not writable', t => { t.afterEach(() => rimraf(dir)) const check = t => { - t.equal(fs.statSync(dir + '/a').mode & 0o7777, isWindows ? 0o666 : 0o744) + t.equal( + fs.statSync(dir + '/a').mode & 0o7777, + isWindows ? 
0o666 : 0o744, + ) t.equal(fs.readFileSync(dir + '/a/b', 'utf8'), 'a') t.end() } t.test('sync', t => { - const u = new Unpack.Sync({ cwd: dir, strict: true }) + const u = new UnpackSync({ cwd: dir, strict: true }) u.end(data) check(t) }) @@ -1898,7 +2068,7 @@ t.test('transmute chars on windows', t => { }) t.test('sync', t => { - const u = new Unpack.Sync({ + const u = new UnpackSync({ cwd: dir, win32: true, }) @@ -1972,7 +2142,7 @@ t.test('use explicit chmod when required by umask', t => { return t.test('sync', t => { mkdirp.sync(basedir) - const unpack = new Unpack.Sync({ cwd: basedir }) + const unpack = new UnpackSync({ cwd: basedir }) unpack.end(data) check(t) }) @@ -1981,7 +2151,7 @@ t.test('use explicit chmod when required by umask', t => { t.test('dont use explicit chmod if noChmod flag set', t => { process.umask(0o022) const { umask } = process - t.teardown(() => process.umask = umask) + t.teardown(() => (process.umask = umask)) process.umask = () => { throw new Error('should not call process.umask()') } @@ -2014,7 +2184,7 @@ t.test('dont use explicit chmod if noChmod flag set', t => { return t.test('sync', t => { mkdirp.sync(basedir) - const unpack = new Unpack.Sync({ cwd: basedir, noChmod: true }) + const unpack = new UnpackSync({ cwd: basedir, noChmod: true }) unpack.end(data) check(t) }) @@ -2046,18 +2216,24 @@ t.test('chown implicit dirs and also the entries', t => { let chowns = 0 let currentTest = null - fs.lchown = fs.fchown = fs.chown = (path, uid, gid, cb) => { - currentTest.equal(uid, 420, 'chown(' + path + ') uid') - currentTest.equal(gid, 666, 'chown(' + path + ') gid') - chowns++ - cb() - } + fs.lchown = + fs.fchown = + fs.chown = + (path, uid, gid, cb) => { + currentTest.equal(uid, 420, 'chown(' + path + ') uid') + currentTest.equal(gid, 666, 'chown(' + path + ') gid') + chowns++ + cb() + } - fs.lchownSync = fs.chownSync = fs.fchownSync = (path, uid, gid) => { - currentTest.equal(uid, 420, 'chownSync(' + path + ') uid') - currentTest.equal(gid, 666, 'chownSync(' + path + ') gid') - chowns++ - } + fs.lchownSync = + fs.chownSync = + fs.fchownSync = + (path, uid, gid) => { + currentTest.equal(uid, 420, 'chownSync(' + path + ') uid') + currentTest.equal(gid, 666, 'chownSync(' + path + ') gid') + chowns++ + } const data = makeTar([ { @@ -2091,29 +2267,49 @@ t.test('chown implicit dirs and also the entries', t => { } t.test('throws when setting uid/gid improperly', t => { - t.throws(_ => new Unpack({ uid: 420 }), - TypeError('cannot set owner without number uid and gid')) - t.throws(_ => new Unpack({ gid: 666 }), - TypeError('cannot set owner without number uid and gid')) - t.throws(_ => new Unpack({ uid: 1, gid: 2, preserveOwner: true }), - TypeError('cannot preserve owner in archive and also set owner explicitly')) + t.throws( + _ => new Unpack({ uid: 420 }), + TypeError('cannot set owner without number uid and gid'), + ) + t.throws( + _ => new Unpack({ gid: 666 }), + TypeError('cannot set owner without number uid and gid'), + ) + t.throws( + _ => new Unpack({ uid: 1, gid: 2, preserveOwner: true }), + TypeError( + 'cannot preserve owner in archive and also set owner explicitly', + ), + ) t.end() }) const tests = () => - t.test('async', t => { - currentTest = t - mkdirp.sync(basedir) - const unpack = new Unpack({ cwd: basedir, uid: 420, gid: 666 }) - unpack.on('close', _ => check(t)) - unpack.end(data) - }).then(t.test('sync', t => { - currentTest = t - mkdirp.sync(basedir) - const unpack = new Unpack.Sync({ cwd: basedir, uid: 420, gid: 666 }) - unpack.end(data) - check(t) 
- })) + t + .test('async', t => { + currentTest = t + mkdirp.sync(basedir) + const unpack = new Unpack({ + cwd: basedir, + uid: 420, + gid: 666, + }) + unpack.on('close', _ => check(t)) + unpack.end(data) + }) + .then( + t.test('sync', t => { + currentTest = t + mkdirp.sync(basedir) + const unpack = new UnpackSync({ + cwd: basedir, + uid: 420, + gid: 666, + }) + unpack.end(data) + check(t) + }), + ) tests() @@ -2158,64 +2354,72 @@ t.test('bad cwd setting', t => { fs.writeFileSync(basedir + '/file', 'xyz') - cases.forEach(c => t.test(c.type + ' ' + c.path, t => { - const data = makeTar([ - { - path: c.path, - mode: 0o775, - type: c.type, - size: 0, - uid: null, - gid: null, - }, - '', - '', - ]) - - t.test('cwd is a file', t => { - const cwd = basedir + '/file' - const opt = { cwd: cwd } + cases.forEach(c => + t.test(c.type + ' ' + c.path, t => { + const data = makeTar([ + { + path: c.path, + mode: 0o775, + type: c.type, + size: 0, + uid: null, + gid: null, + }, + '', + '', + ]) - t.throws(_ => new Unpack.Sync(opt).end(data), { - name: 'CwdError', - message: 'ENOTDIR: Cannot cd into \'' + normPath(cwd) + '\'', - path: normPath(cwd), - code: 'ENOTDIR', - }) + t.test('cwd is a file', t => { + const cwd = basedir + '/file' + const opt = { cwd: cwd } - new Unpack(opt).on('error', er => { - t.match(er, { + t.throws(_ => new UnpackSync(opt).end(data), { name: 'CwdError', - message: 'ENOTDIR: Cannot cd into \'' + normPath(cwd) + '\'', + message: "ENOTDIR: Cannot cd into '" + normPath(cwd) + "'", path: normPath(cwd), code: 'ENOTDIR', }) - t.end() - }).end(data) - }) - - return t.test('cwd is missing', t => { - const cwd = basedir + '/asdf/asdf/asdf' - const opt = { cwd: cwd } - t.throws(_ => new Unpack.Sync(opt).end(data), { - name: 'CwdError', - message: 'ENOENT: Cannot cd into \'' + normPath(cwd) + '\'', - path: normPath(cwd), - code: 'ENOENT', + new Unpack(opt) + .on('error', er => { + t.match(er, { + name: 'CwdError', + message: + "ENOTDIR: Cannot cd into '" + normPath(cwd) + "'", + path: normPath(cwd), + code: 'ENOTDIR', + }) + t.end() + }) + .end(data) }) - new Unpack(opt).on('error', er => { - t.match(er, { + return t.test('cwd is missing', t => { + const cwd = basedir + '/asdf/asdf/asdf' + const opt = { cwd: cwd } + + t.throws(_ => new UnpackSync(opt).end(data), { name: 'CwdError', - message: 'ENOENT: Cannot cd into \'' + normPath(cwd) + '\'', + message: "ENOENT: Cannot cd into '" + normPath(cwd) + "'", path: normPath(cwd), code: 'ENOENT', }) - t.end() - }).end(data) - }) - })) + + new Unpack(opt) + .on('error', er => { + t.match(er, { + name: 'CwdError', + message: + "ENOENT: Cannot cd into '" + normPath(cwd) + "'", + path: normPath(cwd), + code: 'ENOENT', + }) + t.end() + }) + .end(data) + }) + }), + ) t.end() }) @@ -2238,7 +2442,8 @@ t.test('transform', t => { 'utf8.tar': { '🌟.txt': '🌟✧✩⭐︎✪✫✬✭✮⚝✯✰✵✶✷✸✹❂⭑⭒★☆✡☪✴︎✦✡️🔯✴️🌠\n', 'Ω.txt': '[Ω]', - 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': '[Ω]', + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt': + '[Ω]', }, } @@ -2256,8 +2461,12 @@ t.test('transform', t => { } class Bracer extends Minipass { - write (data) { - const d = data.toString().split('').map(c => '[' + c + ']').join('') + write(data) { + const d = data + .toString() + .split('') + .map(c => '[' + c + ']') + .join('') return super.write(d) } } @@ -2289,7 +2498,11 @@ t.test('transform', t => { t.test('async unpack', t => { t.plan(2) t.test('strict', t => { - const unpack = new Unpack({ cwd: dir, strict: true, transform: txFn }) + const unpack = new 
Unpack({ + cwd: dir, + strict: true, + transform: txFn, + }) fs.createReadStream(tf).pipe(unpack) eos(unpack, _ => check(t)) }) @@ -2303,7 +2516,11 @@ t.test('transform', t => { t.test('sync unpack', t => { t.plan(2) t.test('strict', t => { - const unpack = new UnpackSync({ cwd: dir, strict: true, transform: txFn }) + const unpack = new UnpackSync({ + cwd: dir, + strict: true, + transform: txFn, + }) unpack.end(fs.readFileSync(tf)) check(t) }) @@ -2335,7 +2552,11 @@ t.test('transform error', t => { t.test('sync unpack', t => { t.test('strict', t => { - const unpack = new UnpackSync({ cwd: dir, strict: true, transform: txFn }) + const unpack = new UnpackSync({ + cwd: dir, + strict: true, + transform: txFn, + }) const expect = 3 let actual = 0 unpack.on('error', er => { @@ -2350,7 +2571,7 @@ t.test('transform error', t => { const unpack = new UnpackSync({ cwd: dir, transform: txFn }) const expect = 3 let actual = 0 - unpack.on('warn', (code, msg, er) => { + unpack.on('warn', (_code, _msg, er) => { t.equal(er, poop) actual++ }) @@ -2363,7 +2584,11 @@ t.test('transform error', t => { t.test('async unpack', t => { // the last error is about the folder being deleted, just ignore that one t.test('strict', t => { - const unpack = new Unpack({ cwd: dir, strict: true, transform: txFn }) + const unpack = new Unpack({ + cwd: dir, + strict: true, + transform: txFn, + }) t.plan(3) t.teardown(() => { unpack.removeAllListeners('error') @@ -2376,7 +2601,7 @@ t.test('transform error', t => { const unpack = new Unpack({ cwd: dir, transform: txFn }) t.plan(3) t.teardown(() => unpack.removeAllListeners('warn')) - unpack.on('warn', (code, msg, er) => t.equal(er, poop)) + unpack.on('warn', (_code, _msg, er) => t.equal(er, poop)) unpack.end(tardata) }) t.end() @@ -2413,13 +2638,17 @@ t.test('futimes/fchown failures', t => { t.test('async unpack', t => { t.plan(2) t.test('strict', t => { - const unpack = new Unpack({ cwd: dir, strict: true, forceChown: fc }) - unpack.on('finish', t.end) + const unpack = new Unpack({ + cwd: dir, + strict: true, + forceChown: fc, + }) + unpack.on('finish', () => t.end()) unpack.end(tardata) }) t.test('loose', t => { const unpack = new Unpack({ cwd: dir, forceChown: fc }) - unpack.on('finish', t.end) + unpack.on('finish', () => t.end()) unpack.on('warn', t.fail) unpack.end(tardata) }) @@ -2427,12 +2656,16 @@ t.test('futimes/fchown failures', t => { t.test('sync unpack', t => { t.plan(2) t.test('strict', t => { - const unpack = new Unpack.Sync({ cwd: dir, strict: true, forceChown: fc }) + const unpack = new UnpackSync({ + cwd: dir, + strict: true, + forceChown: fc, + }) unpack.end(tardata) t.end() }) t.test('loose', t => { - const unpack = new Unpack.Sync({ cwd: dir, forceChown: fc }) + const unpack = new UnpackSync({ cwd: dir, forceChown: fc }) unpack.on('warn', t.fail) unpack.end(tardata) t.end() @@ -2451,7 +2684,11 @@ t.test('futimes/fchown failures', t => { t.test('async unpack', t => { t.plan(2) t.test('strict', t => { - const unpack = new Unpack({ cwd: dir, strict: true, forceChown: fc }) + const unpack = new Unpack({ + cwd: dir, + strict: true, + forceChown: fc, + }) t.plan(3) unpack.on('error', er => t.equal(er, poop)) unpack.end(tardata) @@ -2459,22 +2696,26 @@ t.test('futimes/fchown failures', t => { t.test('loose', t => { const unpack = new Unpack({ cwd: dir, forceChown: fc }) t.plan(3) - unpack.on('warn', (code, m, er) => t.equal(er, poop)) + unpack.on('warn', (_code, _m, er) => t.equal(er, poop)) unpack.end(tardata) }) }) t.test('sync unpack', t => { t.plan(2) 
t.test('strict', t => { - const unpack = new Unpack.Sync({ cwd: dir, strict: true, forceChown: fc }) + const unpack = new UnpackSync({ + cwd: dir, + strict: true, + forceChown: fc, + }) t.plan(3) unpack.on('error', er => t.equal(er, poop)) unpack.end(tardata) }) t.test('loose', t => { - const unpack = new Unpack.Sync({ cwd: dir, forceChown: fc }) + const unpack = new UnpackSync({ cwd: dir, forceChown: fc }) t.plan(3) - unpack.on('warn', (c, m, er) => t.equal(er, poop)) + unpack.on('warn', (_c, _m, er) => t.equal(er, poop)) unpack.end(tardata) }) }) @@ -2490,7 +2731,7 @@ t.test('onentry option is preserved', t => { t.teardown(() => rimraf(basedir)) let oecalls = 0 - const onentry = entry => oecalls++ + const onentry = _entry => oecalls++ const data = makeTar([ { path: 'd/i', @@ -2581,7 +2822,11 @@ t.test('do not reuse hardlinks, only nlink=1 files', t => { const check = t => { for (const f in checks) { - t.equal(fs.readFileSync(basedir + '/' + f, 'utf8'), checks[f], f) + t.equal( + fs.readFileSync(basedir + '/' + f, 'utf8'), + checks[f], + f, + ) t.equal(fs.statSync(basedir + '/' + f).nlink, 1, f) } t.end() @@ -2623,10 +2868,15 @@ t.test('trying to unpack a non-zlib gzip file should fail', t => { new Unpack(opts) .once('error', er => t.match(er, expect, 'async emits')) .end(dataGzip) - const skip = !/^v([0-9]|1[0-3])\./.test(process.version) ? false + const skip = !/^v([0-9]|1[0-3])\./.test(process.version) + ? false : 'node prior to v14 did not raise sync zlib errors properly' - t.throws(() => new UnpackSync(opts).end(dataGzip), - expect, 'sync throws', { skip }) + t.throws( + () => new UnpackSync(opts).end(dataGzip), + expect, + 'sync throws', + { skip }, + ) }) t.test('bad archive if no gzip', t => { @@ -2639,7 +2889,11 @@ t.test('trying to unpack a non-zlib gzip file should fail', t => { new Unpack(opts) .on('error', er => t.match(er, expect, 'async emits')) .end(data) - t.throws(() => new UnpackSync(opts).end(data), expect, 'sync throws') + t.throws( + () => new UnpackSync(opts).end(data), + expect, + 'sync throws', + ) }) t.end() @@ -2650,7 +2904,7 @@ t.test('handle errors on fs.close', t => { const { close, closeSync } = fs // have to actually close them, or else windows gets mad fs.close = (fd, cb) => close(fd, () => cb(poop)) - fs.closeSync = (fd) => { + fs.closeSync = fd => { closeSync(fd) throw poop } @@ -2676,65 +2930,77 @@ t.test('handle errors on fs.close', t => { new Unpack({ cwd: dir + '/async', strict: true }) .on('error', er => t.equal(er, poop, 'async')) .end(data) - t.throws(() => new UnpackSync({ - cwd: normPath(dir + '/sync'), strict: true, - }).end(data), poop, 'sync') + t.throws( + () => + new UnpackSync({ + cwd: normPath(dir + '/sync'), + strict: true, + }).end(data), + poop, + 'sync', + ) }) -t.test('drop entry from dirCache if no longer a directory', { - skip: isWindows && 'symlinks not fully supported', -}, t => { - const dir = path.resolve(unpackdir, 'dir-cache-error') - mkdirp.sync(dir + '/sync/y') - mkdirp.sync(dir + '/async/y') - const data = makeTar([ - { - path: 'x', - type: 'Directory', - }, - { - path: 'x', - type: 'SymbolicLink', - linkpath: './y', - }, - { - path: 'x/ginkoid', - type: 'File', - size: 'ginkoid'.length, - }, - 'ginkoid', - '', - '', - ]) - t.plan(2) - const WARNINGS = {} - const check = (t, path) => { - t.equal(fs.statSync(path + '/x').isDirectory(), true) - t.equal(fs.lstatSync(path + '/x').isSymbolicLink(), true) - t.equal(fs.statSync(path + '/y').isDirectory(), true) - t.strictSame(fs.readdirSync(path + '/y'), []) - t.throws(() => 
fs.readFileSync(path + '/x/ginkoid'), { code: 'ENOENT' }) - t.strictSame(WARNINGS[path], [ - 'TAR_ENTRY_ERROR', - 'Cannot extract through symbolic link', +t.test( + 'drop entry from dirCache if no longer a directory', + { + skip: isWindows && 'symlinks not fully supported', + }, + t => { + const dir = path.resolve(unpackdir, 'dir-cache-error') + mkdirp.sync(dir + '/sync/y') + mkdirp.sync(dir + '/async/y') + const data = makeTar([ + { + path: 'x', + type: 'Directory', + }, + { + path: 'x', + type: 'SymbolicLink', + linkpath: './y', + }, + { + path: 'x/ginkoid', + type: 'File', + size: 'ginkoid'.length, + }, + 'ginkoid', + '', + '', ]) - t.end() - } - t.test('async', t => { - const path = dir + '/async' - new Unpack({ cwd: path }) - .on('warn', (code, msg) => WARNINGS[path] = [code, msg]) - .on('end', () => check(t, path)) - .end(data) - }) - t.test('sync', t => { - const path = dir + '/sync' - new UnpackSync({ cwd: path }) - .on('warn', (code, msg) => WARNINGS[path] = [code, msg]) - .end(data) - check(t, path) - }) -}) + t.plan(2) + const WARNINGS = {} + const check = (t, path) => { + t.equal(fs.statSync(path + '/x').isDirectory(), true) + t.equal(fs.lstatSync(path + '/x').isSymbolicLink(), true) + t.equal(fs.statSync(path + '/y').isDirectory(), true) + t.strictSame(fs.readdirSync(path + '/y'), []) + t.throws(() => fs.readFileSync(path + '/x/ginkoid'), { + code: 'ENOENT', + }) + t.strictSame(WARNINGS[path], [ + 'TAR_ENTRY_ERROR', + 'TAR_SYMLINK_ERROR: Cannot extract through symbolic link', + ]) + t.end() + } + t.test('async', t => { + const path = dir + '/async' + new Unpack({ cwd: path }) + .on('warn', (code, msg) => (WARNINGS[path] = [code, msg])) + .on('end', () => check(t, path)) + .end(data) + }) + t.test('sync', t => { + const path = dir + '/sync' + new UnpackSync({ cwd: path }) + .on('warn', (code, msg) => (WARNINGS[path] = [code, msg])) + .end(data) + check(t, path) + }) + }, +) t.test('using strip option when top level file exists', t => { const dir = path.resolve(unpackdir, 'strip-with-top-file') @@ -2792,7 +3058,8 @@ t.test('using strip option when top level file exists', t => { t.test('handle EPERMs when creating symlinks', t => { // https://github.com/npm/node-tar/issues/265 - const msg = 'You do not have sufficient privilege to perform this operation.' + const msg = + 'You do not have sufficient privilege to perform this operation.' 
const er = Object.assign(new Error(msg), { code: 'EPERM', }) @@ -2838,11 +3105,15 @@ t.test('handle EPERMs when creating symlinks', t => { mkdirp.sync(`${dir}/async`) const check = path => { - t.match(WARNINGS, [ - ['TAR_ENTRY_ERROR', msg], - ['TAR_ENTRY_ERROR', msg], - ['TAR_ENTRY_ERROR', msg], - ], 'got expected warnings') + t.match( + WARNINGS, + [ + ['TAR_ENTRY_ERROR', msg], + ['TAR_ENTRY_ERROR', msg], + ['TAR_ENTRY_ERROR', msg], + ], + 'got expected warnings', + ) t.equal(WARNINGS.length, 3) WARNINGS.length = 0 t.equal(fs.readFileSync(`${path}/x/y`, 'utf8'), 'hello, world') @@ -2855,13 +3126,13 @@ t.test('handle EPERMs when creating symlinks', t => { const WARNINGS = [] const u = new Unpack({ cwd: `${dir}/async`, - onwarn: (code, msg, er) => WARNINGS.push([code, msg]), + onwarn: (code, msg, _er) => WARNINGS.push([code, msg]), }) u.on('end', () => { check(`${dir}/async`) const u = new UnpackSync({ cwd: `${dir}/sync`, - onwarn: (code, msg, er) => WARNINGS.push([code, msg]), + onwarn: (code, msg, _er) => WARNINGS.push([code, msg]), }) u.end(data) check(`${dir}/sync`) @@ -2888,8 +3159,8 @@ t.test('close fd when error writing', t => { t.teardown(mutateFS.fail('write', new Error('nope'))) const CLOSES = [] const OPENS = {} - const { open } = require('fs') - t.teardown(() => fs.open = open) + const { open } = fs + t.teardown(() => (fs.open = open)) fs.open = (...args) => { const cb = args.pop() args.push((er, fd) => { @@ -2898,10 +3169,12 @@ t.test('close fd when error writing', t => { }) return open.call(fs, ...args) } - t.teardown(mutateFS.mutateArgs('close', ([fd]) => { - CLOSES.push(fd) - return [fd] - })) + t.teardown( + mutateFS.mutateArgs('close', ([fd]) => { + CLOSES.push(fd) + return [fd] + }), + ) const WARNINGS = [] const dir = path.resolve(unpackdir, 'close-on-write-error') mkdirp.sync(dir) @@ -2941,8 +3214,8 @@ t.test('close fd when error setting mtime', t => { t.teardown(mutateFS.fail('utimes', new Error('nooooope'))) const CLOSES = [] const OPENS = {} - const { open } = require('fs') - t.teardown(() => fs.open = open) + const { open } = fs + t.teardown(() => (fs.open = open)) fs.open = (...args) => { const cb = args.pop() args.push((er, fd) => { @@ -2951,10 +3224,12 @@ t.test('close fd when error setting mtime', t => { }) return open.call(fs, ...args) } - t.teardown(mutateFS.mutateArgs('close', ([fd]) => { - CLOSES.push(fd) - return [fd] - })) + t.teardown( + mutateFS.mutateArgs('close', ([fd]) => { + CLOSES.push(fd) + return [fd] + }), + ) const WARNINGS = [] const dir = path.resolve(unpackdir, 'close-on-futimes-error') mkdirp.sync(dir) @@ -2987,7 +3262,10 @@ t.test('do not hang on large files that fail to open()', t => { '', ]) t.teardown(mutateFS.fail('open', new Error('nope'))) - const dir = path.resolve(unpackdir, 'no-hang-for-large-file-failures') + const dir = path.resolve( + unpackdir, + 'no-hang-for-large-file-failures', + ) mkdirp.sync(dir) const WARNINGS = [] const unpack = new Unpack({ @@ -2998,11 +3276,11 @@ t.test('do not hang on large files that fail to open()', t => { t.strictSame(WARNINGS, [['TAR_ENTRY_ERROR', 'nope']]) t.end() }) - unpack.write(data.slice(0, 2048)) + unpack.write(data.subarray(0, 2048)) setTimeout(() => { - unpack.write(data.slice(2048, 4096)) + unpack.write(data.subarray(2048, 4096)) setTimeout(() => { - unpack.write(data.slice(4096)) + unpack.write(data.subarray(4096)) setTimeout(() => { unpack.end() }) @@ -3010,165 +3288,178 @@ t.test('do not hang on large files that fail to open()', t => { }) }) -t.test('dirCache pruning unicode 
normalized collisions', { - skip: isWindows && 'symlinks not fully supported', -}, t => { - const data = makeTar([ - { - type: 'Directory', - path: 'foo', - }, - { - type: 'File', - path: 'foo/bar', - size: 1, - }, - 'x', - { - type: 'Directory', - // café - path: Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString(), - }, - { - type: 'SymbolicLink', - // cafe with a ` - path: Buffer.from([0x63, 0x61, 0x66, 0x65, 0xcc, 0x81]).toString(), - linkpath: 'foo', - }, - { - type: 'Directory', - path: 'foo', - }, - { - type: 'File', - path: Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString() + '/bar', - size: 1, - }, - 'y', - '', - '', - ]) - - const check = (path, dirCache, t) => { - path = path.replace(/\\/g, '/') - t.strictSame([...dirCache.entries()][0], [`${path}/foo`, true]) - t.equal(fs.readFileSync(path + '/foo/bar', 'utf8'), 'x') - t.end() - } - - t.test('sync', t => { - const path = t.testdir() - const dirCache = new Map() - new UnpackSync({ cwd: path, dirCache }).end(data) - check(path, dirCache, t) - }) - t.test('async', t => { - const path = t.testdir() - const dirCache = new Map() - new Unpack({ cwd: path, dirCache }) - .on('close', () => check(path, dirCache, t)) - .end(data) - }) +t.test( + 'dirCache pruning unicode normalized collisions', + { + skip: isWindows && 'symlinks not fully supported', + }, + t => { + const data = makeTar([ + { + type: 'Directory', + path: 'foo', + }, + { + type: 'File', + path: 'foo/bar', + size: 1, + }, + 'x', + { + type: 'Directory', + // café + path: Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString(), + }, + { + type: 'SymbolicLink', + // cafe with a ` + path: Buffer.from([ + 0x63, 0x61, 0x66, 0x65, 0xcc, 0x81, + ]).toString(), + linkpath: 'foo', + }, + { + type: 'Directory', + path: 'foo', + }, + { + type: 'File', + path: + Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString() + + '/bar', + size: 1, + }, + 'y', + '', + '', + ]) - t.end() -}) + const check = (path, dirCache, t) => { + path = path.replace(/\\/g, '/') + t.strictSame([...dirCache.entries()][0], [`${path}/foo`, true]) + t.equal(fs.readFileSync(path + '/foo/bar', 'utf8'), 'x') + t.end() + } -t.test('dircache prune all on windows when symlink encountered', t => { - if (process.platform !== 'win32') { - process.env.TESTING_TAR_FAKE_PLATFORM = 'win32' - t.teardown(() => { - delete process.env.TESTING_TAR_FAKE_PLATFORM + t.test('sync', t => { + const path = t.testdir() + const dirCache = new Map() + new UnpackSync({ cwd: path, dirCache }).end(data) + check(path, dirCache, t) + }) + t.test('async', t => { + const path = t.testdir() + const dirCache = new Map() + new Unpack({ cwd: path, dirCache }) + .on('close', () => check(path, dirCache, t)) + .end(data) }) - } - const symlinks = [] - const Unpack = t.mock('../lib/unpack.js', { - fs: { - ...fs, - symlink: (target, dest, cb) => { - symlinks.push(['async', target, dest]) - process.nextTick(cb) - }, - symlinkSync: (target, dest) => symlinks.push(['sync', target, dest]), - }, - }) - const UnpackSync = Unpack.Sync - const data = makeTar([ - { - type: 'Directory', - path: 'foo', - }, - { - type: 'File', - path: 'foo/bar', - size: 1, - }, - 'x', - { - type: 'Directory', - // café - path: Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString(), - }, - { - type: 'SymbolicLink', - // cafe with a ` - path: Buffer.from([0x63, 0x61, 0x66, 0x65, 0xcc, 0x81]).toString(), - linkpath: 'safe/actually/but/cannot/be/too/careful', - }, - { - type: 'File', - path: 'bar/baz', - size: 1, - }, - 'z', - '', - '', - ]) + t.end() + }, +) + +t.test( + 'dircache 
prune all on windows when symlink encountered',
+  async t => {
+    if (process.platform !== 'win32') {
+      process.env.TESTING_TAR_FAKE_PLATFORM = 'win32'
+      t.teardown(() => {
+        delete process.env.TESTING_TAR_FAKE_PLATFORM
+      })
+    }
+    const symlinks = []
+    const { Unpack, UnpackSync } = await t.mockImport('../dist/esm/unpack.js', {
+      fs: {
+        ...fs,
+        symlink: (target, dest, cb) => {
+          symlinks.push(['async', target, dest])
+          process.nextTick(cb)
+        },
+        symlinkSync: (target, dest) =>
+          symlinks.push(['sync', target, dest]),
+      },
+    })
+
+    const data = makeTar([
+      {
+        type: 'Directory',
+        path: 'foo',
+      },
+      {
+        type: 'File',
+        path: 'foo/bar',
+        size: 1,
+      },
+      'x',
+      {
+        type: 'Directory',
+        // café
+        path: Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString(),
+      },
+      {
+        type: 'SymbolicLink',
+        // cafe with a `
+        path: Buffer.from([
+          0x63, 0x61, 0x66, 0x65, 0xcc, 0x81,
+        ]).toString(),
+        linkpath: 'safe/actually/but/cannot/be/too/careful',
+      },
+      {
+        type: 'File',
+        path: 'bar/baz',
+        size: 1,
+      },
+      'z',
+      '',
+      '',
+    ])
+
+    const check = (path, dirCache, t) => {
+      // symlink blew away all dirCache entries before it
+      path = path.replace(/\\/g, '/')
+      t.strictSame([...dirCache.entries()], [[`${path}/bar`, true]])
+      t.equal(fs.readFileSync(`${path}/foo/bar`, 'utf8'), 'x')
+      t.equal(fs.readFileSync(`${path}/bar/baz`, 'utf8'), 'z')
+      t.end()
+    }
+
+    t.test('sync', t => {
+      const path = t.testdir()
+      const dirCache = new Map()
+      new UnpackSync({ cwd: path, dirCache }).end(data)
+      check(path, dirCache, t)
+    })
+
+    t.test('async', t => {
+      const path = t.testdir()
+      const dirCache = new Map()
+      new Unpack({ cwd: path, dirCache })
+        .on('close', () => check(path, dirCache, t))
+        .end(data)
+    })
+
+    t.end()
+  },
+)

-t.test('recognize C:.. as a dot path part', t => {
+t.test('recognize C:.. as a dot path part', async t => {
   if (process.platform !== 'win32') {
     process.env.TESTING_TAR_FAKE_PLATFORM = 'win32'
     t.teardown(() => {
       delete process.env.TESTING_TAR_FAKE_PLATFORM
     })
   }
-  const Unpack = t.mock('../lib/unpack.js', {
-    path: {
-      ...path.win32,
-      win32: path.win32,
-      posix: path.posix,
+  const { Unpack, UnpackSync } = await t.mockImport(
+    '../dist/esm/unpack.js',
+    {
+      path: {
+        ...path.win32,
+        win32: path.win32,
+        posix: path.posix,
+      },
     },
-  })
-  const UnpackSync = Unpack.Sync
+  )
 
   const data = makeTar([
     {
@@ -3202,7 +3493,12 @@ t.test('recognize C:.. as a dot path part', t => {
       'C:../x/y/z',
       'C:../x/y/z',
     ],
-    ['TAR_ENTRY_ERROR', "path contains '..'", 'x:../y/z', 'x:../y/z'],
+    [
+      'TAR_ENTRY_ERROR',
+      "path contains '..'",
+      'x:../y/z',
+      'x:../y/z',
+    ],
     [
       'TAR_ENTRY_INFO',
       'stripping Y: from absolute path',
@@ -3218,7 +3514,8 @@ t.test('recognize C:.. 
as a dot path part', t => { const path = t.testdir() new Unpack({ cwd: path, - onwarn: (c, w, { entry, path }) => warnings.push([c, w, path, entry.path]), + onwarn: (c, w, { entry, path }) => + warnings.push([c, w, path, entry.path]), }) .on('close', () => check(path, warnings, t)) .end(data) @@ -3229,7 +3526,8 @@ t.test('recognize C:.. as a dot path part', t => { const path = t.testdir() new UnpackSync({ cwd: path, - onwarn: (c, w, { entry, path }) => warnings.push([c, w, path, entry.path]), + onwarn: (c, w, { entry, path }) => + warnings.push([c, w, path, entry.path]), }).end(data) check(path, warnings, t) }) @@ -3246,15 +3544,16 @@ t.test('excessively deep subfolder nesting', async t => { const check = (t, maxDepth = 1024) => { t.match(warnings, [ - ['TAR_ENTRY_ERROR', + [ + 'TAR_ENTRY_ERROR', 'path excessively deep', { entry: ReadEntry, path: /^\.(\/a){1024,}\/foo.txt$/, depth: 222372, maxDepth, - } - ] + }, + ], ]) warnings.length = 0 t.end() @@ -3264,15 +3563,17 @@ t.test('excessively deep subfolder nesting', async t => { const cwd = t.testdir() new Unpack({ cwd, - onwarn - }).on('end', () => check(t)).end(data) + onwarn, + }) + .on('end', () => check(t)) + .end(data) }) t.test('sync', t => { const cwd = t.testdir() new UnpackSync({ cwd, - onwarn + onwarn, }).end(data) check(t) }) @@ -3283,7 +3584,9 @@ t.test('excessively deep subfolder nesting', async t => { cwd, onwarn, maxDepth: 64, - }).on('end', () => check(t, 64)).end(data) + }) + .on('end', () => check(t, 64)) + .end(data) }) t.test('sync set md', t => { diff --git a/test/update.js b/test/update.js index 7034a165..9fb57b6f 100644 --- a/test/update.js +++ b/test/update.js @@ -1,24 +1,27 @@ -'use strict' -const t = require('tap') -const u = require('../lib/update.js') -const path = require('path') -const fs = require('fs') -const mutateFS = require('mutate-fs') - -const { resolve } = require('path') +import t from 'tap' +import { update as u } from '../dist/esm/update.js' + +import path, {dirname} from 'path' +import fs from 'fs' +import mutateFS from 'mutate-fs' + +import { resolve } from 'path' +import {fileURLToPath} from 'url' +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) const fixtures = path.resolve(__dirname, 'fixtures') const tars = path.resolve(fixtures, 'tars') -const zlib = require('zlib') +import zlib from 'zlib' +import { spawn } from 'child_process' -const spawn = require('child_process').spawn const data = fs.readFileSync(tars + '/body-byte-counts.tar') -const dataNoNulls = data.slice(0, data.length - 1024) +const dataNoNulls = data.subarray(0, data.length - 1024) const fixtureDef = { 'body-byte-counts.tar': data, 'no-null-eof.tar': dataNoNulls, - 'truncated-head.tar': Buffer.concat([dataNoNulls, data.slice(0, 500)]), - 'truncated-body.tar': Buffer.concat([dataNoNulls, data.slice(0, 700)]), + 'truncated-head.tar': Buffer.concat([dataNoNulls, data.subarray(0, 500)]), + 'truncated-body.tar': Buffer.concat([dataNoNulls, data.subarray(0, 700)]), 'zero.tar': Buffer.from(''), 'empty.tar': Buffer.alloc(512), 'compressed.tgz': zlib.gzipSync(data), diff --git a/test/warn-mixin.js b/test/warn-method.js similarity index 81% rename from test/warn-mixin.js rename to test/warn-method.js index 36350e47..4d040f42 100644 --- a/test/warn-mixin.js +++ b/test/warn-method.js @@ -1,8 +1,13 @@ -const t = require('tap') -const EE = require('events').EventEmitter -const warner = require('../lib/warn-mixin.js') +import t from 'tap' +import EE from 'events' +import { warnMethod } from 
'../dist/esm/warn-method.js' + +class Warner extends EE { + warn(code, message, data = {}) { + return warnMethod(this, code, message, data) + } +} -const Warner = warner(EE) const w = new Warner() diff --git a/test/winchars.js b/test/winchars.js index 120c581d..b1ffe12c 100644 --- a/test/winchars.js +++ b/test/winchars.js @@ -1,6 +1,5 @@ -'use strict' -const t = require('tap') -const wc = require('../lib/winchars.js') +import t from 'tap' +import * as wc from '../dist/esm/winchars.js' t.equal(wc.encode('<>'), '\uf03c\uf03e', 'encode') t.equal(wc.decode(wc.encode('<>')), '<>', 'decode') diff --git a/test/write-entry.js b/test/write-entry.js index b72e53a1..27bd364c 100644 --- a/test/write-entry.js +++ b/test/write-entry.js @@ -1,9 +1,27 @@ -'use strict' -const t = require('tap') -const mkdirp = require('mkdirp') +import t from 'tap' +import { mkdirp } from 'mkdirp' +import fs from 'fs' +import { ReadEntry } from '../dist/esm/read-entry.js' +import { makeTar } from './fixtures/make-tar.js' +import { + WriteEntry, + WriteEntrySync, + WriteEntryTar, +} from '../dist/esm/write-entry.js' +import path, { dirname } from 'path' +import { Header } from '../dist/esm/header.js' +import mutateFS from 'mutate-fs' +import { Parser } from '../dist/esm/parse.js' +import { rimraf } from 'rimraf' +import { normalizeWindowsPath as normPath } from '../dist/esm/normalize-windows-path.js' +import { fileURLToPath } from 'url' + +const { default: chmodr } = await import('chmodr') + +const __filename = fileURLToPath(import.meta.url) +const __dirname = dirname(__filename) // make our tests verify that windows link targets get turned into / paths -const fs = require('fs') const { readlink, readlinkSync } = fs fs.readlink = (path, cb) => { readlink(path, (er, path) => { @@ -16,20 +34,10 @@ fs.readlink = (path, cb) => { } fs.readlinkSync = path => readlinkSync(path).replace(/\//g, '\\') -const ReadEntry = require('../lib/read-entry.js') -const makeTar = require('./make-tar.js') -const WriteEntry = require('../lib/write-entry.js') -const path = require('path') const fixtures = path.resolve(__dirname, 'fixtures') const files = path.resolve(fixtures, 'files') -const Header = require('../lib/header.js') -const mutateFS = require('mutate-fs') process.env.USER = 'isaacs' -const chmodr = require('chmodr') -const Parser = require('../lib/parse.js') -const rimraf = require('rimraf') const isWindows = process.platform === 'win32' -const normPath = require('../lib/normalize-windows-path.js') t.test('set up', t => { const one = fs.statSync(files + '/hardlink-1') @@ -49,7 +57,8 @@ t.test('100 byte filename', t => { t.plan(2) const runTest = t => { - const f = '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' + const f = + '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' const ws = new WriteEntry(f, { cwd: files, linkCache: linkCache, @@ -75,7 +84,7 @@ t.test('100 byte filename', t => { }, }) - const wss = new WriteEntry.Sync(f, { + const wss = new WriteEntrySync(f, { cwd: files, linkCache: linkCache, statCache: statCache, @@ -83,8 +92,10 @@ t.test('100 byte filename', t => { linkCache = ws.linkCache statCache = ws.statCache - t.equal(out.slice(512).toString('hex'), - wss.read().slice(512).toString('hex')) + t.equal( + out.slice(512).toString('hex'), + wss.read().subarray(512).toString('hex'), + ) t.equal(out.length, 1024) t.equal(out.slice(0, 100).toString(), f) @@ -102,23 +113,25 @@ t.test('100 byte filename', t => { devmin: 
0, }) - t.equal(out.slice(512).toString('hex'), + t.equal( + out.slice(512).toString('hex'), '6363636363636363636363636363636363636363636363636363636363636363' + - '6363636363636363636363636363636363636363636363636363636363636363' + - '6363636363636363636363636363636363636363636363636363636363636363' + - '6363636300000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000' + - '0000000000000000000000000000000000000000000000000000000000000000') + '6363636363636363636363636363636363636363636363636363636363636363' + + '6363636363636363636363636363636363636363636363636363636363636363' + + '6363636300000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000' + + '0000000000000000000000000000000000000000000000000000000000000000', + ) t.end() }) @@ -151,7 +164,7 @@ t.test('directory', t => { }) t.equal(out.length, 512) - const wss = new WriteEntry.Sync('dir', { cwd: files }) + const wss = new WriteEntrySync('dir', { cwd: files }) t.equal(wss.read().length, 512) t.match(wss.header, { cksumValid: true, @@ -193,34 +206,38 @@ t.test('empty path for cwd', t => { }) }) -t.test('symlink', { - skip: isWindows && 'symlinks not fully supported', -}, t => { - const ws = new WriteEntry('symlink', { cwd: files }) - let out = [] - ws.on('data', c => out.push(c)) - const header = { - cksumValid: true, - needPax: false, - path: 'symlink', - size: 0, - linkpath: 'hardlink-2', - uname: 'isaacs', - gname: null, - devmaj: 0, - devmin: 0, - } +t.test( + 'symlink', + { + skip: isWindows && 'symlinks not fully supported', + }, + t => { + const ws = new WriteEntry('symlink', { cwd: files }) + let out = [] + ws.on('data', c => out.push(c)) + const header = { + cksumValid: true, + needPax: false, + path: 'symlink', + size: 0, + linkpath: 'hardlink-2', + uname: 'isaacs', + gname: null, + devmaj: 0, + devmin: 0, + } - const wss = new WriteEntry.Sync('symlink', { cwd: files }) - t.match(wss.header, header) + const wss = new WriteEntrySync('symlink', { cwd: files }) + t.match(wss.header, header) - 
ws.on('end', _ => { - out = Buffer.concat(out) - t.equal(out.length, 512) - t.match(ws.header, header) - t.end() - }) -}) + ws.on('end', _ => { + out = Buffer.concat(out) + t.equal(out.length, 512) + t.match(ws.header, header) + t.end() + }) + }, +) t.test('zero-byte file', t => { const ws = new WriteEntry('files/zero-byte.txt', { cwd: fixtures }) @@ -253,7 +270,8 @@ t.test('zero-byte file, but close fails', t => { const ws = new WriteEntry('files/1024-bytes.txt', { cwd: fixtures }) ws.on('end', _ => - t.fail('should not get an end, because the close fails')) + t.fail('should not get an end, because the close fails'), + ) ws.on('error', er => { t.match(er, { message: 'poop' }) @@ -263,7 +281,7 @@ t.test('zero-byte file, but close fails', t => { }) t.test('hardlinks', t => { - const wss = new WriteEntry.Sync('hardlink-1', { + const wss = new WriteEntrySync('hardlink-1', { cwd: files, }) @@ -298,7 +316,9 @@ t.test('hardlinks far away', t => { const h1 = 'hardlink-1' const f = path.resolve(files, h1) const stat = fs.statSync(f) - const linkCache = new Map([[stat.dev + ':' + stat.ino, '/a/b/c/d/e']]) + const linkCache = new Map([ + [stat.dev + ':' + stat.ino, '/a/b/c/d/e'], + ]) const ws = new WriteEntry('files/hardlink-2', { cwd: fixtures, @@ -327,7 +347,8 @@ t.test('hardlinks far away', t => { }) t.test('really deep path', t => { - const f = 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' + const f = + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' const ws = new WriteEntry(f, { cwd: files }) let out = [] ws.on('data', c => out.push(c)) @@ -352,7 +373,8 @@ t.test('really deep path', t => { }) t.test('no pax', t => { - const f = 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' + const f = + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' const ws = new WriteEntry(f, { cwd: files, noPax: true }) let out = [] ws.on('data', c => out.push(c)) @@ -381,7 +403,7 @@ t.test('nonexistent file', t => { const ws = new WriteEntry('does not exist', { cwd: files }) ws.on('error', er => { t.match(er, { - message: 'ENOENT: no such file or directory, lstat \'' + f + '\'', + message: "ENOENT: no such file or directory, lstat '" + f + "'", code: 'ENOENT', path: f, syscall: 'lstat', @@ -395,7 +417,7 @@ t.test('absolute path', t => { const { root } = path.parse(absolute) const f = root + root + root + absolute const warn = normPath(isWindows ? root : root + root + root + root) - t.test('preservePaths=false strict=false', t => { + t.test('preservePaths=false strict=false warn='+warn, t => { const warnings = [] // on windows, c:\c:\c:\... is a valid path, so just use the // single-root absolute version of it. 
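The `absolute path` cases in the surrounding hunks all pivot on the same behavior: without `preservePaths`, the root is stripped from the entry path and a warning is raised rather than an error. A minimal sketch of that behavior, assuming the package's main entry re-exports `WriteEntry` as the `src/index.ts` diffstat suggests, with `/tmp/example.txt` as a placeholder file:

```js
import { WriteEntry } from 'tar'

// Without preservePaths, the leading root ('/') is stripped from
// the entry's path and a TAR_ENTRY_INFO warning is emitted; with
// strict: true, the same condition throws instead.
const ws = new WriteEntry('/tmp/example.txt', {
  onwarn: (code, message, data) =>
    console.log(code, message, data.path),
})
ws.resume()
```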
@@ -408,11 +430,13 @@ t.test('absolute path', t => { ws.on('end', _ => { out = Buffer.concat(out) t.equal(out.length, 1024) - t.match(warnings, [[ - 'TAR_ENTRY_INFO', - `stripping ${warn} from absolute path`, - { path: normPath(isWindows ? absolute : f) }, - ]]) + t.match(warnings, [ + [ + 'TAR_ENTRY_INFO', + `stripping ${warn} from absolute path`, + { path: normPath(isWindows ? absolute : f) }, + ], + ]) t.match(ws.header, { cksumValid: true, @@ -467,22 +491,25 @@ t.test('absolute path', t => { }) t.test('preservePaths=false strict=true', t => { - t.throws(_ => { - new WriteEntry(isWindows ? absolute : f, { - strict: true, - cwd: files, - }) - }, { - message: /stripping .* from absolute path/, - path: normPath(isWindows ? absolute : f), - }) + t.throws( + _ => { + new WriteEntry(isWindows ? absolute : f, { + strict: true, + cwd: files, + }) + }, + { + message: /stripping .* from absolute path/, + path: normPath(isWindows ? absolute : f), + }, + ) t.end() }) t.end() }) -t.throws(_ => new WriteEntry(null), new TypeError('path is required')) +t.throws(() => new WriteEntry(null), TypeError) t.test('no user environ, sets uname to empty string', t => { delete process.env.USER @@ -508,31 +535,35 @@ t.test('no user environ, sets uname to empty string', t => { }) }) -t.test('an unsuppored type', { - skip: isWindows && '/dev/random on windows', -}, t => { - const ws = new WriteEntry('/dev/random', { preservePaths: true }) - ws.on('data', c => { - throw new Error('should not get data from random') - }) - ws.on('stat', stat => { - t.match(stat, { - dev: Number, - mode: 0o020666, - nlink: 1, - rdev: Number, - blksize: Number, - ino: Number, - size: 0, - blocks: 0, +t.test( + 'an unsuppored type', + { + skip: isWindows && '/dev/random on windows', + }, + t => { + const ws = new WriteEntry('/dev/random', { preservePaths: true }) + ws.on('data', (_chunk) => { + throw new Error('should not get data from random') }) - t.ok(stat.isCharacterDevice(), 'random is a character device') - }) - ws.on('end', _ => { - t.match(ws, { type: 'Unsupported', path: '/dev/random' }) - t.end() - }) -}) + ws.on('stat', stat => { + t.match(stat, { + dev: Number, + mode: 0o020666, + nlink: 1, + rdev: Number, + blksize: Number, + ino: Number, + size: 0, + blocks: 0, + }) + t.ok(stat.isCharacterDevice(), 'random is a character device') + }) + ws.on('end', _ => { + t.match(ws, { type: 'Unsupported', path: '/dev/random' }) + t.end() + }) + }, +) t.test('readlink fail', t => { const expect = { @@ -542,23 +573,31 @@ t.test('readlink fail', t => { // pretend everything is a symbolic link, then read something that isn't t.teardown(mutateFS.statType('SymbolicLink')) t.throws(_ => { - return new WriteEntry.Sync('write-entry.js', { cwd: __dirname }) + return new WriteEntrySync('write-entry.js', { cwd: __dirname }) }, expect) - new WriteEntry('write-entry.js', { cwd: __dirname }).on('error', er => { - t.match(er, expect) - t.equal(normPath(er.path), normPath(__filename)) - t.end() - }) + new WriteEntry('write-entry.js', { cwd: __dirname }).on( + 'error', + er => { + t.match(er, expect) + t.equal(normPath(er.path), normPath(__filename)) + t.end() + }, + ) }) t.test('open fail', t => { t.teardown(mutateFS.fail('open', new Error('pwn'))) - t.throws(_ => new WriteEntry.Sync('write-entry.js', { cwd: __dirname }), - { message: 'pwn' }) - new WriteEntry('write-entry.js', { cwd: __dirname }).on('error', er => { - t.match(er, { message: 'pwn' }) - t.end() - }) + t.throws( + _ => new WriteEntrySync('write-entry.js', { cwd: __dirname }), + { message: 
'pwn' }, + ) + new WriteEntry('write-entry.js', { cwd: __dirname }).on( + 'error', + er => { + t.match(er, { message: 'pwn' }) + t.end() + }, + ) }) t.test('read fail', t => { @@ -568,16 +607,22 @@ t.test('read fail', t => { syscall: 'read', } // pretend everything is a file, then read something that isn't - t.teardown(mutateFS.statMutate((er, st) => { - if (er) { - return [er, st] - } - st.isFile = () => true - st.size = 123 - })) - t.throws(_ => new WriteEntry.Sync('fixtures', { - cwd: __dirname, - }), expect) + t.teardown( + mutateFS.statMutate((er, st) => { + if (er) { + return [er, st] + } + st.isFile = () => true + st.size = 123 + }), + ) + t.throws( + _ => + new WriteEntrySync('fixtures', { + cwd: __dirname, + }), + expect, + ) new WriteEntry('fixtures', { cwd: __dirname }).on('error', er => { t.match(er, expect) t.end() @@ -585,27 +630,34 @@ t.test('read fail', t => { }) t.test('read invalid EOF', t => { - t.teardown(mutateFS.mutate('read', (er, br) => [er, 0])) + t.teardown(mutateFS.mutate('read', (er, _bytesRead) => [er, 0])) const expect = { message: 'encountered unexpected EOF', path: normPath(__filename), syscall: 'read', code: 'EOF', } - t.throws(_ => new WriteEntry.Sync('write-entry.js', { cwd: __dirname }), - expect) - new WriteEntry('write-entry.js', { cwd: __dirname }).on('error', er => { - t.match(er, expect) - t.end() - }) + t.throws( + _ => new WriteEntrySync('write-entry.js', { cwd: __dirname }), + expect, + ) + new WriteEntry('write-entry.js', { cwd: __dirname }).on( + 'error', + er => { + t.match(er, expect) + t.end() + }, + ) }) t.test('read overflow expectation', t => { - t.teardown(mutateFS.statMutate((er, st) => { - if (st) { - st.size = 3 - } - })) + t.teardown( + mutateFS.statMutate((_er, st) => { + if (st) { + st.size = 3 + } + }), + ) const f = '512-bytes.txt' const expect = { message: 'did not encounter expected EOF', @@ -614,17 +666,23 @@ t.test('read overflow expectation', t => { code: 'EOF', } t.plan(2) - t.throws(_ => new WriteEntry.Sync(f, { cwd: files, maxReadSize: 2 }), expect) - new WriteEntry(f, { cwd: files, maxReadSize: 2 }).on('error', er => { - t.match(er, expect) - }).resume() + t.throws( + _ => new WriteEntrySync(f, { cwd: files, maxReadSize: 2 }), + expect, + ) + new WriteEntry(f, { cwd: files, maxReadSize: 2 }) + .on('error', er => { + t.match(er, expect) + }) + .resume() }) t.test('short reads', t => { t.teardown(mutateFS.zenoRead()) const cases = { '1024-bytes.txt': new Array(1024).join('x') + '\n', - '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': new Array(101).join('c'), + '100-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': + new Array(101).join('c'), } const maxReadSize = [null, 1024, 100, 111] @@ -642,12 +700,21 @@ t.test('short reads', t => { ws.on('data', c => out.push(c)) ws.on('end', _ => { out = Buffer.concat(out) - t.equal(out.length, 512 * Math.ceil(1 + contents.length / 512)) - t.equal(out.slice(512).toString().replace(/\0.*$/, ''), contents) - const wss = new WriteEntry.Sync(filename, { cwd: files }) + t.equal( + out.length, + 512 * Math.ceil(1 + contents.length / 512), + ) + t.equal( + out.slice(512).toString().replace(/\0.*$/, ''), + contents, + ) + const wss = new WriteEntrySync(filename, { cwd: files }) const syncOut = wss.read() t.equal(syncOut.length, out.length) - t.equal(syncOut.slice(512).toString(), out.slice(512).toString()) + t.equal( + syncOut.subarray(512).toString(), + out.slice(512).toString(), + ) 
t.end() }) }) @@ -658,53 +725,67 @@ t.test('short reads', t => { t.end() }) -t.test('win32 path conversion', { - skip: isWindows && 'no need to test on windows', -}, t => { - const ws = new WriteEntry('long-path\\r', { - cwd: files, - win32: true, - }) - t.equal(ws.path, 'long-path/r') - t.end() -}) - -t.test('win32 <|>? in paths', { - skip: isWindows && 'do not create annoying junk on windows systems', -}, t => { - const file = path.resolve(fixtures, '<|>?.txt') - const uglyName = Buffer.from('ef80bcef81bcef80beef80bf2e747874', 'hex').toString() - const ugly = path.resolve(fixtures, uglyName) - t.teardown(_ => { - rimraf.sync(file) - rimraf.sync(ugly) - }) +t.test( + 'win32 path conversion', + { + skip: isWindows && 'no need to test on windows', + }, + t => { + const ws = new WriteEntry('long-path\\r', { + cwd: files, + win32: true, + }) + t.equal(ws.path, 'long-path/r') + t.end() + }, +) + +t.test( + 'win32 <|>? in paths', + { + skip: + isWindows && 'do not create annoying junk on windows systems', + }, + t => { + const file = path.resolve(fixtures, '<|>?.txt') + const uglyName = Buffer.from( + 'ef80bcef81bcef80beef80bf2e747874', + 'hex', + ).toString() + const ugly = path.resolve(fixtures, uglyName) + t.teardown(_ => { + rimraf.sync(file) + rimraf.sync(ugly) + }) - fs.writeFileSync(ugly, '<|>?') + fs.writeFileSync(ugly, '<|>?') - const wc = new WriteEntry(uglyName, { - cwd: fixtures, - win32: true, - }) + const wc = new WriteEntry(uglyName, { + cwd: fixtures, + win32: true, + }) - const out = [] - wc.on('data', c => out.push(c)) - wc.on('end', _ => { - const data = Buffer.concat(out).toString() - t.equal(data.slice(0, 4), '<|>?') - t.end() - }) + const out = [] + wc.on('data', c => out.push(c)) + wc.on('end', _ => { + const data = Buffer.concat(out).toString() + t.equal(data.slice(0, 4), '<|>?') + t.end() + }) - t.equal(wc.path, '<|>?.txt') - t.equal(wc.absolute, ugly) -}) + t.equal(wc.path, '<|>?.txt') + t.equal(wc.absolute, ugly) + }, +) t.test('uid doesnt match, dont set uname', t => { - t.teardown(mutateFS.statMutate((er, st) => { - if (st) { - st.uid -= 1 - } - })) + t.teardown( + mutateFS.statMutate((_er, st) => { + if (st) { + st.uid -= 1 + } + }), + ) const ws = new WriteEntry('long-path/r', { cwd: files, }) @@ -721,17 +802,17 @@ t.test('override absolute to some other file', t => { ws.on('end', _ => { const data = Buffer.concat(out) t.equal(data.length, 1024) - t.match(data.slice(512).toString(), /^a\0{511}$/) + t.match(data.subarray(512).toString(), /^a\0{511}$/) t.match(ws, { path: 'blerg', header: { size: 1 }, }) - const wss = new WriteEntry.Sync('blerg', { + const wss = new WriteEntrySync('blerg', { absolute: files + '/one-byte.txt', }) const sdata = wss.read() t.equal(sdata.length, 1024) - t.match(sdata.slice(512).toString(), /^a\0{511}$/) + t.match(sdata.subarray(512).toString(), /^a\0{511}$/) t.match(wss, { path: 'blerg', header: { size: 1 }, @@ -741,7 +822,8 @@ t.test('override absolute to some other file', t => { }) t.test('portable entries, nothing platform-specific', t => { - const om = 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt' + const om = + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt' const ws = new WriteEntry(om, { cwd: files, portable: true, @@ -775,7 +857,7 @@ t.test('portable entries, nothing platform-specific', t => { } const ps = new Parser() - const wss = new WriteEntry.Sync(om, { + const wss = new WriteEntrySync(om, { cwd: files, portable: true, }) @@ -795,7 +877,8 @@ t.test('portable entries, nothing 
platform-specific', t => { }) t.test('no mtime', t => { - const om = 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt' + const om = + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt' const ws = new WriteEntry(om, { cwd: files, noMtime: true, @@ -832,7 +915,7 @@ t.test('no mtime', t => { } const ps = new Parser() - const wss = new WriteEntry.Sync(om, { + const wss = new WriteEntrySync(om, { cwd: files, portable: true, noMtime: true, @@ -853,7 +936,8 @@ t.test('no mtime', t => { }) t.test('force mtime', t => { - const om = 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt' + const om = + 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt' const date = new Date('1979-07-01T19:10:00.000Z') const ws = new WriteEntry(om, { cwd: files, @@ -891,7 +975,7 @@ t.test('force mtime', t => { } const ps = new Parser() - const wss = new WriteEntry.Sync(om, { + const wss = new WriteEntrySync(om, { cwd: files, portable: true, mtime: new Date('1979-07-01T19:10:00.000Z'), @@ -930,7 +1014,7 @@ t.test('portable dir entries, no mtime', t => { } const ps = new Parser() - const wss = new WriteEntry.Sync(dir, { + const wss = new WriteEntrySync(dir, { cwd: files, portable: true, }) @@ -985,19 +1069,19 @@ t.test('write entry from read entry', t => { t.test('basic file', t => { const fileEntry = new ReadEntry(new Header(data)) - const wetFile = new WriteEntry.Tar(fileEntry) + const wetFile = new WriteEntryTar(fileEntry) const out = [] let wetFileEnded = false wetFile.on('data', c => out.push(c)) - wetFile.on('end', _ => wetFileEnded = true) - fileEntry.write(data.slice(512, 550)) - fileEntry.write(data.slice(550, 1000)) - fileEntry.end(data.slice(1000, 1024)) + wetFile.on('end', _ => (wetFileEnded = true)) + fileEntry.write(data.subarray(512, 550)) + fileEntry.write(data.subarray(550, 1000)) + fileEntry.end(data.subarray(1000, 1024)) t.equal(wetFileEnded, true) const result = Buffer.concat(out) t.equal(result.length, 1024) t.equal(result.toString().replace(/\0.*$/, ''), '$') - const body = result.slice(512).toString().replace(/\0*$/, '') + const body = result.subarray(512).toString().replace(/\0*$/, '') t.equal(body, '$$$$$$$$$$') t.end() }) @@ -1019,11 +1103,11 @@ t.test('write entry from read entry', t => { '', ]) const fileEntry = new ReadEntry(new Header(data)) - const wetFile = new WriteEntry.Tar(fileEntry, { portable: true }) + const wetFile = new WriteEntryTar(fileEntry, { portable: true }) const out = [] let wetFileEnded = false wetFile.on('data', c => out.push(c)) - wetFile.on('end', _ => wetFileEnded = true) + wetFile.on('end', _ => (wetFileEnded = true)) fileEntry.end() t.equal(wetFileEnded, true) const result = Buffer.concat(out) @@ -1034,19 +1118,19 @@ t.test('write entry from read entry', t => { t.test('with pax header', t => { const fileEntryPax = new ReadEntry(new Header(data)) fileEntryPax.path = new Array(200).join('$') - const wetPax = new WriteEntry.Tar(fileEntryPax) + const wetPax = new WriteEntryTar(fileEntryPax) let wetPaxEnded = false const out = [] wetPax.on('data', c => out.push(c)) - wetPax.on('end', _ => wetPaxEnded = true) - fileEntryPax.write(data.slice(512, 550)) - fileEntryPax.write(data.slice(550, 1000)) - fileEntryPax.end(data.slice(1000, 1024)) + wetPax.on('end', _ => (wetPaxEnded = true)) + fileEntryPax.write(data.subarray(512, 550)) + fileEntryPax.write(data.subarray(550, 1000)) + fileEntryPax.end(data.subarray(1000, 1024)) t.equal(wetPaxEnded, true) const result = Buffer.concat(out) t.equal(result.length, 2048) - 
t.match(result.slice(1024, 1124).toString(), /^\$+\0?$/) - const body = result.slice(1536).toString().replace(/\0*$/, '') + t.match(result.subarray(1024, 1124).toString(), /^\$+\0?$/) + const body = result.subarray(1536).toString().replace(/\0*$/, '') t.match(new Header(result), { type: 'ExtendedHeader' }) t.equal(body, '$$$$$$$$$$') t.end() @@ -1055,26 +1139,28 @@ t.test('write entry from read entry', t => { t.test('pax and portable', t => { const fileEntryPax = new ReadEntry(new Header(data)) fileEntryPax.path = new Array(200).join('$') - const wetPax = new WriteEntry.Tar(fileEntryPax, { portable: true }) + const wetPax = new WriteEntryTar(fileEntryPax, { + portable: true, + }) let wetPaxEnded = false const out = [] wetPax.on('data', c => out.push(c)) - wetPax.on('end', _ => wetPaxEnded = true) - fileEntryPax.write(data.slice(512, 550)) - fileEntryPax.write(data.slice(550, 1000)) - fileEntryPax.end(data.slice(1000, 1024)) + wetPax.on('end', _ => (wetPaxEnded = true)) + fileEntryPax.write(data.subarray(512, 550)) + fileEntryPax.write(data.subarray(550, 1000)) + fileEntryPax.end(data.subarray(1000, 1024)) t.equal(wetPaxEnded, true) const result = Buffer.concat(out) t.equal(result.length, 2048) - t.match(result.slice(1024, 1124).toString(), /^\$+\0?$/) + t.match(result.subarray(1024, 1124).toString(), /^\$+\0?$/) t.match(new Header(result), { type: 'ExtendedHeader' }) - t.match(new Header(result.slice(1024)), { + t.match(new Header(result.subarray(1024)), { ctime: null, atime: null, uname: '', gname: '', }) - const body = result.slice(1536).toString().replace(/\0*$/, '') + const body = result.subarray(1536).toString().replace(/\0*$/, '') t.equal(body, '$$$$$$$$$$') t.end() }) @@ -1082,30 +1168,30 @@ t.test('write entry from read entry', t => { t.test('pax, portable, and noMtime', t => { const fileEntryPax = new ReadEntry(new Header(data)) fileEntryPax.path = new Array(200).join('$') - const wetPax = new WriteEntry.Tar(fileEntryPax, { + const wetPax = new WriteEntryTar(fileEntryPax, { noMtime: true, portable: true, }) let wetPaxEnded = false const out = [] wetPax.on('data', c => out.push(c)) - wetPax.on('end', _ => wetPaxEnded = true) - fileEntryPax.write(data.slice(512, 550)) - fileEntryPax.write(data.slice(550, 1000)) - fileEntryPax.end(data.slice(1000, 1024)) + wetPax.on('end', _ => (wetPaxEnded = true)) + fileEntryPax.write(data.subarray(512, 550)) + fileEntryPax.write(data.subarray(550, 1000)) + fileEntryPax.end(data.subarray(1000, 1024)) t.equal(wetPaxEnded, true) const result = Buffer.concat(out) t.equal(result.length, 2048) - t.match(result.slice(1024, 1124).toString(), /^\$+\0?$/) + t.match(result.subarray(1024, 1124).toString(), /^\$+\0?$/) t.match(new Header(result), { type: 'ExtendedHeader' }) - t.match(new Header(result.slice(1024)), { + t.match(new Header(result.subarray(1024)), { mtime: null, ctime: null, atime: null, uname: '', gname: '', }) - const body = result.slice(1536).toString().replace(/\0*$/, '') + const body = result.subarray(1536).toString().replace(/\0*$/, '') t.equal(body, '$$$$$$$$$$') t.end() }) @@ -1116,18 +1202,22 @@ t.test('write entry from read entry', t => { t.test('warn', t => { const warnings = [] - new WriteEntry.Tar(fileEntry, { + new WriteEntryTar(fileEntry, { onwarn: (code, msg, data) => warnings.push(code, msg, data), }) - t.match(warnings, ['TAR_ENTRY_INFO', 'stripping / from absolute path', { - path: '/a/b/c', - }]) + t.match(warnings, [ + 'TAR_ENTRY_INFO', + 'stripping / from absolute path', + { + path: '/a/b/c', + }, + ]) t.end() }) 
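The pax assertions above all rely on the same fixed 512-byte block layout. A short decoding sketch, assuming `Header` is re-exported from the main entry as it is from `src/header.ts` (`inspectPax` is an illustrative name, not an API):

```js
import { Header } from 'tar'

// A pax-wrapped one-block file occupies four 512-byte blocks:
// 0: 'ExtendedHeader' header, 1: pax records, 2: real header, 3: data
const inspectPax = result => ({
  pax: new Header(result.subarray(0, 512)).type,
  paxBody: result
    .subarray(512, 1024)
    .toString()
    .replace(/\0+$/, ''), // e.g. '266 path=... linkpath=...'
  entry: new Header(result.subarray(1024, 1536)),
  data: result.subarray(1536, 2048),
})
```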
t.test('preserve', t => { const warnings = [] - new WriteEntry.Tar(fileEntry, { + new WriteEntryTar(fileEntry, { onwarn: (code, msg, data) => warnings.push(code, msg, data), preservePaths: true, }) @@ -1136,41 +1226,48 @@ t.test('write entry from read entry', t => { }) t.test('throw', t => { - t.throws(_ => new WriteEntry.Tar(fileEntry, { - strict: true, - })) + t.throws( + _ => + new WriteEntryTar(fileEntry, { + strict: true, + }), + ) t.end() }) t.end() }) t.test('no block remain', t => { - const readEntry = new ReadEntry(new Header({ - size: 512, - type: 'File', - path: 'x', - })) - const wet = new WriteEntry.Tar(readEntry) + const readEntry = new ReadEntry( + new Header({ + size: 512, + type: 'File', + path: 'x', + }), + ) + const wet = new WriteEntryTar(readEntry) const out = [] wet.on('data', c => out.push(c)) let wetEnded = false - wet.on('end', _ => wetEnded = true) + wet.on('end', _ => (wetEnded = true)) t.equal(wetEnded, false) readEntry.end(Buffer.from(new Array(513).join('@'))) t.equal(wetEnded, true) const res = Buffer.concat(out) t.equal(res.length, 1024) - t.match(res.slice(512).toString(), /^@+$/) + t.match(res.subarray(512).toString(), /^@+$/) t.end() }) t.test('write more than appropriate', t => { - const readEntry = new ReadEntry(new Header({ - path: 'x', - type: 'File', - size: '1', - })) - const wet = new WriteEntry.Tar(readEntry) + const readEntry = new ReadEntry( + new Header({ + path: 'x', + type: 'File', + size: '1', + }), + ) + const wet = new WriteEntryTar(readEntry) t.throws(_ => wet.write(Buffer.from(new Array(1024).join('x')))) t.end() }) @@ -1212,7 +1309,9 @@ t.test('prefix and hard links', t => { path: 'PaxHeader/yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy', type: 'ExtendedHeader', }, - new RegExp('^266 path=out.x.' + long + '[\\w\\W]*linkpath=out.x.target'), + new RegExp( + '^266 path=out.x.' 
+ long + '[\\w\\W]*linkpath=out.x.target', + ), { path: 'out/x/yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy', type: 'Link', @@ -1251,11 +1350,11 @@ t.test('prefix and hard links', t => { const data = Buffer.concat(out) expect.forEach((e, i) => { if (typeof e === 'string') { - t.equal(data.slice(i * 512, i * 512 + e.length).toString(), e) + t.equal(data.subarray(i * 512, i * 512 + e.length).toString(), e) } else if (e instanceof RegExp) { - t.match(data.slice(i * 512, (i + 1) * 512).toString(), e) + t.match(data.subarray(i * 512, (i + 1) * 512).toString(), e) } else { - t.match(new Header(data.slice(i * 512, (i + 1) * 512)), e) + t.match(new Header(data.subarray(i * 512, (i + 1) * 512)), e) } }) } @@ -1271,11 +1370,12 @@ t.test('prefix and hard links', t => { statCache, } const out = [] - const entry = (path) => new Promise(resolve => { - const p = new Class(path, opt) - p.on('end', resolve) - p.on('data', d => out.push(d)) - }) + const entry = path => + new Promise(resolve => { + const p = new Class(path, opt) + p.on('end', resolve) + p.on('data', d => out.push(d)) + }) await entry(path) if (path === '.') { @@ -1299,8 +1399,8 @@ t.test('prefix and hard links', t => { }) t.test('sync', t => { - t.test('.', t => runTest(t, '.', WriteEntry.Sync)) - return t.test('./', t => runTest(t, './', WriteEntry.Sync)) + t.test('.', t => runTest(t, '.', WriteEntrySync)) + return t.test('./', t => runTest(t, './', WriteEntrySync)) }) t.end() @@ -1328,7 +1428,9 @@ t.test('prefix and hard links from tar entries', t => { path: 'PaxHeader/yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy', type: 'ExtendedHeader', }, - new RegExp('^266 path=out.x.' + long + '[\\w\\W]*linkpath=out.x.target'), + new RegExp( + '^266 path=out.x.' 
+ long + '[\\w\\W]*linkpath=out.x.target', + ), { path: 'out/x/yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy', type: 'Link', @@ -1425,16 +1527,16 @@ t.test('prefix and hard links from tar entries', t => { const data = Buffer.concat(out) expect.forEach((e, i) => { if (typeof e === 'string') { - t.equal(data.slice(i * 512, i * 512 + e.length).toString(), e) + t.equal(data.subarray(i * 512, i * 512 + e.length).toString(), e) } else if (e instanceof RegExp) { - t.match(data.slice(i * 512, (i + 1) * 512).toString(), e) + t.match(data.subarray(i * 512, (i + 1) * 512).toString(), e) } else { - t.match(new Header(data.slice(i * 512, (i + 1) * 512)), e) + t.match(new Header(data.subarray(i * 512, (i + 1) * 512)), e) } }) } - const runTest = async (t, path) => { + const runTest = async (t, _path) => { const linkCache = new Map() const statCache = new Map() const opt = { @@ -1447,7 +1549,7 @@ t.test('prefix and hard links from tar entries', t => { const parser = new Parser({ strict: true, onentry: readEntry => { - const p = new WriteEntry.Tar(readEntry, opt) + const p = new WriteEntryTar(readEntry, opt) p.on('data', d => out.push(d)) }, }) @@ -1520,9 +1622,9 @@ t.test('hard links and no prefix', t => { const data = Buffer.concat(out) expect.forEach((e, i) => { if (typeof e === 'string') { - t.equal(data.slice(i * 512, i * 512 + e.length).toString(), e) + t.equal(data.subarray(i * 512, i * 512 + e.length).toString(), e) } else { - t.match(new Header(data.slice(i * 512, (i + 1) * 512)), e) + t.match(new Header(data.subarray(i * 512, (i + 1) * 512)), e) } }) } @@ -1536,11 +1638,12 @@ t.test('hard links and no prefix', t => { statCache, } const out = [] - const entry = (path) => new Promise(resolve => { - const p = new Class(path, opt) - p.on('end', resolve) - p.on('data', d => out.push(d)) - }) + const entry = path => + new Promise(resolve => { + const p = new Class(path, opt) + p.on('end', resolve) + p.on('data', d => out.push(d)) + }) await entry(path) if (path === '.') { @@ -1563,8 +1666,8 @@ t.test('hard links and no prefix', t => { }) t.test('sync', t => { - t.test('.', t => runTest(t, '.', WriteEntry.Sync)) - return t.test('./', t => runTest(t, './', WriteEntry.Sync)) + t.test('.', t => runTest(t, '.', WriteEntrySync)) + return t.test('./', t => runTest(t, './', WriteEntrySync)) }) t.end() @@ -1665,16 +1768,16 @@ t.test('hard links from tar entries and no prefix', t => { const data = Buffer.concat(out) expect.forEach((e, i) => { if (typeof e === 'string') { - t.equal(data.slice(i * 512, i * 512 + e.length).toString(), e) + t.equal(data.subarray(i * 512, i * 512 + e.length).toString(), e) } else if (e instanceof RegExp) { - t.match(data.slice(i * 512, (i + 1) * 512).toString(), e) + t.match(data.subarray(i * 512, (i + 1) * 512).toString(), e) } else { - t.match(new Header(data.slice(i * 512, (i + 1) * 512)), e) + t.match(new Header(data.subarray(i * 512, (i + 1) * 512)), e) } }) } - const runTest = async (t, path) => { + const runTest = async (t, _path) => { const linkCache = new Map() const statCache = new Map() const opt = { @@ -1685,7 +1788,7 @@ t.test('hard links from tar entries and no prefix', t => { const out = [] const parser = new Parser({ onentry: readEntry => { - const p = new WriteEntry.Tar(readEntry, opt) + const p = new WriteEntryTar(readEntry, opt) p.on('data', d => out.push(d)) }, }) From 734434c3e8e8e4c87cfb12e6c8df2252540a99c1 Mon Sep 17 00:00:00 2001 From: isaacs Date: Wed, 10 Apr 2024 08:51:07 -0700 Subject: [PATCH 03/12] 
changelog v7, flip default chmod behavior The `noChmod` option is deprecated, and replaced with the `chmod` option. The default is now to *not* explicitly set the mode of created filesystem entries. This is a deviation from `tar(1)` implementations, but prevents the need to call `process.umask()`, which is deprecated and not thread safe. To still set modes explicitly, while avoiding the need to call `process.umask()`, a `processUmask` numeric option may now be provided along with `chmod: true`. --- CHANGELOG.md | 37 +++++- README.md | 45 +++++--- package.json | 55 +++++++++ src/mkdir.ts | 1 - src/options.ts | 292 +++++++++++++++++++++++++++++++++++++++--------- src/unpack.ts | 16 +-- test/options.js | 3 + test/unpack.js | 38 ++++--- 8 files changed, 389 insertions(+), 98 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f4b27a7a..c8c81472 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,26 +1,51 @@ # Changelog +## 7.0 + +- Rewrite in TypeScript, provide ESM and CommonJS hybrid + interface +- Add tree-shake friendly exports, like `import('tar/create')` + and `import('tar/read-entry')` to get individual functions or + classes. +- Add `chmod` option that defaults to false, and deprecate + `noChmod`. That is, reverse the default option regarding + explicitly setting file system modes to match tar entry + settings. +- Add `processUmask` option to avoid having to call + `process.umask()` when `chmod: true` (or `noChmod: false`) is + set. + ## 6.2 -* Add support for brotli compression +- Add support for brotli compression +- Add `maxDepth` option to prevent extraction into excessively + deep folders. + +## 6.1.15 + +- Normalize unicode internally using NFD + +## 6.1.14 + +- Update minipass dependency ## [6.1.13](https://github.com/npm/node-tar/compare/v6.1.12...v6.1.13) (2022-12-07) ### Dependencies -* [`cc4e0dd`](https://github.com/npm/node-tar/commit/cc4e0ddfe523a0bce383846a67442c637a65d486) [#343](https://github.com/npm/node-tar/pull/343) bump minipass from 3.3.6 to 4.0.0 +- [`cc4e0dd`](https://github.com/npm/node-tar/commit/cc4e0ddfe523a0bce383846a67442c637a65d486) [#343](https://github.com/npm/node-tar/pull/343) bump minipass from 3.3.6 to 4.0.0 ## [6.1.12](https://github.com/npm/node-tar/compare/v6.1.11...v6.1.12) (2022-10-31) ### Bug Fixes -* [`57493ee`](https://github.com/npm/node-tar/commit/57493ee66ece50d62114e02914282fc37be3a91a) [#332](https://github.com/npm/node-tar/pull/332) ensuring close event is emited after stream has ended (@webark) -* [`b003c64`](https://github.com/npm/node-tar/commit/b003c64f624332e24e19b30dc011069bb6708680) [#314](https://github.com/npm/node-tar/pull/314) replace deprecated String.prototype.substr() (#314) (@CommanderRoot, @lukekarrys) +- [`57493ee`](https://github.com/npm/node-tar/commit/57493ee66ece50d62114e02914282fc37be3a91a) [#332](https://github.com/npm/node-tar/pull/332) ensuring close event is emited after stream has ended (@webark) +- [`b003c64`](https://github.com/npm/node-tar/commit/b003c64f624332e24e19b30dc011069bb6708680) [#314](https://github.com/npm/node-tar/pull/314) replace deprecated String.prototype.substr() (#314) (@CommanderRoot, @lukekarrys) ### Documentation -* [`f129929`](https://github.com/npm/node-tar/commit/f12992932f171ea248b27fad95e7d489a56d31ed) [#313](https://github.com/npm/node-tar/pull/313) remove dead link to benchmarks (#313) (@yetzt) -* [`c1faa9f`](https://github.com/npm/node-tar/commit/c1faa9f44001dfb0bc7638b2850eb6058bd56a4a) add examples/explanation of using tar.t (@isaacs) +- 
[`f129929`](https://github.com/npm/node-tar/commit/f12992932f171ea248b27fad95e7d489a56d31ed) [#313](https://github.com/npm/node-tar/pull/313) remove dead link to benchmarks (#313) (@yetzt) +- [`c1faa9f`](https://github.com/npm/node-tar/commit/c1faa9f44001dfb0bc7638b2850eb6058bd56a4a) add examples/explanation of using tar.t (@isaacs) ## 6.0 diff --git a/README.md b/README.md index 296229c5..971638ab 100644 --- a/README.md +++ b/README.md @@ -156,12 +156,13 @@ to see how tar is handling the issue. The API mimics the `tar(1)` command line functionality, with aliases for more human-readable option and function names. The goal is that if you know how to use `tar(1)` in Unix, then you know how to use -`require('tar')` in JavaScript. +`import('tar')` in JavaScript. To replicate `tar czf my-tarball.tgz files and folders`, you'd do: ```js -tar.c( +import { create } from 'tar' +create( { gzip: , file: 'my-tarball.tgz' @@ -173,9 +174,12 @@ tar.c( To replicate `tar cz files and folders > my-tarball.tgz`, you'd do: ```js -tar.c( // or tar.create +// if you're familiar with the tar(1) cli flags, this can be nice +import * as tar from 'tar' +tar.c( { - gzip: + // 'z' is alias for 'gzip' option + z: }, ['some', 'files', 'and', 'folders'] ).pipe(fs.createWriteStream('my-tarball.tgz')) @@ -184,9 +188,10 @@ tar.c( // or tar.create To replicate `tar xf my-tarball.tgz` you'd do: ```js -tar.x( // or tar.extract( +tar.x( // or `tar.extract` { - file: 'my-tarball.tgz' + // or `file:` + f: 'my-tarball.tgz' } ).then(_=> { .. tarball has been dumped in cwd .. }) ``` @@ -424,11 +429,15 @@ The following options are supported: that passes the filter. - `onwarn` A function that will get called with `(code, message, data)` for any warnings encountered. (See "Warnings and Errors") -- `noChmod` Set to true to omit calling `fs.chmod()` to ensure that the - extracted file matches the entry mode. This also suppresses the call to - `process.umask()` to determine the default umask value, since tar will - extract with whatever mode is provided, and let the process `umask` apply - normally. +- `chmod` Set to true to call `fs.chmod()` to ensure that the + extracted file matches the entry mode. This may necessitate a + call to the deprecated and thread-unsafe `process.umask()` + method to determine the default umask value, unless a + `processUmask` options is also provided. Otherwise tar will + extract with whatever mode is provided, and let the process + `umask` apply normally. +- `processUmask` Set to an explicit numeric value to avoid + calling `process.umask()` when `chmod: true` is set. - `maxDepth` The maximum depth of subfolders to extract into. This defaults to 1024. Anything deeper than the limit will raise a warning and skip the entry. Set to `Infinity` to remove the @@ -751,11 +760,15 @@ Most unpack errors will cause a `warn` event to be emitted. If the that passes the filter. - `onwarn` A function that will get called with `(code, message, data)` for any warnings encountered. (See "Warnings and Errors") -- `noChmod` Set to true to omit calling `fs.chmod()` to ensure that the - extracted file matches the entry mode. This also suppresses the call to - `process.umask()` to determine the default umask value, since tar will - extract with whatever mode is provided, and let the process `umask` apply - normally. +- `chmod` Set to true to call `fs.chmod()` to ensure that the + extracted file matches the entry mode. 
This may necessitate a
+  call to the deprecated and thread-unsafe `process.umask()`
+  method to determine the default umask value, unless a
+  `processUmask` option is also provided. Otherwise tar will
+  extract with whatever mode is provided, and let the process
+  `umask` apply normally.
+- `processUmask` Set to an explicit numeric value to avoid
+  calling `process.umask()` when `chmod: true` is set.
 - `maxDepth` The maximum depth of subfolders to extract into. This
   defaults to 1024. Anything deeper than the limit will raise a
   warning and skip the entry. Set to `Infinity` to remove the
diff --git a/package.json b/package.json
index 57eaf9a1..4090358b 100644
--- a/package.json
+++ b/package.json
@@ -66,11 +66,16 @@
   "exports": {
     "./package.json": "./package.json",
     ".": "./src/index.ts",
+    "./c": "./src/create.ts",
     "./create": "./src/create.ts",
     "./replace": "./src/create.ts",
+    "./r": "./src/create.ts",
     "./list": "./src/list.ts",
+    "./t": "./src/list.ts",
     "./update": "./src/update.ts",
+    "./u": "./src/update.ts",
     "./extract": "./src/extract.ts",
+    "./x": "./src/extract.ts",
     "./pack": "./src/pack.ts",
     "./unpack": "./src/unpack.ts",
     "./parse": "./src/parse.ts",
@@ -93,6 +98,16 @@
         "default": "./dist/commonjs/index.js"
       }
     },
+    "./c": {
+      "import": {
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
     "./create": {
       "import": {
         "types": "./dist/esm/create.d.ts",
@@ -113,6 +128,16 @@
         "default": "./dist/commonjs/create.js"
       }
     },
+    "./r": {
+      "import": {
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
     "./list": {
       "import": {
         "types": "./dist/esm/list.d.ts",
@@ -123,6 +148,16 @@
         "default": "./dist/commonjs/list.js"
       }
     },
+    "./t": {
+      "import": {
+        "types": "./dist/esm/list.d.ts",
+        "default": "./dist/esm/list.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/list.d.ts",
+        "default": "./dist/commonjs/list.js"
+      }
+    },
     "./update": {
       "import": {
         "types": "./dist/esm/update.d.ts",
@@ -133,6 +168,16 @@
         "default": "./dist/commonjs/update.js"
       }
     },
+    "./u": {
+      "import": {
+        "types": "./dist/esm/update.d.ts",
+        "default": "./dist/esm/update.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/update.d.ts",
+        "default": "./dist/commonjs/update.js"
+      }
+    },
     "./extract": {
       "import": {
         "types": "./dist/esm/extract.d.ts",
@@ -143,6 +188,16 @@
         "default": "./dist/commonjs/extract.js"
       }
     },
+    "./x": {
+      "import": {
+        "types": "./dist/esm/extract.d.ts",
+        "default": "./dist/esm/extract.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/extract.d.ts",
+        "default": "./dist/commonjs/extract.js"
+      }
+    },
     "./pack": {
       "import": {
         "types": "./dist/esm/pack.d.ts",
diff --git a/src/mkdir.ts b/src/mkdir.ts
index cf98d1ac..f65c98ef 100644
--- a/src/mkdir.ts
+++ b/src/mkdir.ts
@@ -17,7 +17,6 @@ export type MkdirOptions = {
   cache: Map<string, boolean>
   cwd: string
   mode: number
-  noChmod: boolean
 }
 
 export type MkdirError =
diff --git a/src/options.ts b/src/options.ts
index a612f142..fe33ac46 100644
--- a/src/options.ts
+++ b/src/options.ts
@@ -38,6 +38,9 @@ const argmap = new Map(
  * Aliases are provided in the {@link TarOptionsWithAliases} type.
  */
 export interface TarOptions {
+  //////////////////////////
+  // shared options
+
   /**
    * Perform all I/O operations synchronously. If the stream is ended
    * immediately, then it will be processed entirely synchronously.
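Before moving on through the options changes, it may help to see what
the new single-letter export paths above enable. A sketch, assuming
each subpath exposes the same named functions as its source module
(for example `extract` from `src/extract.ts` and `list` from
`src/list.ts`):

```js
// tree-shake friendly: pull in only the code paths you use
import { extract } from 'tar/x' // same module as 'tar/extract'
import { list } from 'tar/t' // same module as 'tar/list'

await extract({ file: 'archive.tgz', cwd: 'dest' })
await list({ file: 'archive.tgz', onentry: e => console.log(e.path) })
```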
@@ -92,7 +95,52 @@ export interface TarOptions {
   preservePaths?: boolean
 
   /**
-   * When extracting, unlink files before creating them. Without this option,
+   * When extracting, do not set the `mtime` value for extracted entries to
+   * match the `mtime` in the archive.
+   *
+   * When creating archives, do not store the `mtime` value in the entry. Note
+   * that this prevents properly using other mtime-based features (such as
+   * `tar.update` or the `newer` option) with the resulting archive.
+   */
+  noMtime?: boolean
+
+  /**
+   * Set to `true` or an object with settings for `zlib.BrotliCompress()` to
+   * create a brotli-compressed archive
+   *
+   * When extracting, this will cause the archive to be treated as a
+   * brotli-compressed file if set to `true` or a ZlibOptions object.
+   *
+   * If set to `false`, then brotli options will not be used.
+   *
+   * If both this and the `gzip` option are left `undefined`, then tar will
+   * attempt to infer the brotli compression status, but can only do so based
+   * on the filename. If the filename ends in `.tbr` or `.tar.br`, and the
+   * first 512 bytes are not a valid tar header, then brotli decompression
+   * will be attempted.
+   */
+  brotli?: boolean | ZlibOptions
+
+  /**
+   * A function that is called with `(path, stat)` when creating an archive, or
+   * `(path, entry)` when extracting. Return true to process the file/entry, or
+   * false to exclude it.
+   */
+  filter?: (path: string, entry: Stats | ReadEntry) => boolean
+
+  /**
+   * A function that gets called for any warning encountered.
+   *
+   * Note: if `strict` is set, then the warning will throw, and this method
+   * will not be called.
+   */
+  onwarn?: (code: string, message: string, data: WarnData) => any
+
+  //////////////////////////
+  // extraction options
+
+  /**
+   * When extracting, unlink files before creating them. Without this option,
    * tar overwrites existing files, which preserves existing hardlinks. With
    * this option, existing hardlinks will be broken, as will any symlink that
    * would affect the location of an extracted file.
@@ -103,6 +151,8 @@ export interface TarOptions {
    * When extracting, strip the specified number of path portions from the
    * entry path. For example, with `{strip: 2}`, the entry `a/b/c/d` would be
    * extracted to `{cwd}/c/d`.
+   *
+   * Any entry whose entire path is stripped will be excluded.
    */
   strip?: number
 
@@ -118,54 +168,26 @@ export interface TarOptions {
   keep?: boolean
 
   /**
-   * When extracting, do not set the `mtime` value for extracted entries to
-   * match the `mtime` in the archive.
-   *
-   * When creating archives, do not store the `mtime` value in the entry. Note
-   * that this prevents properly using other mtime-based features (such as
-   * `tar.update` or the `newer` option) with the resulting archive.
-   */
-  noMtime?: boolean
-
-  /**
-   * Set the `uid` and `gid` of extracted entries to the `uid` and `gid` fields
-   * in the archive. Defaults to true when run as root, and false otherwise.
+   * When extracting, set the `uid` and `gid` of extracted entries to the `uid`
+   * and `gid` fields in the archive. Defaults to true when run as root, and
+   * false otherwise.
    *
    * If false, then files and directories will be set with the owner and group
-   * of the user running the process. This is similar to `-p` in `tar(1)`, but
+   * of the user running the process.  This is similar to `-p` in `tar(1)`, but
    * ACLs and other system-specific data is never unpacked in this
    * implementation, and modes are set by default already.
    */
  preserveOwner?: boolean
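The `filter` signature documented above lends itself to a short
sketch. The archive name and the `.tmp` exclusion here are
illustrative only, not taken from the source:

```js
import { extract } from 'tar'

await extract({
  file: 'archive.tgz',
  cwd: 'dest',
  // when extracting, `entry` is a ReadEntry; return false to skip it
  filter: (path, entry) => entry.type === 'File' && !path.endsWith('.tmp'),
})
```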
 
   /**
-   * Pack the targets of symbolic links rather than the link itself.
-   */
-  follow?: boolean
-
-  /**
-   * Set to `true` or an object with settings for `zlib.BrotliCompress()` to
-   * create a brotli-compressed archive
-   */
-  brotli?: boolean | ZlibOptions
-
-  /**
-   * A function that is called with `(path, stat)` when creating an archive, or
-   * `(path, entry)` when unpacking. Return true to process the file/entry, or
-   * false to exclude it.
-   */
-  filter?: (path: string, entry: Stats | ReadEntry) => boolean
-
-  /**
-   * A function that gets called for any warning encountered.
-   *
-   * Note: if `strict` is set, then the warning will throw, and this method
-   * will not be called.
+   * The maximum depth of subfolders to extract into. This defaults to 1024.
+   * Anything deeper than the limit will raise a warning and skip the entry.
+   * Set to `Infinity` to remove the limitation.
    */
-  onwarn?: (code: string, message: string, data: WarnData) => any
+  maxDepth?: number
 
   /**
-   * When unpacking, force all created files and directories, and all
+   * When extracting, force all created files and directories, and all
    * implicitly created directories, to be owned by the specified user id,
    * regardless of the `uid` field in the archive.
    *
@@ -175,7 +197,7 @@ export interface TarOptions {
   uid?: number
 
   /**
-   * When unpacking, force all created files and directories, and all
+   * When extracting, force all created files and directories, and all
    * implicitly created directories, to be owned by the specified group id,
    * regardless of the `gid` field in the archive.
    *
@@ -201,27 +223,55 @@ export interface TarOptions {
   transform?: (entry: ReadEntry) => any
 
   /**
-   * The maximum depth of subfolders to extract into. This defaults to 1024.
-   * Anything deeper than the limit will raise a warning and skip the entry.
-   * Set to `Infinity` to remove the limitation.
+   * Call `chmod()` to ensure that extracted files match the entry's mode
+   * field. Without this field set, all mode fields in archive entries are a
+   * best effort attempt only.
+   *
+   * Setting this necessitates a call to the deprecated `process.umask()`
+   * method to determine the default umask value, unless a `processUmask`
+   * config is provided as well.
+   *
+   * If not set, tar will attempt to create file system entries with whatever
+   * mode is provided, and let the implicit process `umask` apply normally, but
+   * if a file already exists to be written to, then its existing mode will not
+   * be modified.
+   *
+   * When setting `chmod: true`, it is highly recommended to set the
+   * {@link TarOptions#processUmask} option as well, to avoid the call to the
+   * deprecated (and thread-unsafe) `process.umask()` method.
    */
-  maxDepth?: number
+  chmod?: boolean
 
   /**
-   * Do not call `chmod()` to ensure that extracted files match the entry's
-   * mode field. This also suppresses the call to `process.umask()` to
-   * determine the default umask value, since tar will extract with whatever
-   * mode is provided, and let the process `umask` apply normally.
+   * When setting the {@link TarOptions#noChmod} option to `false`, you may
+   * provide a value here to avoid having to call the deprecated and
+   * thread-unsafe `process.umask()` method.
+   *
+   * This has no effect when `noChmod` is not set to `false` explicitly, as
+   * mode values are not set explicitly anyway. If `noChmod` is set to `false`,
+   * and a value is not provided here, then `process.umask()` must be called,
+   * which will result in deprecation warnings.
+   *
+   * The most common values for this are `0o22` (resulting in directories
+   * created with mode `0o755` and files with `0o644` by default) and `0o2`
+   * (resulting in directories created with mode `0o775` and files `0o664`, so
+   * they are group-writable).
    */
-  noChmod?: boolean
+  processUmask?: number
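To make the `0o22` / `0o2` guidance above concrete, the usual umask
arithmetic looks like this. This is a sketch of the masking described
in the doc comment, not the library's exact internals:

```js
const processUmask = 0o22
// implicitly created directories default to 0o777 masked by the umask
console.log((0o777 & ~processUmask).toString(8)) // '755'
// files default to 0o666 masked by the umask
console.log((0o666 & ~processUmask).toString(8)) // '644'
// with processUmask: 0o2 the same arithmetic yields 0o775 and 0o664
```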
+
+  //////////////////////////
+  // archive creation options
 
   /**
    * When parsing/listing archives, `entry` streams are by default resumed
    * (set into "flowing" mode) immediately after the call to `onentry()`.
-   * Set to suppress this behavior.
+   * Set `noResume: true` to suppress this behavior.
    *
    * Note that when this is set, the stream will never complete until the
    * data is consumed somehow.
+   *
+   * Set automatically in extract operations, since the entry is piped to
+   * a file system entry right away. Only relevant when parsing.
    */
   noResume?: boolean
 
@@ -234,6 +284,11 @@ export interface TarOptions {
    */
   onentry?: (entry: ReadEntry) => any
 
+  /**
+   * Pack the targets of symbolic links rather than the link itself.
+   */
+  follow?: boolean
+
   /**
    * When creating archives, omit any metadata that is system-specific:
    * `ctime`, `atime`, `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and
@@ -256,9 +311,9 @@ export interface TarOptions {
   noDirRecurse?: boolean
 
   /**
-   * Suppress Pax extended headers. Note that this means long paths and
-   * linkpaths will be truncated, and large or negative numeric values may be
-   * interpreted incorrectly.
+   * Suppress Pax extended headers when creating archives. Note that this means
+   * long paths and linkpaths will be truncated, and large or negative numeric
+   * values may be interpreted incorrectly.
    */
   noPax?: boolean
 
@@ -266,6 +321,10 @@ export interface TarOptions {
    * Set to a `Date` object to force a specific `mtime` value for everything
    * written to an archive.
    *
+   * This is useful when creating archives that are intended to be
+   * deterministic based on their contents, irrespective of the file's last
+   * modification time.
+   *
    * Overridden by `noMtime`.
    */
   mtime?: Date
@@ -278,6 +337,9 @@ export interface TarOptions {
   /**
    * The mode to set on any created file archive, defaults to 0o666
    * masked by the process umask, often resulting in 0o644.
+   *
+   * This does *not* affect the mode fields of individual entries, or the
+   * mode status of extracted entries on the filesystem.
    */
   mode?: number
 
@@ -286,6 +348,7 @@ export interface TarOptions {
 
   /**
    * A cache of mtime values, to avoid having to stat the same file repeatedly.
+   *
    * @internal
    */
   mtimeCache?: Map<string, Date>
@@ -305,7 +368,8 @@ export interface TarOptions {
   umask?: number
 
   /**
-   * default mode for directories
+   * Default mode for directories. Used for all implicitly created directories,
+   * and any directories in the archive that do not have a mode field.
    *
    * @internal
    */
@@ -366,7 +430,7 @@ export interface TarOptions {
   /**
    * Automatically set to true on Windows systems.
    *
-   * When unpacking, causes behavior where filenames containing `<|>?:`
+   * When extracting, causes behavior where filenames containing `<|>?:`
    * characters are converted to windows-compatible escape sequences in the
    * created filesystem entries.
    *
@@ -411,25 +475,140 @@ export type TarOptionsSyncFile = TarOptionsSync & TarOptionsFile
 export type LinkCacheKey = `${number}:${number}`
 
 export interface TarOptionsWithAliases extends TarOptions {
+  /**
+   * The effective current working directory for this tar command
+   */
   C?: TarOptions['cwd']
+  /**
+   * The tar file to be read and/or written. When this is set, a stream
+   * is not returned.
Asynchronous commands will return a promise indicating + * when the operation is completed, and synchronous commands will return + * immediately. + */ f?: TarOptions['file'] + /** + * When creating a tar archive, this can be used to compress it as well. + * Set to `true` to use the default gzip options, or customize them as + * needed. + * + * When reading, if this is unset, then the compression status will be + * inferred from the archive data. This is generally best, unless you are + * sure of the compression settings in use to create the archive, and want to + * fail if the archive doesn't match expectations. + */ z?: TarOptions['gzip'] + /** + * When creating archives, preserve absolute and `..` paths in the archive, + * rather than sanitizing them under the cwd. + * + * When extracting, allow absolute paths, paths containing `..`, and + * extracting through symbolic links. By default, the root `/` is stripped + * from absolute paths (eg, turning `/x/y/z` into `x/y/z`), paths containing + * `..` are not extracted, and any file whose location would be modified by a + * symbolic link is not extracted. + * + * **WARNING** This is almost always unsafe, and must NEVER be used on + * archives from untrusted sources, such as user input, and every entry must + * be validated to ensure it is safe to write. Even if the input is not + * malicious, mistakes can cause a lot of damage! + */ P?: TarOptions['preservePaths'] + /** + * When extracting, unlink files before creating them. Without this option, + * tar overwrites existing files, which preserves existing hardlinks. With + * this option, existing hardlinks will be broken, as will any symlink that + * would affect the location of an extracted file. + */ U?: TarOptions['unlink'] + /** + * When extracting, strip the specified number of path portions from the + * entry path. For example, with `{strip: 2}`, the entry `a/b/c/d` would be + * extracted to `{cwd}/c/d`. + */ 'strip-components'?: TarOptions['strip'] + /** + * When extracting, strip the specified number of path portions from the + * entry path. For example, with `{strip: 2}`, the entry `a/b/c/d` would be + * extracted to `{cwd}/c/d`. + */ stripComponents?: TarOptions['strip'] + /** + * When extracting, keep the existing file on disk if it's newer than the + * file in the archive. + */ 'keep-newer'?: TarOptions['newer'] + /** + * When extracting, keep the existing file on disk if it's newer than the + * file in the archive. + */ keepNewer?: TarOptions['newer'] + /** + * When extracting, keep the existing file on disk if it's newer than the + * file in the archive. + */ 'keep-newer-files'?: TarOptions['newer'] + /** + * When extracting, keep the existing file on disk if it's newer than the + * file in the archive. + */ keepNewerFiles?: TarOptions['newer'] + /** + * When extracting, do not overwrite existing files at all. + */ k?: TarOptions['keep'] + /** + * When extracting, do not overwrite existing files at all. + */ 'keep-existing'?: TarOptions['keep'] + /** + * When extracting, do not overwrite existing files at all. + */ keepExisting?: TarOptions['keep'] + /** + * When extracting, do not set the `mtime` value for extracted entries to + * match the `mtime` in the archive. + * + * When creating archives, do not store the `mtime` value in the entry. Note + * that this prevents properly using other mtime-based features (such as + * `tar.update` or the `newer` option) with the resulting archive. 
+ */ m?: TarOptions['noMtime'] + /** + * When extracting, do not set the `mtime` value for extracted entries to + * match the `mtime` in the archive. + * + * When creating archives, do not store the `mtime` value in the entry. Note + * that this prevents properly using other mtime-based features (such as + * `tar.update` or the `newer` option) with the resulting archive. + */ 'no-mtime'?: TarOptions['noMtime'] + /** + * When extracting, set the `uid` and `gid` of extracted entries to the `uid` + * and `gid` fields in the archive. Defaults to true when run as root, and + * false otherwise. + * + * If false, then files and directories will be set with the owner and group + * of the user running the process. This is similar to `-p` in `tar(1)`, but + * ACLs and other system-specific data is never unpacked in this + * implementation, and modes are set by default already. + */ p?: TarOptions['preserveOwner'] + /** + * Pack the targets of symbolic links rather than the link itself. + */ L?: TarOptions['follow'] + /** + * Pack the targets of symbolic links rather than the link itself. + */ h?: TarOptions['follow'] + + /** + * Deprecated option. Set explicitly false to set `chmod: true`. Ignored + * if {@link TarOptions#chmod} is set to any boolean value. + * + * @deprecated + */ + noChmod?: boolean } export type TarOptionsWithAliasesSync = TarOptionsWithAliases & { @@ -469,5 +648,10 @@ export const dealias = ( const k = dealiasKey(key) result[k] = v } + // affordance for deprecated noChmod -> chmod + if (result.chmod === undefined && result.noChmod === false) { + result.chmod = true + } + delete result.noChmod return result as TarOptions } diff --git a/src/unpack.ts b/src/unpack.ts index 26e857cc..933975ff 100644 --- a/src/unpack.ts +++ b/src/unpack.ts @@ -172,7 +172,7 @@ export class Unpack extends Parser { umask: number dmode: number fmode: number - noChmod: boolean + chmod: boolean constructor(opt: TarOptions = {}) { opt.ondone = () => { @@ -185,7 +185,7 @@ export class Unpack extends Parser { this.transform = opt.transform this.dirCache = opt.dirCache || new Map() - this.noChmod = !!opt.noChmod + this.chmod = !!opt.chmod if (typeof opt.uid === 'number' || typeof opt.gid === 'number') { // need both or neither @@ -269,7 +269,11 @@ export class Unpack extends Parser { ) this.strip = Number(opt.strip) || 0 // if we're not chmodding, then we don't need the process umask - this.processUmask = opt.noChmod ? 0 : process.umask() + this.processUmask = !this.chmod + ? 0 + : typeof opt.processUmask === 'number' + ? opt.processUmask + : process.umask() this.umask = typeof opt.umask === 'number' ? 
opt.umask : this.processUmask @@ -469,7 +473,6 @@ export class Unpack extends Parser { cache: this.dirCache, cwd: this.cwd, mode: mode, - noChmod: this.noChmod, }, cb, ) @@ -777,7 +780,7 @@ export class Unpack extends Parser { if (st.isDirectory()) { if (entry.type === 'Directory') { const needChmod = - !this.noChmod && + this.chmod && entry.mode && (st.mode & 0o7777) !== entry.mode const afterChmod = (er?: Error | null | undefined) => @@ -931,7 +934,7 @@ export class UnpackSync extends Unpack { if (st.isDirectory()) { if (entry.type === 'Directory') { const needChmod = - !this.noChmod && + this.chmod && entry.mode && (st.mode & 0o7777) !== entry.mode const [er] = needChmod @@ -1088,7 +1091,6 @@ export class UnpackSync extends Unpack { cache: this.dirCache, cwd: this.cwd, mode: mode, - noChmod: this.noChmod, }) } catch (er) { return er diff --git a/test/options.js b/test/options.js index 5e10df5b..7aa01caf 100644 --- a/test/options.js +++ b/test/options.js @@ -51,6 +51,9 @@ t.same( }, ) +t.same(dealias({ noChmod: false }), { chmod: true }) +t.same(dealias({ noChmod: true }), {}) + t.equal(isSyncFile(dealias({ sync: true, f: 'x' })), true) t.equal(isSyncFile(dealias({ file: 'x' })), false) t.equal(isSyncFile(dealias({ sync: true })), false) diff --git a/test/unpack.js b/test/unpack.js index 51ba1220..2b0ea269 100644 --- a/test/unpack.js +++ b/test/unpack.js @@ -1,5 +1,3 @@ -process.umask(0o022) - import { Unpack, UnpackSync } from '../dist/esm/unpack.js' import fs from 'fs' @@ -36,6 +34,8 @@ const isLongFile = f => t.teardown(_ => rimraf(unpackdir)) +t.capture(process, 'umask', () => 0o22) + t.before(async () => { await rimraf(unpackdir) await mkdirp(unpackdir) @@ -640,6 +640,7 @@ t.test( onwarn: (c, w, d) => { warnings.push([c, w, d]) }, + chmod: true, }) u.on('close', _ => { t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) @@ -676,6 +677,8 @@ t.test( onwarn: (c, w, d) => { warnings.push([c, w, d]) }, + chmod: true, + processUmask: 0o22, }) u.end(data) t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) @@ -1395,6 +1398,8 @@ t.test('fail chmod', t => { new Unpack({ cwd: dir, onwarn: (_c, w, d) => warnings.push([w, d]), + chmod: true, + processUmask: 0o22, }) .on('close', _ => check(t, expect)) .end(data) @@ -1405,6 +1410,8 @@ t.test('fail chmod', t => { new UnpackSync({ cwd: dir, onwarn: (_c, w, d) => warnings.push([w, d]), + chmod: true, + processUmask: 0o22, }).end(data) check(t, expect) }) @@ -2112,8 +2119,6 @@ t.test('safely transmute chars on windows with absolutes', t => { }) t.test('use explicit chmod when required by umask', t => { - process.umask(0o022) - const basedir = path.resolve(unpackdir, 'umask-chmod') const data = makeTar([ @@ -2135,26 +2140,31 @@ t.test('use explicit chmod when required by umask', t => { t.test('async', t => { mkdirp.sync(basedir) - const unpack = new Unpack({ cwd: basedir }) + const unpack = new Unpack({ + cwd: basedir, + chmod: true, + processUmask: 0o22, + }) unpack.on('close', _ => check(t)) unpack.end(data) }) return t.test('sync', t => { mkdirp.sync(basedir) - const unpack = new UnpackSync({ cwd: basedir }) + const unpack = new UnpackSync({ + cwd: basedir, + chmod: true, + processUmask: 0o22, + }) unpack.end(data) check(t) }) }) -t.test('dont use explicit chmod if noChmod flag set', t => { - process.umask(0o022) - const { umask } = process - t.teardown(() => (process.umask = umask)) - process.umask = () => { +t.test('dont use explicit chmod if chmod flag not set', t => { + t.capture(process, 'umask', () => { throw new 
Error('should not call process.umask()')
-  }
+  })
 
   const basedir = path.resolve(unpackdir, 'umask-no-chmod')
 
@@ -2177,14 +2187,14 @@ t.test('dont use explicit chmod if noChmod flag set', t => {
 
   t.test('async', t => {
     mkdirp.sync(basedir)
-    const unpack = new Unpack({ cwd: basedir, noChmod: true })
+    const unpack = new Unpack({ cwd: basedir })
     unpack.on('close', _ => check(t))
     unpack.end(data)
   })
 
   return t.test('sync', t => {
     mkdirp.sync(basedir)
-    const unpack = new UnpackSync({ cwd: basedir, noChmod: true })
+    const unpack = new UnpackSync({ cwd: basedir })
    unpack.end(data)
    check(t)
  })

From 578f34f3522ce248bfa201b38d01c83b50d9922e Mon Sep 17 00:00:00 2001
From: isaacs
Date: Wed, 10 Apr 2024 08:59:31 -0700
Subject: [PATCH 04/12] tighten up changelog entries

---
 CHANGELOG.md | 52 ++++++++++++++++++----------------------------------
 1 file changed, 18 insertions(+), 34 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index c8c81472..85ae0acb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -21,48 +21,31 @@
 - Add `maxDepth` option to prevent extraction into excessively
   deep folders.
 
-## 6.1.15
+## 6.1
 
-- Normalize unicode internally using NFD
-
-## 6.1.14
-
-- Update minipass dependency
-
-## [6.1.13](https://github.com/npm/node-tar/compare/v6.1.12...v6.1.13) (2022-12-07)
-
-### Dependencies
-
-- [`cc4e0dd`](https://github.com/npm/node-tar/commit/cc4e0ddfe523a0bce383846a67442c637a65d486) [#343](https://github.com/npm/node-tar/pull/343) bump minipass from 3.3.6 to 4.0.0
-
-## [6.1.12](https://github.com/npm/node-tar/compare/v6.1.11...v6.1.12) (2022-10-31)
-
-### Bug Fixes
-
-- [`57493ee`](https://github.com/npm/node-tar/commit/57493ee66ece50d62114e02914282fc37be3a91a) [#332](https://github.com/npm/node-tar/pull/332) ensuring close event is emitted after stream has ended (@webark)
-- [`b003c64`](https://github.com/npm/node-tar/commit/b003c64f624332e24e19b30dc011069bb6708680) [#314](https://github.com/npm/node-tar/pull/314) replace deprecated String.prototype.substr() (#314) (@CommanderRoot, @lukekarrys)
-
-### Documentation
-
-- [`f129929`](https://github.com/npm/node-tar/commit/f12992932f171ea248b27fad95e7d489a56d31ed) [#313](https://github.com/npm/node-tar/pull/313) remove dead link to benchmarks (#313) (@yetzt)
-- [`c1faa9f`](https://github.com/npm/node-tar/commit/c1faa9f44001dfb0bc7638b2850eb6058bd56a4a) add examples/explanation of using tar.t (@isaacs)
+- remove dead link to benchmarks (#313) (@yetzt)
+- add examples/explanation of using tar.t (@isaacs)
+- ensure close event is emitted after stream has ended (@webark)
+- replace deprecated String.prototype.substr() (@CommanderRoot,
+  @lukekarrys)
 
 ## 6.0
 
 - Drop support for node 6 and 8
-- fix symlinks and hardlinks on windows being packed with `\`-style path
-  targets
+- fix symlinks and hardlinks on windows being packed with
+  `\`-style path targets
 
 ## 5.0
 
 - Address unpack race conditions using path reservations
 - Change large-numbers errors from TypeError to Error
 - Add `TAR_*` error codes
-- Raise `TAR_BAD_ARCHIVE` warning/error when there are no valid entries
-  found in an archive
+- Raise `TAR_BAD_ARCHIVE` warning/error when there are no valid
+  entries found in an archive
 - do not treat ignored entries as an invalid archive
 - drop support for node v4
-- unpack: conditionally use a file mapping to write files on Windows
+- unpack: conditionally use a file mapping to write files on
+  Windows
 - Set more portable 'mode' value in portable mode
 - Set `portable` gzip option in portable mode
 
@@ -94,8 +77,8 @@
 
 ## 3.1
 
-- Support `@file.tar` as an entry argument to copy entries from one tar
-  file to another.
+- Support `@file.tar` as an entry argument to copy entries from
+  one tar file to another.
 - Add `noPax` option
 - `noResume` option for tar.t
 - win32: convert `<|>?:` chars to windows-friendly form

From: isaacs
Date: Wed, 10 Apr 2024 09:15:52 -0700
Subject: [PATCH 05/12] ci: update versions

---
 .github/workflows/ci.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1e477a87..7a400a47 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -6,7 +6,7 @@ jobs:
   build:
     strategy:
       matrix:
-        node-version: [16.x, 18.x, 20.x]
+        node-version: [18.x, 20.x, 21.x]
         platform:
           - os: ubuntu-latest
             shell: bash
@@ -21,10 +21,10 @@ jobs:
 
     steps:
       - name: Checkout Repository
-        uses: actions/checkout@v1.1.0
+        uses: actions/checkout@v4
 
       - name: Use Nodejs ${{ matrix.node-version }}
-        uses: actions/setup-node@v1
+        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}
 
@@ -32,4 +32,4 @@ jobs:
         run: npm install
 
       - name: Run Tests
-        run: npm test -- -c -t0
+        run: npm test -- -c

From ae9ce7ec2adb6300155cb6a46f8c9ea601330d81 Mon Sep 17 00:00:00 2001
From: isaacs
Date: Wed, 10 Apr 2024 09:34:54 -0700
Subject: [PATCH 06/12] test: fix normalize-unicode coverage on linux

---
 .../test/normalize-unicode.js-win32.test.cjs  | 30 +++++++
 .../test/normalize-unicode.js.test.cjs        | 30 -------
 test/normalize-unicode.js                     | 82 ++++++++++---------
 3 files changed, 74 insertions(+), 68 deletions(-)
 create mode 100644 tap-snapshots/test/normalize-unicode.js-win32.test.cjs
 delete mode 100644 tap-snapshots/test/normalize-unicode.js.test.cjs

diff --git a/tap-snapshots/test/normalize-unicode.js-win32.test.cjs b/tap-snapshots/test/normalize-unicode.js-win32.test.cjs
new file mode 100644
index 00000000..85e353d8
--- /dev/null
+++ b/tap-snapshots/test/normalize-unicode.js-win32.test.cjs
@@ -0,0 +1,30 @@
+/* IMPORTANT
+ * This snapshot file is auto-generated, but designed for humans.
+ * It should be checked into source control and tracked carefully.
+ * Re-generate by setting TAP_SNAPSHOT=1 and running tests.
+ * Make sure to inspect the output below. Do not ignore changes!
- */ -'use strict' -exports[`test/normalize-unicode.js > TAP > normalize with strip slashes > "\\\\\eee\\\\\\" > normalized 1`] = ` -\\\\\eee\\\\\\ -` - -exports[`test/normalize-unicode.js > TAP > normalize with strip slashes > "\\\\a\\\\b\\\\c\\\\d\\\\" > normalized 1`] = ` -/a/b/c/d -` - -exports[`test/normalize-unicode.js > TAP > normalize with strip slashes > "﹨aaaa﹨dddd﹨" > normalized 1`] = ` -﹨aaaa﹨dddd﹨ -` - -exports[`test/normalize-unicode.js > TAP > normalize with strip slashes > "\bbb\eee\" > normalized 1`] = ` -\bbb\eee\ -` - -exports[`test/normalize-unicode.js > TAP > normalize with strip slashes > "1/4foo.txt" > normalized 1`] = ` -1/4foo.txt -` - -exports[`test/normalize-unicode.js > TAP > normalize with strip slashes > "¼foo.txt" > normalized 1`] = ` -¼foo.txt -` diff --git a/test/normalize-unicode.js b/test/normalize-unicode.js index 969ee6ca..ae3efd4d 100644 --- a/test/normalize-unicode.js +++ b/test/normalize-unicode.js @@ -1,16 +1,11 @@ import t from 'tap' +import { fileURLToPath } from 'url' +import { normalizeUnicode } from '../dist/esm/normalize-unicode.js' +import { stripTrailingSlashes } from '../dist/esm/strip-trailing-slashes.js' +import { normalizeWindowsPath } from '../dist/esm/normalize-windows-path.js' -process.env.TESTING_TAR_FAKE_PLATFORM = 'win32' - -const [ - { normalizeUnicode }, - { stripTrailingSlashes }, - { normalizeWindowsPath }, -] = await Promise.all([ - import('../dist/esm/normalize-unicode.js'), - import('../dist/esm/strip-trailing-slashes.js'), - import('../dist/esm/normalize-windows-path.js'), -]) +const __filename = fileURLToPath(import.meta.url) +const fakePlatform = process.env.TESTING_TAR_FAKE_PLATFORM // café const cafe1 = Buffer.from([0x63, 0x61, 0x66, 0xc3, 0xa9]).toString() @@ -28,30 +23,41 @@ t.equal( t.equal(normalizeUnicode(cafe1), normalizeUnicode(cafe2), 'cached') t.equal(normalizeUnicode('foo'), 'foo', 'non-unicode string') -t.test('normalize with strip slashes', t => { - const paths = [ - '\\a\\b\\c\\d\\', - '﹨aaaa﹨dddd﹨', - '\bbb\eee\', - '\\\\\eee\\\\\\', - '¼foo.txt', - '1/4foo.txt', - ] - - t.plan(paths.length) - - for (const path of paths) { - t.test(JSON.stringify(path), t => { - const a = normalizeUnicode( - stripTrailingSlashes(normalizeWindowsPath(path)), - ) - const b = stripTrailingSlashes( - normalizeWindowsPath(normalizeUnicode(path)), - ) - t.matchSnapshot(a, 'normalized') - t.equal(a, b, 'order should not matter') - t.end() - }) - } - t.end() -}) +if (fakePlatform === 'win32') { + t.test('normalize with strip slashes', t => { + const paths = [ + '\\a\\b\\c\\d\\', + '﹨aaaa﹨dddd﹨', + '\bbb\eee\', + '\\\\\eee\\\\\\', + '¼foo.txt', + '1/4foo.txt', + ] + + t.plan(paths.length) + + for (const path of paths) { + t.test(JSON.stringify(path), t => { + const a = normalizeUnicode( + stripTrailingSlashes(normalizeWindowsPath(path)), + ) + const b = stripTrailingSlashes( + normalizeWindowsPath(normalizeUnicode(path)), + ) + t.matchSnapshot(a, 'normalized') + t.equal(a, b, 'order should not matter') + t.end() + }) + } + t.end() + }) +} + +if (fakePlatform !== 'win32') { + t.spawn(process.execPath, [__filename, 'win32'], { + env: { + ...process.env, + TESTING_TAR_FAKE_PLATFORM: 'win32', + } + }) +} From 23304160811cceb1a1949d3915d5a2a818726ec6 Mon Sep 17 00:00:00 2001 From: isaacs Date: Wed, 10 Apr 2024 09:37:22 -0700 Subject: [PATCH 07/12] test: code style, prefer () to _ for empty fns --- test/unpack.js | 188 ++++++++++++++++++++++++------------------------- 1 file changed, 94 insertions(+), 94 deletions(-) diff --git 
a/test/unpack.js b/test/unpack.js index 2b0ea269..9e92ba85 100644 --- a/test/unpack.js +++ b/test/unpack.js @@ -32,7 +32,7 @@ const isWindows = process.platform === 'win32' const isLongFile = f => f.match(/r.e.a.l.l.y.-.d.e.e.p.-.f.o.l.d.e.r.-.p.a.t.h/) -t.teardown(_ => rimraf(unpackdir)) +t.teardown(() => rimraf(unpackdir)) t.capture(process, 'umask', () => 0o22) @@ -43,7 +43,7 @@ t.before(async () => { t.test('basic file unpack tests', t => { const basedir = path.resolve(unpackdir, 'basic') - t.teardown(_ => rimraf(basedir)) + t.teardown(() => rimraf(basedir)) const cases = { 'emptypax.tar': { @@ -127,12 +127,12 @@ t.test('basic file unpack tests', t => { t.test('strict', t => { const unpack = new Unpack({ cwd: linkdir, strict: true }) fs.createReadStream(tf).pipe(unpack) - eos(unpack, _ => check(t)) + eos(unpack, () => check(t)) }) t.test('loose', t => { const unpack = new Unpack({ cwd: linkdir }) fs.createReadStream(tf).pipe(unpack) - eos(unpack, _ => check(t)) + eos(unpack, () => check(t)) }) }) @@ -212,11 +212,11 @@ t.test('links!', t => { t.test('async', t => { const unpack = new Unpack({ cwd: dir }) let finished = false - unpack.on('finish', _ => (finished = true)) - unpack.on('close', _ => + unpack.on('finish', () => (finished = true)) + unpack.on('close', () => t.ok(finished, 'emitted finish before close'), ) - unpack.on('close', _ => check(t)) + unpack.on('close', () => check(t)) unpack.end(data) }) @@ -235,11 +235,11 @@ t.test('links!', t => { t.test('async strip', t => { const unpack = new Unpack({ cwd: dir, strip: 1 }) let finished = false - unpack.on('finish', _ => (finished = true)) - unpack.on('close', _ => + unpack.on('finish', () => (finished = true)) + unpack.on('close', () => t.ok(finished, 'emitted finish before close'), ) - unpack.on('close', _ => checkForStrip(t)) + unpack.on('close', () => checkForStrip(t)) unpack.end(stripData) }) @@ -252,11 +252,11 @@ t.test('links!', t => { t.test('async strip 3', t => { const unpack = new Unpack({ cwd: dir, strip: 3 }) let finished = false - unpack.on('finish', _ => (finished = true)) - unpack.on('close', _ => + unpack.on('finish', () => (finished = true)) + unpack.on('close', () => t.ok(finished, 'emitted finish before close'), ) - unpack.on('close', _ => checkForStrip3(t)) + unpack.on('close', () => checkForStrip3(t)) unpack.end(stripData) }) }) @@ -267,7 +267,7 @@ t.test('links without cleanup (exercise clobbering code)', t => { t.plan(6) mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) t.beforeEach(() => { // clobber this junk @@ -296,11 +296,11 @@ t.test('links without cleanup (exercise clobbering code)', t => { t.test('async', t => { const unpack = new Unpack({ cwd: dir }) let prefinished = false - unpack.on('prefinish', _ => (prefinished = true)) - unpack.on('finish', _ => + unpack.on('prefinish', () => (prefinished = true)) + unpack.on('finish', () => t.ok(prefinished, 'emitted prefinish before finish'), ) - unpack.on('close', _ => check(t)) + unpack.on('close', () => check(t)) unpack.end(data) }) @@ -312,7 +312,7 @@ t.test('links without cleanup (exercise clobbering code)', t => { t.test('async again', t => { const unpack = new Unpack({ cwd: dir }) - eos(unpack, _ => check(t)) + eos(unpack, () => check(t)) unpack.end(data) }) @@ -324,7 +324,7 @@ t.test('links without cleanup (exercise clobbering code)', t => { t.test('async unlink', t => { const unpack = new Unpack({ cwd: dir, unlink: true }) - unpack.on('close', _ => check(t)) + unpack.on('close', () => check(t)) unpack.end(data) }) @@ 
-338,7 +338,7 @@ t.test('links without cleanup (exercise clobbering code)', t => { t.test('nested dir dupe', t => { const dir = path.resolve(unpackdir, 'nested-dir') mkdirp.sync(dir + '/d/e/e/p') - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) const expect = { 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt': 'short\n', 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': @@ -366,7 +366,7 @@ t.test('nested dir dupe', t => { // while we're at it, why not use gzip too? const zip = new z.Gzip() zip.pipe(unpack) - unpack.on('close', _ => check(t)) + unpack.on('close', () => check(t)) zip.end(data) }) @@ -378,7 +378,7 @@ t.test( t => { const dir = path.resolve(unpackdir, 'symlink-junk') - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) t.beforeEach(async () => { await rimraf(dir) await mkdirp(dir) @@ -437,7 +437,7 @@ t.test( cwd: dir, onwarn: (c, w, d) => warnings.push([c, w, d]), }) - u.on('close', _ => { + u.on('close', () => { t.equal( fs.lstatSync(dir + '/d/i').mode & 0o7777, isWindows ? 0o666 : 0o755, @@ -452,7 +452,7 @@ t.test( fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink', ) - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + t.throws(() => fs.statSync(dir + '/d/i/r/symlink/x')) } t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') if (!isWindows) { @@ -491,7 +491,7 @@ t.test( fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink', ) - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + t.throws(() => fs.statSync(dir + '/d/i/r/symlink/x')) } t.equal(warnings.length, 1) t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') @@ -514,7 +514,7 @@ t.test( onwarn: (c, w, d) => warnings.push([c, w, d]), preservePaths: true, }) - u.on('close', _ => { + u.on('close', () => { t.same(warnings, []) t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') @@ -565,7 +565,7 @@ t.test( onwarn: (c, w, d) => warnings.push([c, w, d]), unlink: true, }) - u.on('close', _ => { + u.on('close', () => { t.same(warnings, []) t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') @@ -596,7 +596,7 @@ t.test( onwarn: (c, w, d) => warnings.push([c, w, d]), unlink: true, }) - u.on('close', _ => { + u.on('close', () => { t.same(warnings, [['TAR_ENTRY_ERROR', 'poop', poop]]) reset() t.end() @@ -642,14 +642,14 @@ t.test( }, chmod: true, }) - u.on('close', _ => { + u.on('close', () => { t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') t.ok( fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink', ) - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + t.throws(() => fs.statSync(dir + '/d/i/r/symlink/x')) t.equal(warnings.length, 1) t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') t.equal( @@ -687,7 +687,7 @@ t.test( fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), 'got symlink', ) - t.throws(_ => fs.statSync(dir + '/d/i/r/symlink/x')) + t.throws(() => fs.statSync(dir + '/d/i/r/symlink/x')) t.equal(warnings.length, 1) t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') t.equal( @@ -709,7 +709,7 @@ t.test( t.test('unsupported entries', t => { const dir = path.resolve(unpackdir, 'unsupported-entries') mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) const unknown = new Header({ path: 'qux', size: 4 }) unknown.encode() unknown.block?.write('Z', 156) @@ 
-765,7 +765,7 @@ t.test('unsupported entries', t => { }, ], ] - u.on('close', _ => { + u.on('close', () => { t.equal(fs.readdirSync(dir).length, 0) t.match(warnings, expect) t.end() @@ -782,7 +782,7 @@ t.test('unsupported entries', t => { onwarn: (c, w, d) => warnings.push([c, w, d]), }) u.on('error', e => errors.push(e)) - u.on('close', _ => { + u.on('close', () => { t.equal(fs.readdirSync(dir).length, 0) t.same(warnings, []) t.match(errors, [ @@ -810,7 +810,7 @@ t.test('unsupported entries', t => { t.test('file in dir path', t => { const dir = path.resolve(unpackdir, 'file-junk') - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) t.beforeEach(async () => { await rimraf(dir) await mkdirp(dir) @@ -842,14 +842,14 @@ t.test('file in dir path', t => { t.test('fail because of file', t => { const check = t => { t.equal(fs.readFileSync(dir + '/d/i/r/file', 'utf8'), 'a') - t.throws(_ => fs.statSync(dir + '/d/i/r/file/a/b/c')) + t.throws(() => fs.statSync(dir + '/d/i/r/file/a/b/c')) t.end() } t.plan(2) t.test('async', t => { - new Unpack({ cwd: dir }).on('close', _ => check(t)).end(data) + new Unpack({ cwd: dir }).on('close', () => check(t)).end(data) }) t.test('sync', t => { @@ -869,7 +869,7 @@ t.test('file in dir path', t => { t.test('async', t => { new Unpack({ cwd: dir, unlink: true }) - .on('close', _ => check(t)) + .on('close', () => check(t)) .end(data) }) @@ -885,7 +885,7 @@ t.test('file in dir path', t => { t.test('set umask option', t => { const dir = path.resolve(unpackdir, 'umask') mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) const data = makeTar([ { @@ -901,7 +901,7 @@ t.test('set umask option', t => { umask: 0o027, cwd: dir, }) - .on('close', _ => { + .on('close', () => { t.equal( fs.statSync(dir + '/d/i/r').mode & 0o7777, isWindows ? 0o666 : 0o750, @@ -917,7 +917,7 @@ t.test('set umask option', t => { t.test('absolute paths', t => { const dir = path.join(unpackdir, 'absolute-paths') - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) t.beforeEach(async () => { await rimraf(dir) await mkdirp(dir) @@ -970,7 +970,7 @@ t.test('absolute paths', t => { cwd: dir, onwarn: (_c, w, d) => warnings.push([w, d]), }) - .on('close', _ => check(t)) + .on('close', () => check(t)) .end(data) }) @@ -1018,7 +1018,7 @@ t.test('absolute paths', t => { cwd: dir, onwarn: (_c, w, d) => warnings.push([w, d]), }) - .on('close', _ => check(t)) + .on('close', () => check(t)) .end(data) }) @@ -1040,7 +1040,7 @@ t.test('absolute paths', t => { t.test('.. paths', t => { const dir = path.join(unpackdir, 'dotted-paths') - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) t.beforeEach(async () => { await rimraf(dir) await mkdirp(dir) @@ -1072,7 +1072,7 @@ t.test('.. paths', t => { { path: dotted, code: 'TAR_ENTRY_ERROR' }, ], ]) - t.throws(_ => fs.lstatSync(resolved)) + t.throws(() => fs.lstatSync(resolved)) t.end() } @@ -1085,7 +1085,7 @@ t.test('.. paths', t => { cwd: dir, onwarn: (_c, w, d) => warnings.push([w, d]), }) - .on('close', _ => check(t)) + .on('close', () => check(t)) .end(data) }) @@ -1123,7 +1123,7 @@ t.test('.. 
paths', t => { cwd: dir, onwarn: (_c, w, d) => warnings.push([w, d]), }) - .on('close', _ => check(t)) + .on('close', () => check(t)) .end(data) }) @@ -1251,7 +1251,7 @@ t.test('fail all stats', t => { cwd: dir, onwarn: (_c, w, d) => warnings.push([w, d]), }) - .on('close', _ => check(t, expect)) + .on('close', () => check(t, expect)) .end(data) }) @@ -1289,7 +1289,7 @@ t.test('fail symlink', t => { poop.code = 'EPOOP' const unmutate = mutateFS.fail('symlink', poop) const dir = path.join(unpackdir, 'symlink-fail') - t.teardown(async _ => { + t.teardown(async () => { unmutate() await rimraf(dir) }) @@ -1334,7 +1334,7 @@ t.test('fail symlink', t => { cwd: dir, onwarn: (_c, w, d) => warnings.push([w, d]), }) - .on('close', _ => check(t, expect)) + .on('close', () => check(t, expect)) .end(data) }) @@ -1355,7 +1355,7 @@ t.test('fail chmod', t => { poop.code = 'EPOOP' const unmutate = mutateFS.fail('chmod', poop) const dir = path.join(unpackdir, 'chmod-fail') - t.teardown(async _ => { + t.teardown(async () => { unmutate() await rimraf(dir) }) @@ -1401,7 +1401,7 @@ t.test('fail chmod', t => { chmod: true, processUmask: 0o22, }) - .on('close', _ => check(t, expect)) + .on('close', () => check(t, expect)) .end(data) }) @@ -1424,7 +1424,7 @@ t.test('fail mkdir', t => { poop.code = 'EPOOP' let unmutate const dir = path.join(unpackdir, 'mkdir-fail') - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) const warnings = [] t.beforeEach(async () => { @@ -1478,7 +1478,7 @@ t.test('fail mkdir', t => { cwd: dir, onwarn: (_c, w, d) => warnings.push([w, d]), }) - .on('close', _ => check(t)) + .on('close', () => check(t)) .end(data) }) @@ -1490,7 +1490,7 @@ t.test('fail write', t => { poop.code = 'EPOOP' const unmutate = mutateFS.fail('write', poop) const dir = path.join(unpackdir, 'write-fail') - t.teardown(async _ => { + t.teardown(async () => { unmutate() await rimraf(dir) }) @@ -1528,7 +1528,7 @@ t.test('fail write', t => { cwd: dir, onwarn: (_c, w, d) => warnings.push([w, d]), }) - .on('close', _ => check(t)) + .on('close', () => check(t)) .end(data) }) @@ -1545,7 +1545,7 @@ t.test('fail write', t => { t.test('skip existing', t => { const dir = path.join(unpackdir, 'skip-newer') - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) const date = new Date('2011-03-27T22:16:31.000Z') t.beforeEach(async () => { @@ -1582,7 +1582,7 @@ t.test('skip existing', t => { cwd: dir, keep: true, }) - .on('close', _ => check(t)) + .on('close', () => check(t)) .end(data) }) @@ -1599,7 +1599,7 @@ t.test('skip existing', t => { t.test('skip newer', t => { const dir = path.join(unpackdir, 'skip-newer') - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) const date = new Date('2013-12-19T17:00:00.000Z') t.beforeEach(async () => { @@ -1636,7 +1636,7 @@ t.test('skip newer', t => { cwd: dir, newer: true, }) - .on('close', _ => check(t)) + .on('close', () => check(t)) .end(data) }) @@ -1653,7 +1653,7 @@ t.test('skip newer', t => { t.test('no mtime', t => { const dir = path.join(unpackdir, 'skip-newer') - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) t.beforeEach(async () => { await rimraf(dir) @@ -1702,7 +1702,7 @@ t.test('no mtime', t => { cwd: dir, noMtime: true, }) - .on('close', _ => check(t)) + .on('close', () => check(t)) .end(data) }) @@ -1720,7 +1720,7 @@ t.test('no mtime', t => { t.test('unpack big enough to pause/drain', t => { const dir = path.resolve(unpackdir, 'drain-clog') mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) const stream = 
fs.createReadStream(fixtures + '/parses.tar') const u = new Unpack({ cwd: dir, @@ -1732,7 +1732,7 @@ t.test('unpack big enough to pause/drain', t => { t.fail('should not get ignored entry: ' + entry.path), ) - u.on('close', _ => { + u.on('close', () => { t.pass('extraction finished') const actual = fs.readdirSync(dir) const expected = fs.readdirSync(parses) @@ -1749,17 +1749,17 @@ t.test('set owner', t => { const myGid = 1024 const getuid = process.getuid const getgid = process.getgid - process.getuid = _ => myUid - process.getgid = _ => myGid + process.getuid = () => myUid + process.getgid = () => myGid t.teardown( - _ => ((process.getuid = getuid), (process.getgid = getgid)), + () => ((process.getuid = getuid), (process.getgid = getgid)), ) // can't actually do this because it requires root, but we can // verify that chown gets called. t.test('as root, defaults to true', t => { const getuid = process.getuid - process.getuid = _ => 0 + process.getuid = () => 0 const u = new Unpack() t.equal(u.preserveOwner, true, 'preserveOwner enabled') process.getuid = getuid @@ -1768,7 +1768,7 @@ t.test('set owner', t => { t.test('as non-root, defaults to false', t => { const getuid = process.getuid - process.getuid = _ => 501 + process.getuid = () => 501 const u = new Unpack() t.equal(u.preserveOwner, false, 'preserveOwner disabled') process.getuid = getuid @@ -1855,7 +1855,7 @@ t.test('set owner', t => { t.test('sync', t => { mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) let warned = false const u = new UnpackSync({ cwd: dir, @@ -1873,7 +1873,7 @@ t.test('set owner', t => { t.test('async', t => { mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) let warned = false const u = new Unpack({ cwd: dir, @@ -1908,9 +1908,9 @@ t.test('set owner', t => { called++ cb() } - fs.chownSync = fs.lchownSync = fs.fchownSync = _ => called++ + fs.chownSync = fs.lchownSync = fs.fchownSync = () => called++ - t.teardown(_ => { + t.teardown(() => { fs.chown = chown fs.fchown = fchown fs.lchown = lchown @@ -1921,7 +1921,7 @@ t.test('set owner', t => { t.test('sync', t => { mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) called = 0 const u = new UnpackSync({ cwd: dir, preserveOwner: true }) u.end(data) @@ -1931,11 +1931,11 @@ t.test('set owner', t => { t.test('async', t => { mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) called = 0 const u = new Unpack({ cwd: dir, preserveOwner: true }) u.end(data) - u.on('close', _ => { + u.on('close', () => { t.ok(called >= 5, 'called chowns') t.end() }) @@ -1950,7 +1950,7 @@ t.test('set owner', t => { const un = mutateFS.fail('chown', poop) const unf = mutateFS.fail('fchown', poop) const unl = mutateFS.fail('lchown', poop) - t.teardown(async _ => { + t.teardown(async () => { un() unf() unl() @@ -1985,7 +1985,7 @@ t.test('set owner', t => { t.test('async', t => { const u = new Unpack({ cwd: dir, preserveOwner: false }) u.end(data) - u.on('close', _ => check(t)) + u.on('close', () => check(t)) }) t.end() @@ -2033,7 +2033,7 @@ t.test('unpack when dir is not writable', t => { t.test('async', t => { const u = new Unpack({ cwd: dir, strict: true }) u.end(data) - u.on('close', _ => check(t)) + u.on('close', () => check(t)) }) t.end() @@ -2071,7 +2071,7 @@ t.test('transmute chars on windows', t => { win32: true, }) u.end(data) - u.on('close', _ => check(t)) + u.on('close', () => check(t)) }) t.test('sync', t => { @@ -2145,7 +2145,7 @@ t.test('use explicit chmod when 
required by umask', t => { chmod: true, processUmask: 0o22, }) - unpack.on('close', _ => check(t)) + unpack.on('close', () => check(t)) unpack.end(data) }) @@ -2188,7 +2188,7 @@ t.test('dont use explicit chmod if chmod flag not set', t => { t.test('async', t => { mkdirp.sync(basedir) const unpack = new Unpack({ cwd: basedir }) - unpack.on('close', _ => check(t)) + unpack.on('close', () => check(t)) unpack.end(data) }) @@ -2213,7 +2213,7 @@ t.test('chown implicit dirs and also the entries', t => { const getuid = process.getuid const getgid = process.getgid - t.teardown(_ => { + t.teardown(() => { fs.chown = chown fs.chownSync = chownSync fs.lchown = lchown @@ -2278,15 +2278,15 @@ t.test('chown implicit dirs and also the entries', t => { t.test('throws when setting uid/gid improperly', t => { t.throws( - _ => new Unpack({ uid: 420 }), + () => new Unpack({ uid: 420 }), TypeError('cannot set owner without number uid and gid'), ) t.throws( - _ => new Unpack({ gid: 666 }), + () => new Unpack({ gid: 666 }), TypeError('cannot set owner without number uid and gid'), ) t.throws( - _ => new Unpack({ uid: 1, gid: 2, preserveOwner: true }), + () => new Unpack({ uid: 1, gid: 2, preserveOwner: true }), TypeError( 'cannot preserve owner in archive and also set owner explicitly', ), @@ -2304,7 +2304,7 @@ t.test('chown implicit dirs and also the entries', t => { uid: 420, gid: 666, }) - unpack.on('close', _ => check(t)) + unpack.on('close', () => check(t)) unpack.end(data) }) .then( @@ -2342,7 +2342,7 @@ t.test('chown implicit dirs and also the entries', t => { t.test('bad cwd setting', t => { const basedir = path.resolve(unpackdir, 'bad-cwd') mkdirp.sync(basedir) - t.teardown(_ => rimraf(basedir)) + t.teardown(() => rimraf(basedir)) const cases = [ // the cwd itself @@ -2383,7 +2383,7 @@ t.test('bad cwd setting', t => { const cwd = basedir + '/file' const opt = { cwd: cwd } - t.throws(_ => new UnpackSync(opt).end(data), { + t.throws(() => new UnpackSync(opt).end(data), { name: 'CwdError', message: "ENOTDIR: Cannot cd into '" + normPath(cwd) + "'", path: normPath(cwd), @@ -2408,7 +2408,7 @@ t.test('bad cwd setting', t => { const cwd = basedir + '/asdf/asdf/asdf' const opt = { cwd: cwd } - t.throws(_ => new UnpackSync(opt).end(data), { + t.throws(() => new UnpackSync(opt).end(data), { name: 'CwdError', message: "ENOENT: Cannot cd into '" + normPath(cwd) + "'", path: normPath(cwd), @@ -2436,7 +2436,7 @@ t.test('bad cwd setting', t => { t.test('transform', t => { const basedir = path.resolve(unpackdir, 'transform') - t.teardown(_ => rimraf(basedir)) + t.teardown(() => rimraf(basedir)) const cases = { 'emptypax.tar': { @@ -2514,12 +2514,12 @@ t.test('transform', t => { transform: txFn, }) fs.createReadStream(tf).pipe(unpack) - eos(unpack, _ => check(t)) + eos(unpack, () => check(t)) }) t.test('loose', t => { const unpack = new Unpack({ cwd: dir, transform: txFn }) fs.createReadStream(tf).pipe(unpack) - eos(unpack, _ => check(t)) + eos(unpack, () => check(t)) }) }) @@ -2547,7 +2547,7 @@ t.test('transform', t => { t.test('transform error', t => { const dir = path.resolve(unpackdir, 'transform-error') mkdirp.sync(dir) - t.teardown(_ => rimraf(dir)) + t.teardown(() => rimraf(dir)) const tarfile = path.resolve(tars, 'body-byte-counts.tar') const tardata = fs.readFileSync(tarfile) From b3afdbb26496fe42110b3708b8f75c4ca4c853b7 Mon Sep 17 00:00:00 2001 From: isaacs Date: Wed, 10 Apr 2024 10:23:23 -0700 Subject: [PATCH 08/12] unpack test: use modern tap features This replaces a lot of manual mkdir/rimraf with 
t.testdir() --- test/unpack.js | 590 +++++++++++++++++++++---------------------------- 1 file changed, 254 insertions(+), 336 deletions(-) diff --git a/test/unpack.js b/test/unpack.js index 9e92ba85..f7c4f88a 100644 --- a/test/unpack.js +++ b/test/unpack.js @@ -15,7 +15,6 @@ const __dirname = path.dirname(__filename) const fixtures = path.resolve(__dirname, 'fixtures') const tars = path.resolve(fixtures, 'tars') const parses = path.resolve(fixtures, 'parse') -const unpackdir = path.resolve(fixtures, 'unpack') import eos from 'end-of-stream' import { mkdirp } from 'mkdirp' @@ -32,19 +31,9 @@ const isWindows = process.platform === 'win32' const isLongFile = f => f.match(/r.e.a.l.l.y.-.d.e.e.p.-.f.o.l.d.e.r.-.p.a.t.h/) -t.teardown(() => rimraf(unpackdir)) - t.capture(process, 'umask', () => 0o22) -t.before(async () => { - await rimraf(unpackdir) - await mkdirp(unpackdir) -}) - t.test('basic file unpack tests', t => { - const basedir = path.resolve(unpackdir, 'basic') - t.teardown(() => rimraf(basedir)) - const cases = { 'emptypax.tar': { '🌟.txt': '🌟✧✩⭐︎✪✫✬✭✮⚝✯✰✵✶✷✸✹❂⭑⭒★☆✡☪✴︎✦✡️🔯✴️🌠\n', @@ -99,13 +88,11 @@ t.test('basic file unpack tests', t => { tarfiles.forEach(tarfile => { t.test(tarfile, t => { const tf = path.resolve(tars, tarfile) - const dir = path.resolve(basedir, tarfile) - const linkdir = path.resolve(basedir, tarfile + '.link') + const dir = t.testdir({}) + const linkdir = dir + '.link' t.beforeEach(async () => { - await rimraf(dir) await rimraf(linkdir) - await mkdirp(dir) - fs.symlinkSync(dir, linkdir, 'junction') + fs.symlinkSync(dir, linkdir) }) const check = t => { @@ -163,7 +150,7 @@ t.test('cwd default to process cwd', t => { }) t.test('links!', t => { - const dir = path.resolve(unpackdir, 'links') + const dir = t.testdir({}) const data = fs.readFileSync(tars + '/links.tar') const stripData = fs.readFileSync(tars + '/links-strip.tar') @@ -262,12 +249,10 @@ t.test('links!', t => { }) t.test('links without cleanup (exercise clobbering code)', t => { - const dir = path.resolve(unpackdir, 'links') + const dir = t.testdir({}) const data = fs.readFileSync(tars + '/links.tar') t.plan(6) - mkdirp.sync(dir) - t.teardown(() => rimraf(dir)) t.beforeEach(() => { // clobber this junk @@ -336,9 +321,8 @@ t.test('links without cleanup (exercise clobbering code)', t => { }) t.test('nested dir dupe', t => { - const dir = path.resolve(unpackdir, 'nested-dir') + const dir = t.testdir({}) mkdirp.sync(dir + '/d/e/e/p') - t.teardown(() => rimraf(dir)) const expect = { 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt': 'short\n', 'd/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc': @@ -376,14 +360,6 @@ t.test( skip: isWindows && 'symlinks not fully supported', }, t => { - const dir = path.resolve(unpackdir, 'symlink-junk') - - t.teardown(() => rimraf(dir)) - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - const data = makeTar([ { path: 'd/i', @@ -433,26 +409,27 @@ t.test( t.test('no clobbering', t => { const warnings = [] + const cwd = t.testdir({}) const u = new Unpack({ - cwd: dir, + cwd, onwarn: (c, w, d) => warnings.push([c, w, d]), }) u.on('close', () => { t.equal( - fs.lstatSync(dir + '/d/i').mode & 0o7777, + fs.lstatSync(cwd + '/d/i').mode & 0o7777, isWindows ? 0o666 : 0o755, ) t.equal( - fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, + fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, isWindows ? 
0o666 : 0o751, ) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') if (!isWindows) { t.ok( - fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), 'got symlink', ) - t.throws(() => fs.statSync(dir + '/d/i/r/symlink/x')) + t.throws(() => fs.statSync(cwd + '/d/i/r/symlink/x')) } t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') if (!isWindows) { @@ -464,8 +441,8 @@ t.test( name: 'SymlinkError', code: 'TAR_SYMLINK_ERROR', tarCode: 'TAR_ENTRY_ERROR', - path: dir + '/d/i/r/symlink/', - symlink: dir + '/d/i/r/symlink', + path: cwd + '/d/i/r/symlink/', + symlink: cwd + '/d/i/r/symlink', }) } t.equal(warnings.length, 1) @@ -476,22 +453,23 @@ t.test( t.test('no clobbering, sync', t => { const warnings = [] + const cwd = t.testdir({}) const u = new UnpackSync({ - cwd: dir, + cwd, onwarn: (c, w, d) => warnings.push([c, w, d]), }) u.end(data) t.equal( - fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, + fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, isWindows ? 0o666 : 0o751, ) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') if (!isWindows) { t.ok( - fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), 'got symlink', ) - t.throws(() => fs.statSync(dir + '/d/i/r/symlink/x')) + t.throws(() => fs.statSync(cwd + '/d/i/r/symlink/x')) } t.equal(warnings.length, 1) t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') @@ -501,33 +479,34 @@ t.test( ) t.match(warnings[0][2], { name: 'SymlinkError', - path: dir + '/d/i/r/symlink/', - symlink: dir + '/d/i/r/symlink', + path: cwd + '/d/i/r/symlink/', + symlink: cwd + '/d/i/r/symlink', }) t.end() }) t.test('extract through symlink', t => { const warnings = [] + const cwd = t.testdir({}) const u = new Unpack({ - cwd: dir, + cwd, onwarn: (c, w, d) => warnings.push([c, w, d]), preservePaths: true, }) u.on('close', () => { t.same(warnings, []) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') + t.equal(fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') t.ok( - fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), 'got symlink', ) t.ok( - fs.lstatSync(dir + '/d/i/r/dir/x').isFile(), + fs.lstatSync(cwd + '/d/i/r/dir/x').isFile(), 'x thru link', ) t.ok( - fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), + fs.lstatSync(cwd + '/d/i/r/symlink/x').isFile(), 'x thru link', ) t.end() @@ -537,22 +516,23 @@ t.test( t.test('extract through symlink sync', t => { const warnings = [] + const cwd = t.testdir({}) const u = new UnpackSync({ - cwd: dir, + cwd, onwarn: (c, w, d) => warnings.push([c, w, d]), preservePaths: true, }) u.end(data) t.same(warnings, []) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') + t.equal(fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') t.ok( - fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), 'got symlink', ) - t.ok(fs.lstatSync(dir + '/d/i/r/dir/x').isFile(), 'x thru link') + t.ok(fs.lstatSync(cwd + '/d/i/r/dir/x').isFile(), 'x thru link') t.ok( - fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), + 
fs.lstatSync(cwd + '/d/i/r/symlink/x').isFile(), 'x thru link', ) t.end() @@ -560,25 +540,26 @@ t.test( t.test('clobber through symlink', t => { const warnings = [] + const cwd = t.testdir({}) const u = new Unpack({ - cwd: dir, + cwd, onwarn: (c, w, d) => warnings.push([c, w, d]), unlink: true, }) u.on('close', () => { t.same(warnings, []) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') + t.equal(fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') t.notOk( - fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), 'no link', ) t.ok( - fs.lstatSync(dir + '/d/i/r/symlink').isDirectory(), + fs.lstatSync(cwd + '/d/i/r/symlink').isDirectory(), 'sym is dir', ) t.ok( - fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), + fs.lstatSync(cwd + '/d/i/r/symlink/x').isFile(), 'x thru link', ) t.end() @@ -590,9 +571,10 @@ t.test( const poop = new Error('poop') // for some reason, resetting fs.unlink in the teardown was breaking const reset = mutateFS.fail('unlink', poop) + const cwd = t.testdir({}) const warnings = [] const u = new Unpack({ - cwd: dir, + cwd, onwarn: (c, w, d) => warnings.push([c, w, d]), unlink: true, }) @@ -606,50 +588,59 @@ t.test( t.test('clobber through symlink sync', t => { const warnings = [] + const cwd = t.testdir({}) const u = new UnpackSync({ - cwd: dir, + cwd, onwarn: (c, w, d) => warnings.push([c, w, d]), unlink: true, }) u.end(data) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') + t.equal(fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') t.notOk( - fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), 'no link', ) t.ok( - fs.lstatSync(dir + '/d/i/r/symlink').isDirectory(), + fs.lstatSync(cwd + '/d/i/r/symlink').isDirectory(), 'sym is dir', ) t.ok( - fs.lstatSync(dir + '/d/i/r/symlink/x').isFile(), + fs.lstatSync(cwd + '/d/i/r/symlink/x').isFile(), 'x thru link', ) t.end() }) t.test('clobber dirs', t => { - mkdirp.sync(dir + '/d/i/r/dir') - mkdirp.sync(dir + '/d/i/r/file') - mkdirp.sync(dir + '/d/i/r/link') - mkdirp.sync(dir + '/d/i/r/symlink') + const cwd = t.testdir({ + d: { + i: { + r: { + dir: {}, + file: {}, + link: {}, + symlink: {}, + }, + }, + }, + }) const warnings = [] const u = new Unpack({ - cwd: dir, + cwd, onwarn: (c, w, d) => { warnings.push([c, w, d]) }, chmod: true, }) u.on('close', () => { - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') + t.equal(fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') t.ok( - fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), 'got symlink', ) - t.throws(() => fs.statSync(dir + '/d/i/r/symlink/x')) + t.throws(() => fs.statSync(cwd + '/d/i/r/symlink/x')) t.equal(warnings.length, 1) t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') t.equal( @@ -658,8 +649,8 @@ t.test( ) t.match(warnings[0][2], { name: 'SymlinkError', - path: dir + '/d/i/r/symlink/', - symlink: dir + '/d/i/r/symlink', + path: cwd + '/d/i/r/symlink/', + symlink: cwd + '/d/i/r/symlink', }) t.end() }) @@ -667,13 +658,21 @@ t.test( }) t.test('clobber dirs sync', t => { - 
mkdirp.sync(dir + '/d/i/r/dir') - mkdirp.sync(dir + '/d/i/r/file') - mkdirp.sync(dir + '/d/i/r/link') - mkdirp.sync(dir + '/d/i/r/symlink') + const cwd = t.testdir({ + d: { + i: { + r: { + dir: {}, + file: {}, + link: {}, + symlink: {}, + }, + }, + }, + }) const warnings = [] const u = new UnpackSync({ - cwd: dir, + cwd, onwarn: (c, w, d) => { warnings.push([c, w, d]) }, @@ -681,13 +680,13 @@ t.test( processUmask: 0o22, }) u.end(data) - t.equal(fs.lstatSync(dir + '/d/i/r/dir').mode & 0o7777, 0o751) - t.ok(fs.lstatSync(dir + '/d/i/r/file').isFile(), 'got file') + t.equal(fs.lstatSync(cwd + '/d/i/r/dir').mode & 0o7777, 0o751) + t.ok(fs.lstatSync(cwd + '/d/i/r/file').isFile(), 'got file') t.ok( - fs.lstatSync(dir + '/d/i/r/symlink').isSymbolicLink(), + fs.lstatSync(cwd + '/d/i/r/symlink').isSymbolicLink(), 'got symlink', ) - t.throws(() => fs.statSync(dir + '/d/i/r/symlink/x')) + t.throws(() => fs.statSync(cwd + '/d/i/r/symlink/x')) t.equal(warnings.length, 1) t.equal(warnings[0][0], 'TAR_ENTRY_ERROR') t.equal( @@ -696,8 +695,8 @@ t.test( ) t.match(warnings[0][2], { name: 'SymlinkError', - path: dir + '/d/i/r/symlink/', - symlink: dir + '/d/i/r/symlink', + path: cwd + '/d/i/r/symlink/', + symlink: cwd + '/d/i/r/symlink', }) t.end() }) @@ -707,9 +706,6 @@ t.test( ) t.test('unsupported entries', t => { - const dir = path.resolve(unpackdir, 'unsupported-entries') - mkdirp.sync(dir) - t.teardown(() => rimraf(dir)) const unknown = new Header({ path: 'qux', size: 4 }) unknown.encode() unknown.block?.write('Z', 156) @@ -736,9 +732,10 @@ t.test('unsupported entries', t => { ]) t.test('basic, warns', t => { + const cwd = t.testdir({}) const warnings = [] const u = new Unpack({ - cwd: dir, + cwd, onwarn: (c, w, d) => warnings.push([c, w, d]), }) const c = 'TAR_ENTRY_UNSUPPORTED' @@ -766,7 +763,7 @@ t.test('unsupported entries', t => { ], ] u.on('close', () => { - t.equal(fs.readdirSync(dir).length, 0) + t.equal(fs.readdirSync(cwd).length, 0) t.match(warnings, expect) t.end() }) @@ -774,16 +771,17 @@ t.test('unsupported entries', t => { }) t.test('strict, throws', t => { + const cwd = t.testdir({}) const warnings = [] const errors = [] const u = new Unpack({ - cwd: dir, + cwd, strict: true, onwarn: (c, w, d) => warnings.push([c, w, d]), }) u.on('error', e => errors.push(e)) u.on('close', () => { - t.equal(fs.readdirSync(dir).length, 0) + t.equal(fs.readdirSync(cwd).length, 0) t.same(warnings, []) t.match(errors, [ { @@ -808,14 +806,6 @@ t.test('unsupported entries', t => { }) t.test('file in dir path', t => { - const dir = path.resolve(unpackdir, 'file-junk') - - t.teardown(() => rimraf(dir)) - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - const data = makeTar([ { path: 'd/i/r/file', @@ -841,40 +831,46 @@ t.test('file in dir path', t => { t.test('fail because of file', t => { const check = t => { - t.equal(fs.readFileSync(dir + '/d/i/r/file', 'utf8'), 'a') - t.throws(() => fs.statSync(dir + '/d/i/r/file/a/b/c')) + const cwd = t.testdirName + t.equal(fs.readFileSync(cwd + '/d/i/r/file', 'utf8'), 'a') + t.throws(() => fs.statSync(cwd + '/d/i/r/file/a/b/c')) t.end() } t.plan(2) t.test('async', t => { - new Unpack({ cwd: dir }).on('close', () => check(t)).end(data) + const cwd = t.testdir({}) + new Unpack({ cwd }).on('close', () => check(t)).end(data) }) t.test('sync', t => { - new UnpackSync({ cwd: dir }).end(data) + const cwd = t.testdir({}) + new UnpackSync({ cwd }).end(data) check(t) }) }) t.test('clobber on through', t => { const check = t => { - t.ok(fs.statSync(dir + 
'/d/i/r/file').isDirectory()) - t.equal(fs.readFileSync(dir + '/d/i/r/file/a/b/c', 'utf8'), 'b') + const cwd = t.testdirName + t.ok(fs.statSync(cwd + '/d/i/r/file').isDirectory()) + t.equal(fs.readFileSync(cwd + '/d/i/r/file/a/b/c', 'utf8'), 'b') t.end() } t.plan(2) t.test('async', t => { - new Unpack({ cwd: dir, unlink: true }) + const cwd = t.testdir({}) + new Unpack({ cwd, unlink: true }) .on('close', () => check(t)) .end(data) }) t.test('sync', t => { - new UnpackSync({ cwd: dir, unlink: true }).end(data) + const cwd = t.testdir({}) + new UnpackSync({ cwd, unlink: true }).end(data) check(t) }) }) @@ -883,9 +879,7 @@ t.test('file in dir path', t => { }) t.test('set umask option', t => { - const dir = path.resolve(unpackdir, 'umask') - mkdirp.sync(dir) - t.teardown(() => rimraf(dir)) + const cwd = t.testdir({}) const data = makeTar([ { @@ -899,15 +893,15 @@ t.test('set umask option', t => { new Unpack({ umask: 0o027, - cwd: dir, + cwd, }) .on('close', () => { t.equal( - fs.statSync(dir + '/d/i/r').mode & 0o7777, + fs.statSync(cwd + '/d/i/r').mode & 0o7777, isWindows ? 0o666 : 0o750, ) t.equal( - fs.statSync(dir + '/d/i/r/dir').mode & 0o7777, + fs.statSync(cwd + '/d/i/r/dir').mode & 0o7777, isWindows ? 0o666 : 0o751, ) t.end() @@ -916,7 +910,7 @@ t.test('set umask option', t => { }) t.test('absolute paths', t => { - const dir = path.join(unpackdir, 'absolute-paths') + const dir = t.testdir({}) t.teardown(() => rimraf(dir)) t.beforeEach(async () => { await rimraf(dir) @@ -1039,8 +1033,7 @@ t.test('absolute paths', t => { }) t.test('.. paths', t => { - const dir = path.join(unpackdir, 'dotted-paths') - t.teardown(() => rimraf(dir)) + const dir = t.testdir({}) t.beforeEach(async () => { await rimraf(dir) await mkdirp(dir) @@ -1147,7 +1140,7 @@ t.test('.. 
paths', t => { t.test('fail all stats', t => { const poop = new Error('poop') poop.code = 'EPOOP' - const dir = normPath(path.join(unpackdir, 'stat-fail')) + const dir = normPath(t.testdir({})) const { stat, fstat, lstat, statSync, fstatSync, lstatSync } = fs const unmutate = () => Object.assign(fs, { @@ -1287,12 +1280,8 @@ t.test('fail all stats', t => { t.test('fail symlink', t => { const poop = new Error('poop') poop.code = 'EPOOP' - const unmutate = mutateFS.fail('symlink', poop) - const dir = path.join(unpackdir, 'symlink-fail') - t.teardown(async () => { - unmutate() - await rimraf(dir) - }) + const dir = t.testdir({}) + t.teardown(mutateFS.fail('symlink', poop)) const warnings = [] t.beforeEach(async () => { @@ -1353,12 +1342,8 @@ t.test('fail symlink', t => { t.test('fail chmod', t => { const poop = new Error('poop') poop.code = 'EPOOP' - const unmutate = mutateFS.fail('chmod', poop) - const dir = path.join(unpackdir, 'chmod-fail') - t.teardown(async () => { - unmutate() - await rimraf(dir) - }) + const dir = t.testdir() + t.teardown(mutateFS.fail('chmod', poop)) const warnings = [] t.beforeEach(async () => { @@ -1423,8 +1408,7 @@ t.test('fail mkdir', t => { const poop = new Error('poop') poop.code = 'EPOOP' let unmutate - const dir = path.join(unpackdir, 'mkdir-fail') - t.teardown(() => rimraf(dir)) + const dir = t.testdir({}) const warnings = [] t.beforeEach(async () => { @@ -1488,12 +1472,8 @@ t.test('fail mkdir', t => { t.test('fail write', t => { const poop = new Error('poop') poop.code = 'EPOOP' - const unmutate = mutateFS.fail('write', poop) - const dir = path.join(unpackdir, 'write-fail') - t.teardown(async () => { - unmutate() - await rimraf(dir) - }) + const dir = t.testdir({}) + t.teardown(mutateFS.fail('write', poop)) const warnings = [] t.beforeEach(async () => { @@ -1544,14 +1524,11 @@ t.test('fail write', t => { }) t.test('skip existing', t => { - const dir = path.join(unpackdir, 'skip-newer') - t.teardown(() => rimraf(dir)) - const date = new Date('2011-03-27T22:16:31.000Z') - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - fs.writeFileSync(dir + '/x', 'y') + t.beforeEach(async t => { + const dir = t.testdir({ + x: 'y', + }) fs.utimesSync(dir + '/x', date, date) }) @@ -1569,6 +1546,7 @@ t.test('skip existing', t => { ]) const check = t => { + const dir = t.testdirName const st = fs.lstatSync(dir + '/x') t.equal(st.atime.toISOString(), date.toISOString()) t.equal(st.mtime.toISOString(), date.toISOString()) @@ -1578,6 +1556,7 @@ t.test('skip existing', t => { } t.test('async', t => { + const dir = t.testdirName new Unpack({ cwd: dir, keep: true, @@ -1587,6 +1566,7 @@ t.test('skip existing', t => { }) t.test('sync', t => { + const dir = t.testdirName new UnpackSync({ cwd: dir, keep: true, @@ -1598,14 +1578,9 @@ t.test('skip existing', t => { }) t.test('skip newer', t => { - const dir = path.join(unpackdir, 'skip-newer') - t.teardown(() => rimraf(dir)) - const date = new Date('2013-12-19T17:00:00.000Z') - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - fs.writeFileSync(dir + '/x', 'y') + t.beforeEach(async t => { + const dir = t.testdir({ x: 'y' }) fs.utimesSync(dir + '/x', date, date) }) @@ -1623,6 +1598,7 @@ t.test('skip newer', t => { ]) const check = t => { + const dir = t.testdirName const st = fs.lstatSync(dir + '/x') t.equal(st.atime.toISOString(), date.toISOString()) t.equal(st.mtime.toISOString(), date.toISOString()) @@ -1633,7 +1609,7 @@ t.test('skip newer', t => { t.test('async', t => { new Unpack({ - cwd: dir, + cwd: 
t.testdirName, newer: true, }) .on('close', () => check(t)) @@ -1642,7 +1618,7 @@ t.test('skip newer', t => { t.test('sync', t => { new UnpackSync({ - cwd: dir, + cwd: t.testdirName, newer: true, }).end(data) check(t) @@ -1652,14 +1628,6 @@ t.test('skip newer', t => { }) t.test('no mtime', t => { - const dir = path.join(unpackdir, 'skip-newer') - t.teardown(() => rimraf(dir)) - - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - const date = new Date('2011-03-27T22:16:31.000Z') const data = makeTar([ { @@ -1685,6 +1653,7 @@ t.test('no mtime', t => { ]) const check = t => { + const dir = t.testdirName // this may fail if it's run on March 27, 2011 const stx = fs.lstatSync(dir + '/x') t.not(stx.atime.toISOString(), date.toISOString()) @@ -1698,6 +1667,7 @@ t.test('no mtime', t => { } t.test('async', t => { + const dir = t.testdir({}) new Unpack({ cwd: dir, noMtime: true, @@ -1707,6 +1677,7 @@ t.test('no mtime', t => { }) t.test('sync', t => { + const dir = t.testdir({}) new UnpackSync({ cwd: dir, noMtime: true, @@ -1718,9 +1689,7 @@ t.test('no mtime', t => { }) t.test('unpack big enough to pause/drain', t => { - const dir = path.resolve(unpackdir, 'drain-clog') - mkdirp.sync(dir) - t.teardown(() => rimraf(dir)) + const dir = t.testdir({}) const stream = fs.createReadStream(fixtures + '/parses.tar') const u = new Unpack({ cwd: dir, @@ -1747,31 +1716,22 @@ t.test('set owner', t => { // fake it on platforms that don't have getuid const myUid = 501 const myGid = 1024 - const getuid = process.getuid - const getgid = process.getgid - process.getuid = () => myUid - process.getgid = () => myGid - t.teardown( - () => ((process.getuid = getuid), (process.getgid = getgid)), - ) + t.capture(process, 'getuid', () => myUid) + t.capture(process, 'getgid', () => myGid) // can't actually do this because it requires root, but we can // verify that chown gets called. 
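A note on the `t.capture` calls above, since they carry the whole cleanup: tap's capture swaps in a stub for the duration of the enclosing test and restores the real method automatically, replacing the manual save/override/teardown dance the old code used. A minimal sketch of the pattern (the uid value and test name are illustrative, not taken from this diff):

    import t from 'tap'

    t.test('pretend to be root', t => {
      // stubbed for this test only; tap restores the real
      // process.getuid automatically when the test ends
      t.capture(process, 'getuid', () => 0)
      t.equal(process.getuid(), 0)
      t.end()
    })
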
t.test('as root, defaults to true', t => { - const getuid = process.getuid - process.getuid = () => 0 + t.capture(process, 'getuid', () => 0) const u = new Unpack() t.equal(u.preserveOwner, true, 'preserveOwner enabled') - process.getuid = getuid t.end() }) t.test('as non-root, defaults to false', t => { - const getuid = process.getuid - process.getuid = () => 501 + t.capture(process, 'getuid', () => 501) const u = new Unpack() t.equal(u.preserveOwner, false, 'preserveOwner disabled') - process.getuid = getuid t.end() }) @@ -1840,7 +1800,6 @@ t.test('set owner', t => { ]) t.test('chown failure results in unpack failure', t => { - const dir = path.resolve(unpackdir, 'chown') const poop = new Error('expected chown failure') const un = mutateFS.fail('chown', poop) const unl = mutateFS.fail('lchown', poop) @@ -1850,15 +1809,13 @@ t.test('set owner', t => { un() unf() unl() - await rimraf(dir) }) t.test('sync', t => { - mkdirp.sync(dir) - t.teardown(() => rimraf(dir)) + const cwd = t.testdir({}) let warned = false const u = new UnpackSync({ - cwd: dir, + cwd, preserveOwner: true, onwarn: (_c, _m, er) => { if (!warned) { @@ -1872,11 +1829,10 @@ t.test('set owner', t => { }) t.test('async', t => { - mkdirp.sync(dir) - t.teardown(() => rimraf(dir)) + const cwd = t.testdir({}) let warned = false const u = new Unpack({ - cwd: dir, + cwd, preserveOwner: true, onwarn: (_c, _m, er) => { if (!warned) { @@ -1893,7 +1849,6 @@ t.test('set owner', t => { }) t.test('chown when true', t => { - const dir = path.resolve(unpackdir, 'chown') const chown = fs.chown const lchown = fs.lchown const fchown = fs.fchown @@ -1920,20 +1875,18 @@ t.test('set owner', t => { }) t.test('sync', t => { - mkdirp.sync(dir) - t.teardown(() => rimraf(dir)) + const cwd = t.testdir({}) called = 0 - const u = new UnpackSync({ cwd: dir, preserveOwner: true }) + const u = new UnpackSync({ cwd, preserveOwner: true }) u.end(data) t.ok(called >= 5, 'called chowns') t.end() }) t.test('async', t => { - mkdirp.sync(dir) - t.teardown(() => rimraf(dir)) + const cwd = t.testdir({}) called = 0 - const u = new Unpack({ cwd: dir, preserveOwner: true }) + const u = new Unpack({ cwd, preserveOwner: true }) u.end(data) u.on('close', () => { t.ok(called >= 5, 'called chowns') @@ -1945,7 +1898,6 @@ t.test('set owner', t => { }) t.test('no chown when false', t => { - const dir = path.resolve(unpackdir, 'nochown') const poop = new Error('poop') const un = mutateFS.fail('chown', poop) const unf = mutateFS.fail('fchown', poop) @@ -1954,13 +1906,10 @@ t.test('set owner', t => { un() unf() unl() - await rimraf(dir) }) - t.beforeEach(() => mkdirp(dir)) - t.afterEach(() => rimraf(dir)) - const check = t => { + const dir = t.testdirName const dirStat = fs.statSync(dir + '/foo') t.not(dirStat.uid, 2456124561) t.not(dirStat.gid, 813708013) @@ -1977,12 +1926,14 @@ t.test('set owner', t => { } t.test('sync', t => { + const dir = t.testdir({}) const u = new UnpackSync({ cwd: dir, preserveOwner: false }) u.end(data) check(t) }) t.test('async', t => { + const dir = t.testdir({}) const u = new Unpack({ cwd: dir, preserveOwner: false }) u.end(data) u.on('close', () => check(t)) @@ -2011,11 +1962,8 @@ t.test('unpack when dir is not writable', t => { '', ]) - const dir = path.resolve(unpackdir, 'nowrite-dir') - t.beforeEach(() => mkdirp(dir)) - t.afterEach(() => rimraf(dir)) - const check = t => { + const dir = t.testdirName t.equal( fs.statSync(dir + '/a').mode & 0o7777, isWindows ? 
0o666 : 0o744, @@ -2025,12 +1973,14 @@ t.test('unpack when dir is not writable', t => { } t.test('sync', t => { + const dir = t.testdir({}) const u = new UnpackSync({ cwd: dir, strict: true }) u.end(data) check(t) }) t.test('async', t => { + const dir = t.testdir({}) const u = new Unpack({ cwd: dir, strict: true }) u.end(data) u.on('close', () => check(t)) @@ -2051,21 +2001,19 @@ t.test('transmute chars on windows', t => { '', ]) - const dir = path.resolve(unpackdir, 'winchars') - t.beforeEach(() => mkdirp(dir)) - t.afterEach(() => rimraf(dir)) - const hex = 'ef80bcef81bcef80beef80bfef80ba2e747874' const uglyName = Buffer.from(hex, 'hex').toString() - const ugly = path.resolve(dir, uglyName) const check = t => { + const dir = t.testdirName + const ugly = path.resolve(dir, uglyName) t.same(fs.readdirSync(dir), [uglyName]) t.equal(fs.readFileSync(ugly, 'utf8'), '<|>?:') t.end() } t.test('async', t => { + const dir = t.testdir({}) const u = new Unpack({ cwd: dir, win32: true, @@ -2075,6 +2023,7 @@ t.test('transmute chars on windows', t => { }) t.test('sync', t => { + const dir = t.testdir({}) const u = new UnpackSync({ cwd: dir, win32: true, @@ -2119,8 +2068,6 @@ t.test('safely transmute chars on windows with absolutes', t => { }) t.test('use explicit chmod when required by umask', t => { - const basedir = path.resolve(unpackdir, 'umask-chmod') - const data = makeTar([ { path: 'x/y/z', @@ -2132,16 +2079,16 @@ t.test('use explicit chmod when required by umask', t => { ]) const check = async t => { - const st = fs.statSync(basedir + '/x/y/z') + const cwd = t.testdirName + const st = fs.statSync(cwd + '/x/y/z') t.equal(st.mode & 0o777, isWindows ? 0o666 : 0o775) - await rimraf(basedir) t.end() } t.test('async', t => { - mkdirp.sync(basedir) + const cwd = t.testdir({}) const unpack = new Unpack({ - cwd: basedir, + cwd, chmod: true, processUmask: 0o22, }) @@ -2150,9 +2097,9 @@ t.test('use explicit chmod when required by umask', t => { }) return t.test('sync', t => { - mkdirp.sync(basedir) + const cwd = t.testdir({}) const unpack = new UnpackSync({ - cwd: basedir, + cwd, chmod: true, processUmask: 0o22, }) @@ -2166,8 +2113,6 @@ t.test('dont use explicit chmod if chmod flag not set', t => { throw new Error('should not call process.umask()') }) - const basedir = path.resolve(unpackdir, 'umask-no-chmod') - const data = makeTar([ { path: 'x/y/z', @@ -2179,29 +2124,29 @@ t.test('dont use explicit chmod if chmod flag not set', t => { ]) const check = async t => { - const st = fs.statSync(basedir + '/x/y/z') + const cwd = t.testdirName + const st = fs.statSync(cwd + '/x/y/z') t.equal(st.mode & 0o777, isWindows ? 
0o666 : 0o755) - await rimraf(basedir) t.end() } t.test('async', t => { - mkdirp.sync(basedir) - const unpack = new Unpack({ cwd: basedir }) + const cwd = t.testdir({}) + const unpack = new Unpack({ cwd }) unpack.on('close', () => check(t)) unpack.end(data) }) return t.test('sync', t => { - mkdirp.sync(basedir) - const unpack = new UnpackSync({ cwd: basedir }) + const cwd = t.testdir({}) + const unpack = new UnpackSync({ cwd }) unpack.end(data) check(t) }) }) t.test('chown implicit dirs and also the entries', t => { - const basedir = path.resolve(unpackdir, 'chownr') + const basedir = t.testdir({}) // club these so that the test can run as non-root const chown = fs.chown @@ -2340,9 +2285,7 @@ t.test('chown implicit dirs and also the entries', t => { }) t.test('bad cwd setting', t => { - const basedir = path.resolve(unpackdir, 'bad-cwd') - mkdirp.sync(basedir) - t.teardown(() => rimraf(basedir)) + const basedir = t.testdir({}) const cases = [ // the cwd itself @@ -2435,9 +2378,6 @@ t.test('bad cwd setting', t => { }) t.test('transform', t => { - const basedir = path.resolve(unpackdir, 'transform') - t.teardown(() => rimraf(basedir)) - const cases = { 'emptypax.tar': { '🌟.txt': '🌟✧✩⭐︎✪✫✬✭✮⚝✯✰✵✶✷✸✹❂⭑⭒★☆✡☪✴︎✦✡️🔯✴️🌠\n', @@ -2488,13 +2428,9 @@ t.test('transform', t => { tarfiles.forEach(tarfile => { t.test(tarfile, t => { const tf = path.resolve(tars, tarfile) - const dir = path.resolve(basedir, tarfile) - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) const check = t => { + const dir = t.testdirName const expect = cases[tarfile] Object.keys(expect).forEach(file => { const f = path.resolve(dir, file) @@ -2508,6 +2444,7 @@ t.test('transform', t => { t.test('async unpack', t => { t.plan(2) t.test('strict', t => { + const dir = t.testdir({}) const unpack = new Unpack({ cwd: dir, strict: true, @@ -2517,6 +2454,7 @@ t.test('transform', t => { eos(unpack, () => check(t)) }) t.test('loose', t => { + const dir = t.testdir({}) const unpack = new Unpack({ cwd: dir, transform: txFn }) fs.createReadStream(tf).pipe(unpack) eos(unpack, () => check(t)) @@ -2526,6 +2464,7 @@ t.test('transform', t => { t.test('sync unpack', t => { t.plan(2) t.test('strict', t => { + const dir = t.testdir({}) const unpack = new UnpackSync({ cwd: dir, strict: true, @@ -2535,6 +2474,7 @@ t.test('transform', t => { check(t) }) t.test('loose', t => { + const dir = t.testdir({}) const unpack = new UnpackSync({ cwd: dir, transform: txFn }) unpack.end(fs.readFileSync(tf)) check(t) @@ -2545,10 +2485,6 @@ t.test('transform', t => { }) t.test('transform error', t => { - const dir = path.resolve(unpackdir, 'transform-error') - mkdirp.sync(dir) - t.teardown(() => rimraf(dir)) - const tarfile = path.resolve(tars, 'body-byte-counts.tar') const tardata = fs.readFileSync(tarfile) const poop = new Error('poop') @@ -2562,6 +2498,7 @@ t.test('transform error', t => { t.test('sync unpack', t => { t.test('strict', t => { + const dir = t.testdir({}) const unpack = new UnpackSync({ cwd: dir, strict: true, @@ -2578,6 +2515,7 @@ t.test('transform error', t => { t.end() }) t.test('loose', t => { + const dir = t.testdir({}) const unpack = new UnpackSync({ cwd: dir, transform: txFn }) const expect = 3 let actual = 0 @@ -2592,6 +2530,7 @@ t.test('transform error', t => { t.end() }) t.test('async unpack', t => { + const dir = t.testdir({}) // the last error is about the folder being deleted, just ignore that one t.test('strict', t => { const unpack = new Unpack({ @@ -2608,6 +2547,7 @@ t.test('transform error', t => { 
unpack.end(tardata) }) t.test('loose', t => { + const dir = t.testdir({}) const unpack = new Unpack({ cwd: dir, transform: txFn }) t.plan(3) t.teardown(() => unpack.removeAllListeners('warn')) @@ -2622,23 +2562,16 @@ t.test('transform error', t => { t.test('futimes/fchown failures', t => { const archive = path.resolve(tars, 'utf8.tar') - const dir = path.resolve(unpackdir, 'futimes-fchown-fails') const tardata = fs.readFileSync(archive) const poop = new Error('poop') const second = new Error('second error') - t.beforeEach(async () => { - await rimraf(dir) - await mkdirp(dir) - }) - - t.teardown(() => rimraf(dir)) - const methods = ['utimes', 'chown'] methods.forEach(method => { const fc = method === 'chown' t.test(method + ' fallback', t => { + const dir = t.testdir({}) t.teardown(mutateFS.fail('f' + method, poop)) // forceChown will fail on systems where the user is not root // and/or the uid/gid in the archive aren't valid. We're just @@ -2694,6 +2627,7 @@ t.test('futimes/fchown failures', t => { t.test('async unpack', t => { t.plan(2) t.test('strict', t => { + const dir = t.testdir({}) const unpack = new Unpack({ cwd: dir, strict: true, @@ -2704,6 +2638,7 @@ t.test('futimes/fchown failures', t => { unpack.end(tardata) }) t.test('loose', t => { + const dir = t.testdir({}) const unpack = new Unpack({ cwd: dir, forceChown: fc }) t.plan(3) unpack.on('warn', (_code, _m, er) => t.equal(er, poop)) @@ -2713,6 +2648,7 @@ t.test('futimes/fchown failures', t => { t.test('sync unpack', t => { t.plan(2) t.test('strict', t => { + const dir = t.testdir({}) const unpack = new UnpackSync({ cwd: dir, strict: true, @@ -2723,6 +2659,7 @@ t.test('futimes/fchown failures', t => { unpack.end(tardata) }) t.test('loose', t => { + const dir = t.testdir({}) const unpack = new UnpackSync({ cwd: dir, forceChown: fc }) t.plan(3) unpack.on('warn', (_c, _m, er) => t.equal(er, poop)) @@ -2736,10 +2673,6 @@ t.test('futimes/fchown failures', t => { }) t.test('onentry option is preserved', t => { - const basedir = path.resolve(unpackdir, 'onentry-method') - mkdirp.sync(basedir) - t.teardown(() => rimraf(basedir)) - let oecalls = 0 const onentry = _entry => oecalls++ const data = makeTar([ @@ -2772,15 +2705,14 @@ t.test('onentry option is preserved', t => { } t.test('sync', t => { - const dir = path.join(basedir, 'sync') - mkdirp.sync(dir) + const dir = t.testdir({}) const unpack = new UnpackSync({ cwd: dir, onentry }) unpack.end(data) check(t) }) t.test('async', t => { - const dir = path.join(basedir, 'async') + const dir = t.testdir({}) mkdirp.sync(dir) const unpack = new Unpack({ cwd: dir, onentry }) unpack.on('finish', () => check(t)) @@ -2791,10 +2723,6 @@ t.test('onentry option is preserved', t => { }) t.test('do not reuse hardlinks, only nlink=1 files', t => { - const basedir = path.resolve(unpackdir, 'hardlink-reuse') - mkdirp.sync(basedir) - t.teardown(() => rimraf(basedir)) - const now = new Date('2018-04-30T18:30:39.025Z') const data = makeTar([ @@ -2831,25 +2759,24 @@ t.test('do not reuse hardlinks, only nlink=1 files', t => { } const check = t => { + const dir = t.testdirName for (const f in checks) { - t.equal( - fs.readFileSync(basedir + '/' + f, 'utf8'), - checks[f], - f, - ) - t.equal(fs.statSync(basedir + '/' + f).nlink, 1, f) + t.equal(fs.readFileSync(dir + '/' + f, 'utf8'), checks[f], f) + t.equal(fs.statSync(dir + '/' + f).nlink, 1, f) } t.end() } t.test('async', t => { - const u = new Unpack({ cwd: basedir }) + const dir = t.testdir({}) + const u = new Unpack({ cwd: dir }) u.on('close', () => 
check(t)) u.end(data) }) t.test('sync', t => { - const u = new UnpackSync({ cwd: basedir }) + const dir = t.testdir({}) + const u = new UnpackSync({ cwd: dir }) u.end(data) check(t) }) @@ -2860,19 +2787,18 @@ t.test('do not reuse hardlinks, only nlink=1 files', t => { t.test('trying to unpack a non-zlib gzip file should fail', t => { const data = Buffer.from('hello this is not gzip data') const dataGzip = Buffer.concat([Buffer.from([0x1f, 0x8b]), data]) - const basedir = path.resolve(unpackdir, 'bad-archive') + t.test('abort if gzip has an error', t => { - t.plan(2) const expect = { message: /^zlib/, errno: Number, code: /^Z/, recoverable: false, - cwd: normPath(basedir), + cwd: normPath(t.testdirName), tarCode: 'TAR_ABORT', } const opts = { - cwd: basedir, + cwd: t.testdir({}), gzip: true, } new Unpack(opts) @@ -2887,6 +2813,7 @@ t.test('trying to unpack a non-zlib gzip file should fail', t => { 'sync throws', { skip }, ) + t.end() }) t.test('bad archive if no gzip', t => { @@ -2895,9 +2822,9 @@ t.test('trying to unpack a non-zlib gzip file should fail', t => { tarCode: 'TAR_BAD_ARCHIVE', recoverable: false, } - const opts = { cwd: basedir } + const opts = { cwd: t.testdir({}) } new Unpack(opts) - .on('error', er => t.match(er, expect, 'async emits')) + .once('error', er => t.match(er, expect, 'async emits')) .end(data) t.throws( () => new UnpackSync(opts).end(data), @@ -2911,17 +2838,9 @@ t.test('trying to unpack a non-zlib gzip file should fail', t => { t.test('handle errors on fs.close', t => { const poop = new Error('poop') - const { close, closeSync } = fs // have to actually close them, or else windows gets mad - fs.close = (fd, cb) => close(fd, () => cb(poop)) - fs.closeSync = fd => { - closeSync(fd) - throw poop - } - t.teardown(() => Object.assign(fs, { close, closeSync })) - const dir = path.resolve(unpackdir, 'close-fail') - mkdirp.sync(dir + '/sync') - mkdirp.sync(dir + '/async') + t.teardown(mutateFS.fail('close', poop)) + const data = makeTar([ { path: 'file', @@ -2937,18 +2856,24 @@ t.test('handle errors on fs.close', t => { ]) t.plan(2) - new Unpack({ cwd: dir + '/async', strict: true }) - .on('error', er => t.equal(er, poop, 'async')) - .end(data) - t.throws( - () => - new UnpackSync({ - cwd: normPath(dir + '/sync'), - strict: true, - }).end(data), - poop, - 'sync', - ) + t.test('async', t => { + new Unpack({ cwd: t.testdir({}), strict: true }) + .on('error', er => t.equal(er, poop, 'async')) + .on('end', () => t.end()) + .end(data) + }) + t.test('sync', t => { + t.throws( + () => + new UnpackSync({ + cwd: normPath(t.testdir({})), + strict: true, + }).end(data), + poop, + 'sync', + ) + t.end() + }) }) t.test( @@ -2957,9 +2882,6 @@ t.test( skip: isWindows && 'symlinks not fully supported', }, t => { - const dir = path.resolve(unpackdir, 'dir-cache-error') - mkdirp.sync(dir + '/sync/y') - mkdirp.sync(dir + '/async/y') const data = makeTar([ { path: 'x', @@ -2996,14 +2918,14 @@ t.test( t.end() } t.test('async', t => { - const path = dir + '/async' + const path = t.testdir({ y: {} }) new Unpack({ cwd: path }) .on('warn', (code, msg) => (WARNINGS[path] = [code, msg])) .on('end', () => check(t, path)) .end(data) }) t.test('sync', t => { - const path = dir + '/sync' + const path = t.testdir({ y: {} }) new UnpackSync({ cwd: path }) .on('warn', (code, msg) => (WARNINGS[path] = [code, msg])) .end(data) @@ -3013,9 +2935,6 @@ t.test( ) t.test('using strip option when top level file exists', t => { - const dir = path.resolve(unpackdir, 'strip-with-top-file') - mkdirp.sync(dir + 
'/sync/y') - mkdirp.sync(dir + '/async/y') const data = makeTar([ { path: 'top', @@ -3054,13 +2973,13 @@ t.test('using strip option when top level file exists', t => { t.end() } t.test('async', t => { - const path = dir + '/async' + const path = t.testdir({ y: {} }) new Unpack({ cwd: path, strip: 1 }) .on('end', () => check(t, path)) .end(data) }) t.test('sync', t => { - const path = dir + '/sync' + const path = t.testdir({ y: {} }) new UnpackSync({ cwd: path, strip: 1 }).end(data) check(t, path) }) @@ -3110,11 +3029,7 @@ t.test('handle EPERMs when creating symlinks', t => { '', ]) - const dir = path.resolve(unpackdir, 'eperm-symlinks') - mkdirp.sync(`${dir}/sync`) - mkdirp.sync(`${dir}/async`) - - const check = path => { + const check = (t, path) => { t.match( WARNINGS, [ @@ -3134,21 +3049,29 @@ t.test('handle EPERMs when creating symlinks', t => { } const WARNINGS = [] - const u = new Unpack({ - cwd: `${dir}/async`, - onwarn: (code, msg, _er) => WARNINGS.push([code, msg]), + t.test('async', t => { + const dir = t.testdir({}) + const u = new Unpack({ + cwd: dir, + onwarn: (code, msg, _er) => WARNINGS.push([code, msg]), + }) + u.on('end', () => { + check(t, dir) + t.end() + }) + u.end(data) }) - u.on('end', () => { - check(`${dir}/async`) + t.test('sync', t => { + const dir = t.testdir({}) const u = new UnpackSync({ - cwd: `${dir}/sync`, + cwd: dir, onwarn: (code, msg, _er) => WARNINGS.push([code, msg]), }) u.end(data) - check(`${dir}/sync`) + check(t, dir) t.end() }) - u.end(data) + t.end() }) t.test('close fd when error writing', t => { @@ -3186,8 +3109,8 @@ t.test('close fd when error writing', t => { }), ) const WARNINGS = [] - const dir = path.resolve(unpackdir, 'close-on-write-error') - mkdirp.sync(dir) + + const dir = t.testdir({}) const unpack = new Unpack({ cwd: dir, onwarn: (code, msg) => WARNINGS.push([code, msg]), @@ -3225,15 +3148,14 @@ t.test('close fd when error setting mtime', t => { const CLOSES = [] const OPENS = {} const { open } = fs - t.teardown(() => (fs.open = open)) - fs.open = (...args) => { + t.capture(fs, 'open', (...args) => { const cb = args.pop() args.push((er, fd) => { OPENS[args[0]] = fd cb(er, fd) }) return open.call(fs, ...args) - } + }) t.teardown( mutateFS.mutateArgs('close', ([fd]) => { CLOSES.push(fd) @@ -3241,8 +3163,7 @@ t.test('close fd when error setting mtime', t => { }), ) const WARNINGS = [] - const dir = path.resolve(unpackdir, 'close-on-futimes-error') - mkdirp.sync(dir) + const dir = t.testdir({}) const unpack = new Unpack({ cwd: dir, onwarn: (code, msg) => WARNINGS.push([code, msg]), @@ -3272,11 +3193,8 @@ t.test('do not hang on large files that fail to open()', t => { '', ]) t.teardown(mutateFS.fail('open', new Error('nope'))) - const dir = path.resolve( - unpackdir, - 'no-hang-for-large-file-failures', - ) - mkdirp.sync(dir) + const dir = t.testdir({}) + const WARNINGS = [] const unpack = new Unpack({ cwd: dir, From 314ec7e64245f86663c8ca20fad05ebb5a390d80 Mon Sep 17 00:00:00 2001 From: isaacs Date: Wed, 10 Apr 2024 10:26:05 -0700 Subject: [PATCH 09/12] list: close file even if no error thrown Fix: #404 --- src/list.ts | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/list.ts b/src/list.ts index 7ba43143..9173edaa 100644 --- a/src/list.ts +++ b/src/list.ts @@ -130,7 +130,6 @@ const filesFilter = (opt: TarOptions, files: string[]) => { const listFileSync = (opt: TarOptionsSyncFile) => { const p = list_(opt) const file = opt.file - let threw = true let fd try { const stat = fs.statSync(file) @@ -148,9 +147,8 @@ const 
listFileSync = (opt: TarOptionsSyncFile) => { } p.end() } - threw = false } finally { - if (threw && fd) { + if (typeof fd === 'number') { try { fs.closeSync(fd) /* c8 ignore next */ From 2d89a4edc3dd76aef0bde3a9913cdb4f9c9d3b77 Mon Sep 17 00:00:00 2001 From: isaacs Date: Wed, 10 Apr 2024 10:59:50 -0700 Subject: [PATCH 10/12] Properly handle long linkpath in PaxHeader tar-stream creates some interesting tarballs, but they are technically allowed, and should be handled properly. Fix: #312 Also, this cleans up a flaky race condition in the unpack test. --- src/parse.ts | 1 + test/extract.js | 14 ++++++++++++++ test/fixtures/long-linkname.tar | Bin 0 -> 2560 bytes test/unpack.js | 8 ++++++-- 4 files changed, 21 insertions(+), 2 deletions(-) create mode 100644 test/fixtures/long-linkname.tar diff --git a/src/parse.ts b/src/parse.ts index d0b0781e..0147f786 100644 --- a/src/parse.ts +++ b/src/parse.ts @@ -199,6 +199,7 @@ export class Parser extends EE implements Warner { }) } else if ( !/^(Symbolic)?Link$/.test(type) && + !/^(Global)?ExtendedHeader$/.test(type) && header.linkpath ) { this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { diff --git a/test/extract.js b/test/extract.js index 8fe8eff1..ce2afeb7 100644 --- a/test/extract.js +++ b/test/extract.js @@ -415,3 +415,17 @@ t.test('brotli', async t => { t.end() }) }) + +t.test('verify long linkname is not a problem', async t => { + // See: https://github.com/isaacs/node-tar/issues/312 + const file = path.resolve(__dirname, 'fixtures/long-linkname.tar') + t.test('sync', t => { + x({ sync: true, strict: true, file, C: t.testdir({}) }) + t.ok(fs.lstatSync(t.testdirName + '/test').isSymbolicLink()) + t.end() + }) + t.test('async', async t => { + await x({ file, C: t.testdir({}), strict: true }) + t.ok(fs.lstatSync(t.testdirName + '/test').isSymbolicLink()) + }) +}) diff --git a/test/fixtures/long-linkname.tar b/test/fixtures/long-linkname.tar new file mode 100644 index 0000000000000000000000000000000000000000..34c0ea57d17b34911ef760553c67b7428d611138 GIT binary patch literal 2560 zcmWGYtnf%pOi3+bpdBzUFfcPQQD6YlK!8pg7#NuvD;S!X8JL=yn3R(TnHw1@ zFjNH4!EvRhemSotLq!yQO8Jb!sR?2=#j;MSp#qW2Neov`QOX{nE#FF;kXf=|3_A8ANB2M2#kgR HatHtb { if (!warned) { warned = true t.equal(er, poop) - t.end() } }, }) u.end(data) + t.equal(warned, true) + t.end() }) t.test('async', t => { @@ -1838,10 +1839,13 @@ t.test('set owner', t => { if (!warned) { warned = true t.equal(er, poop) - t.end() } }, }) + u.on('finish', () => { + t.equal(warned, true) + t.end() + }) u.end(data) }) From 9a260c2dbaf9090c34872944393dfd854940c7c6 Mon Sep 17 00:00:00 2001 From: isaacs Date: Wed, 10 Apr 2024 11:08:26 -0700 Subject: [PATCH 11/12] test verifying #398 is fixed --- tap-snapshots/test/unpack.js.test.cjs | 18 ++++++++++++++ test/unpack.js | 34 ++++++++++++++++++++++++++- 2 files changed, 51 insertions(+), 1 deletion(-) create mode 100644 tap-snapshots/test/unpack.js.test.cjs diff --git a/tap-snapshots/test/unpack.js.test.cjs b/tap-snapshots/test/unpack.js.test.cjs new file mode 100644 index 00000000..2446eb2e --- /dev/null +++ b/tap-snapshots/test/unpack.js.test.cjs @@ -0,0 +1,18 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! 
+ */ +'use strict' +exports[`test/unpack.js > TAP > ignore self-referential hardlinks > async > must match snapshot 1`] = ` +Array [ + "ENOENT: no such file or directory, link '{CWD}/.tap/fixtures/test-unpack.js-ignore-self-referential-hardlinks-async/autolink' -> '{CWD}/.tap/fixtures/test-unpack.js-ignore-self-referential-hardlinks-async/autolink'", +] +` + +exports[`test/unpack.js > TAP > ignore self-referential hardlinks > sync > must match snapshot 1`] = ` +Array [ + "ENOENT: no such file or directory, link '{CWD}/.tap/fixtures/test-unpack.js-ignore-self-referential-hardlinks-sync/autolink' -> '{CWD}/.tap/fixtures/test-unpack.js-ignore-self-referential-hardlinks-sync/autolink'", +] +` diff --git a/test/unpack.js b/test/unpack.js index 2f167a30..bcffa0c9 100644 --- a/test/unpack.js +++ b/test/unpack.js @@ -1,6 +1,6 @@ import { Unpack, UnpackSync } from '../dist/esm/unpack.js' -import fs from 'fs' +import fs, { readdirSync } from 'fs' import { Minipass } from 'minipass' import * as z from 'minizlib' import path from 'path' @@ -3531,3 +3531,35 @@ t.test('excessively deep subfolder nesting', async t => { check(t, 64) }) }) + +t.test('ignore self-referential hardlinks', async t => { + const data = makeTar([ + { + path: 'autolink', + linkpath: './autolink', + type: 'Link', + }, + ]) + const check = (t, warnings) => { + t.matchSnapshot(warnings) + t.strictSame(readdirSync(t.testdirName), [], 'nothing extracted') + t.end() + } + t.test('async', t => { + const cwd = t.testdir({}) + const warnings = [] + const u = new Unpack({ cwd, onwarn: (_, m) => warnings.push(m) }) + u.on('end', () => check(t, warnings)) + u.end(data) + }) + t.test('sync', t => { + const cwd = t.testdir({}) + const warnings = [] + const u = new UnpackSync({ + cwd, + onwarn: (_, m) => warnings.push(m), + }) + u.end(data) + check(t, warnings) + }) +}) From 957da7506cc594f24f54b884305718927194fb73 Mon Sep 17 00:00:00 2001 From: isaacs Date: Wed, 10 Apr 2024 12:06:43 -0700 Subject: [PATCH 12/12] remove old lib folder --- lib/create.js | 111 ---- lib/extract.js | 113 ----- lib/get-write-flag.js | 20 - lib/header.js | 304 ----------- lib/high-level-opt.js | 29 -- lib/large-numbers.js | 104 ---- lib/list.js | 139 ----- lib/mkdir.js | 229 --------- lib/mode-fix.js | 27 - lib/normalize-unicode.js | 12 - lib/normalize-windows-path.js | 8 - lib/pack.js | 432 ---------------- lib/parse.js | 552 -------------------- lib/path-reservations.js | 163 ------ lib/pax.js | 150 ------ lib/read-entry.js | 107 ---- lib/replace.js | 246 --------- lib/strip-absolute-path.js | 24 - lib/strip-trailing-slashes.js | 13 - lib/types.js | 44 -- lib/unpack.js | 923 ---------------------------------- lib/update.js | 40 -- lib/warn-mixin.js | 24 - lib/winchars.js | 23 - lib/write-entry.js | 546 -------------------- 25 files changed, 4383 deletions(-) delete mode 100644 lib/create.js delete mode 100644 lib/extract.js delete mode 100644 lib/get-write-flag.js delete mode 100644 lib/header.js delete mode 100644 lib/high-level-opt.js delete mode 100644 lib/large-numbers.js delete mode 100644 lib/list.js delete mode 100644 lib/mkdir.js delete mode 100644 lib/mode-fix.js delete mode 100644 lib/normalize-unicode.js delete mode 100644 lib/normalize-windows-path.js delete mode 100644 lib/pack.js delete mode 100644 lib/parse.js delete mode 100644 lib/path-reservations.js delete mode 100644 lib/pax.js delete mode 100644 lib/read-entry.js delete mode 100644 lib/replace.js delete mode 100644 lib/strip-absolute-path.js delete mode 100644 lib/strip-trailing-slashes.js delete 
mode 100644 lib/types.js delete mode 100644 lib/unpack.js delete mode 100644 lib/update.js delete mode 100644 lib/warn-mixin.js delete mode 100644 lib/winchars.js delete mode 100644 lib/write-entry.js diff --git a/lib/create.js b/lib/create.js deleted file mode 100644 index 9c860d4e..00000000 --- a/lib/create.js +++ /dev/null @@ -1,111 +0,0 @@ -'use strict' - -// tar -c -const hlo = require('./high-level-opt.js') - -const Pack = require('./pack.js') -const fsm = require('fs-minipass') -const t = require('./list.js') -const path = require('path') - -module.exports = (opt_, files, cb) => { - if (typeof files === 'function') { - cb = files - } - - if (Array.isArray(opt_)) { - files = opt_, opt_ = {} - } - - if (!files || !Array.isArray(files) || !files.length) { - throw new TypeError('no files or directories specified') - } - - files = Array.from(files) - - const opt = hlo(opt_) - - if (opt.sync && typeof cb === 'function') { - throw new TypeError('callback not supported for sync tar functions') - } - - if (!opt.file && typeof cb === 'function') { - throw new TypeError('callback only supported with file option') - } - - return opt.file && opt.sync ? createFileSync(opt, files) - : opt.file ? createFile(opt, files, cb) - : opt.sync ? createSync(opt, files) - : create(opt, files) -} - -const createFileSync = (opt, files) => { - const p = new Pack.Sync(opt) - const stream = new fsm.WriteStreamSync(opt.file, { - mode: opt.mode || 0o666, - }) - p.pipe(stream) - addFilesSync(p, files) -} - -const createFile = (opt, files, cb) => { - const p = new Pack(opt) - const stream = new fsm.WriteStream(opt.file, { - mode: opt.mode || 0o666, - }) - p.pipe(stream) - - const promise = new Promise((res, rej) => { - stream.on('error', rej) - stream.on('close', res) - p.on('error', rej) - }) - - addFilesAsync(p, files) - - return cb ? 
promise.then(cb, cb) : promise -} - -const addFilesSync = (p, files) => { - files.forEach(file => { - if (file.charAt(0) === '@') { - t({ - file: path.resolve(p.cwd, file.slice(1)), - sync: true, - noResume: true, - onentry: entry => p.add(entry), - }) - } else { - p.add(file) - } - }) - p.end() -} - -const addFilesAsync = (p, files) => { - while (files.length) { - const file = files.shift() - if (file.charAt(0) === '@') { - return t({ - file: path.resolve(p.cwd, file.slice(1)), - noResume: true, - onentry: entry => p.add(entry), - }).then(_ => addFilesAsync(p, files)) - } else { - p.add(file) - } - } - p.end() -} - -const createSync = (opt, files) => { - const p = new Pack.Sync(opt) - addFilesSync(p, files) - return p -} - -const create = (opt, files) => { - const p = new Pack(opt) - addFilesAsync(p, files) - return p -} diff --git a/lib/extract.js b/lib/extract.js deleted file mode 100644 index 54767982..00000000 --- a/lib/extract.js +++ /dev/null @@ -1,113 +0,0 @@ -'use strict' - -// tar -x -const hlo = require('./high-level-opt.js') -const Unpack = require('./unpack.js') -const fs = require('fs') -const fsm = require('fs-minipass') -const path = require('path') -const stripSlash = require('./strip-trailing-slashes.js') - -module.exports = (opt_, files, cb) => { - if (typeof opt_ === 'function') { - cb = opt_, files = null, opt_ = {} - } else if (Array.isArray(opt_)) { - files = opt_, opt_ = {} - } - - if (typeof files === 'function') { - cb = files, files = null - } - - if (!files) { - files = [] - } else { - files = Array.from(files) - } - - const opt = hlo(opt_) - - if (opt.sync && typeof cb === 'function') { - throw new TypeError('callback not supported for sync tar functions') - } - - if (!opt.file && typeof cb === 'function') { - throw new TypeError('callback only supported with file option') - } - - if (files.length) { - filesFilter(opt, files) - } - - return opt.file && opt.sync ? extractFileSync(opt) - : opt.file ? extractFile(opt, cb) - : opt.sync ? extractSync(opt) - : extract(opt) -} - -// construct a filter that limits the file entries listed -// include child entries if a dir is included -const filesFilter = (opt, files) => { - const map = new Map(files.map(f => [stripSlash(f), true])) - const filter = opt.filter - - const mapHas = (file, r) => { - const root = r || path.parse(file).root || '.' - const ret = file === root ? false - : map.has(file) ? map.get(file) - : mapHas(path.dirname(file), root) - - map.set(file, ret) - return ret - } - - opt.filter = filter - ? 
(file, entry) => filter(file, entry) && mapHas(stripSlash(file)) - : file => mapHas(stripSlash(file)) -} - -const extractFileSync = opt => { - const u = new Unpack.Sync(opt) - - const file = opt.file - const stat = fs.statSync(file) - // This trades a zero-byte read() syscall for a stat - // However, it will usually result in less memory allocation - const readSize = opt.maxReadSize || 16 * 1024 * 1024 - const stream = new fsm.ReadStreamSync(file, { - readSize: readSize, - size: stat.size, - }) - stream.pipe(u) -} - -const extractFile = (opt, cb) => { - const u = new Unpack(opt) - const readSize = opt.maxReadSize || 16 * 1024 * 1024 - - const file = opt.file - const p = new Promise((resolve, reject) => { - u.on('error', reject) - u.on('close', resolve) - - // This trades a zero-byte read() syscall for a stat - // However, it will usually result in less memory allocation - fs.stat(file, (er, stat) => { - if (er) { - reject(er) - } else { - const stream = new fsm.ReadStream(file, { - readSize: readSize, - size: stat.size, - }) - stream.on('error', reject) - stream.pipe(u) - } - }) - }) - return cb ? p.then(cb, cb) : p -} - -const extractSync = opt => new Unpack.Sync(opt) - -const extract = opt => new Unpack(opt) diff --git a/lib/get-write-flag.js b/lib/get-write-flag.js deleted file mode 100644 index e8695999..00000000 --- a/lib/get-write-flag.js +++ /dev/null @@ -1,20 +0,0 @@ -// Get the appropriate flag to use for creating files -// We use fmap on Windows platforms for files less than -// 512kb. This is a fairly low limit, but avoids making -// things slower in some cases. Since most of what this -// library is used for is extracting tarballs of many -// relatively small files in npm packages and the like, -// it can be a big boost on Windows platforms. -// Only supported in Node v12.9.0 and above. -const platform = process.env.__FAKE_PLATFORM__ || process.platform -const isWindows = platform === 'win32' -const fs = global.__FAKE_TESTING_FS__ || require('fs') - -/* istanbul ignore next */ -const { O_CREAT, O_TRUNC, O_WRONLY, UV_FS_O_FILEMAP = 0 } = fs.constants - -const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP -const fMapLimit = 512 * 1024 -const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY -module.exports = !fMapEnabled ? () => 'w' - : size => size < fMapLimit ? fMapFlag : 'w' diff --git a/lib/header.js b/lib/header.js deleted file mode 100644 index 411d5e45..00000000 --- a/lib/header.js +++ /dev/null @@ -1,304 +0,0 @@ -'use strict' -// parse a 512-byte header block to a data object, or vice-versa -// encode returns `true` if a pax extended header is needed, because -// the data could not be faithfully encoded in a simple header. -// (Also, check header.needPax to see if it needs a pax header.) 
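Worth spelling out the checksum rule this header comment glosses over, since the loops just below implement it: a ustar checksum sums all 512 bytes of the block while treating the 8-byte cksum field itself (offsets 148-155) as ASCII spaces, which is why the sum starts at 8 * 0x20 and the two loops skip those offsets. A self-contained sketch of the same computation (headerChecksum is an illustrative name, not part of this module):

    // sum a 512-byte header block, counting the cksum field at
    // offsets 148-155 as eight space (0x20) bytes
    // (illustrative helper, not part of lib/header.js)
    const headerChecksum = block => {
      let sum = 8 * 0x20
      for (let i = 0; i < 512; i++) {
        if (i < 148 || i >= 156) {
          sum += block[i]
        }
      }
      return sum
    }
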
- -const types = require('./types.js') -const pathModule = require('path').posix -const large = require('./large-numbers.js') - -const SLURP = Symbol('slurp') -const TYPE = Symbol('type') - -class Header { - constructor (data, off, ex, gex) { - this.cksumValid = false - this.needPax = false - this.nullBlock = false - - this.block = null - this.path = null - this.mode = null - this.uid = null - this.gid = null - this.size = null - this.mtime = null - this.cksum = null - this[TYPE] = '0' - this.linkpath = null - this.uname = null - this.gname = null - this.devmaj = 0 - this.devmin = 0 - this.atime = null - this.ctime = null - - if (Buffer.isBuffer(data)) { - this.decode(data, off || 0, ex, gex) - } else if (data) { - this.set(data) - } - } - - decode (buf, off, ex, gex) { - if (!off) { - off = 0 - } - - if (!buf || !(buf.length >= off + 512)) { - throw new Error('need 512 bytes for header') - } - - this.path = decString(buf, off, 100) - this.mode = decNumber(buf, off + 100, 8) - this.uid = decNumber(buf, off + 108, 8) - this.gid = decNumber(buf, off + 116, 8) - this.size = decNumber(buf, off + 124, 12) - this.mtime = decDate(buf, off + 136, 12) - this.cksum = decNumber(buf, off + 148, 12) - - // if we have extended or global extended headers, apply them now - // See https://github.com/npm/node-tar/pull/187 - this[SLURP](ex) - this[SLURP](gex, true) - - // old tar versions marked dirs as a file with a trailing / - this[TYPE] = decString(buf, off + 156, 1) - if (this[TYPE] === '') { - this[TYPE] = '0' - } - if (this[TYPE] === '0' && this.path.slice(-1) === '/') { - this[TYPE] = '5' - } - - // tar implementations sometimes incorrectly put the stat(dir).size - // as the size in the tarball, even though Directory entries are - // not able to have any body at all. In the very rare chance that - // it actually DOES have a body, we weren't going to do anything with - // it anyway, and it'll just be a warning about an invalid header. - if (this[TYPE] === '5') { - this.size = 0 - } - - this.linkpath = decString(buf, off + 157, 100) - if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') { - this.uname = decString(buf, off + 265, 32) - this.gname = decString(buf, off + 297, 32) - this.devmaj = decNumber(buf, off + 329, 8) - this.devmin = decNumber(buf, off + 337, 8) - if (buf[off + 475] !== 0) { - // definitely a prefix, definitely >130 chars. - const prefix = decString(buf, off + 345, 155) - this.path = prefix + '/' + this.path - } else { - const prefix = decString(buf, off + 345, 130) - if (prefix) { - this.path = prefix + '/' + this.path - } - this.atime = decDate(buf, off + 476, 12) - this.ctime = decDate(buf, off + 488, 12) - } - } - - let sum = 8 * 0x20 - for (let i = off; i < off + 148; i++) { - sum += buf[i] - } - - for (let i = off + 156; i < off + 512; i++) { - sum += buf[i] - } - - this.cksumValid = sum === this.cksum - if (this.cksum === null && sum === 8 * 0x20) { - this.nullBlock = true - } - } - - [SLURP] (ex, global) { - for (const k in ex) { - // we slurp in everything except for the path attribute in - // a global extended header, because that's weird. - if (ex[k] !== null && ex[k] !== undefined && - !(global && k === 'path')) { - this[k] = ex[k] - } - } - } - - encode (buf, off) { - if (!buf) { - buf = this.block = Buffer.alloc(512) - off = 0 - } - - if (!off) { - off = 0 - } - - if (!(buf.length >= off + 512)) { - throw new Error('need 512 bytes for header') - } - - const prefixSize = this.ctime || this.atime ? 
130 : 155 - const split = splitPrefix(this.path || '', prefixSize) - const path = split[0] - const prefix = split[1] - this.needPax = split[2] - - this.needPax = encString(buf, off, 100, path) || this.needPax - this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax - this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax - this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax - this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax - this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax - buf[off + 156] = this[TYPE].charCodeAt(0) - this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax - buf.write('ustar\u000000', off + 257, 8) - this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax - this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax - this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax - this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax - this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax - if (buf[off + 475] !== 0) { - this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax - } else { - this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax - this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax - this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax - } - - let sum = 8 * 0x20 - for (let i = off; i < off + 148; i++) { - sum += buf[i] - } - - for (let i = off + 156; i < off + 512; i++) { - sum += buf[i] - } - - this.cksum = sum - encNumber(buf, off + 148, 8, this.cksum) - this.cksumValid = true - - return this.needPax - } - - set (data) { - for (const i in data) { - if (data[i] !== null && data[i] !== undefined) { - this[i] = data[i] - } - } - } - - get type () { - return types.name.get(this[TYPE]) || this[TYPE] - } - - get typeKey () { - return this[TYPE] - } - - set type (type) { - if (types.code.has(type)) { - this[TYPE] = types.code.get(type) - } else { - this[TYPE] = type - } - } -} - -const splitPrefix = (p, prefixSize) => { - const pathSize = 100 - let pp = p - let prefix = '' - let ret - const root = pathModule.parse(p).root || '.' - - if (Buffer.byteLength(pp) < pathSize) { - ret = [pp, prefix, false] - } else { - // first set prefix to the dir, and path to the base - prefix = pathModule.dirname(pp) - pp = pathModule.basename(pp) - - do { - if (Buffer.byteLength(pp) <= pathSize && - Buffer.byteLength(prefix) <= prefixSize) { - // both fit! - ret = [pp, prefix, false] - } else if (Buffer.byteLength(pp) > pathSize && - Buffer.byteLength(prefix) <= prefixSize) { - // prefix fits in prefix, but path doesn't fit in path - ret = [pp.slice(0, pathSize - 1), prefix, true] - } else { - // make path take a bit from prefix - pp = pathModule.join(pathModule.basename(prefix), pp) - prefix = pathModule.dirname(prefix) - } - } while (prefix !== root && !ret) - - // at this point, found no resolution, just truncate - if (!ret) { - ret = [p.slice(0, pathSize - 1), '', true] - } - } - return ret -} - -const decString = (buf, off, size) => - buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '') - -const decDate = (buf, off, size) => - numToDate(decNumber(buf, off, size)) - -const numToDate = num => num === null ? null : new Date(num * 1000) - -const decNumber = (buf, off, size) => - buf[off] & 0x80 ? large.parse(buf.slice(off, off + size)) - : decSmallNumber(buf, off, size) - -const nanNull = value => isNaN(value) ? 
null : value - -const decSmallNumber = (buf, off, size) => - nanNull(parseInt( - buf.slice(off, off + size) - .toString('utf8').replace(/\0.*$/, '').trim(), 8)) - -// the maximum encodable as a null-terminated octal, by field size -const MAXNUM = { - 12: 0o77777777777, - 8: 0o7777777, -} - -const encNumber = (buf, off, size, number) => - number === null ? false : - number > MAXNUM[size] || number < 0 - ? (large.encode(number, buf.slice(off, off + size)), true) - : (encSmallNumber(buf, off, size, number), false) - -const encSmallNumber = (buf, off, size, number) => - buf.write(octalString(number, size), off, size, 'ascii') - -const octalString = (number, size) => - padOctal(Math.floor(number).toString(8), size) - -const padOctal = (string, size) => - (string.length === size - 1 ? string - : new Array(size - string.length - 1).join('0') + string + ' ') + '\0' - -const encDate = (buf, off, size, date) => - date === null ? false : - encNumber(buf, off, size, date.getTime() / 1000) - -// enough to fill the longest string we've got -const NULLS = new Array(156).join('\0') -// pad with nulls, return true if it's longer or non-ascii -const encString = (buf, off, size, string) => - string === null ? false : - (buf.write(string + NULLS, off, size, 'utf8'), - string.length !== Buffer.byteLength(string) || string.length > size) - -module.exports = Header diff --git a/lib/high-level-opt.js b/lib/high-level-opt.js deleted file mode 100644 index 40e44180..00000000 --- a/lib/high-level-opt.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -// turn tar(1) style args like `C` into the more verbose things like `cwd` - -const argmap = new Map([ - ['C', 'cwd'], - ['f', 'file'], - ['z', 'gzip'], - ['P', 'preservePaths'], - ['U', 'unlink'], - ['strip-components', 'strip'], - ['stripComponents', 'strip'], - ['keep-newer', 'newer'], - ['keepNewer', 'newer'], - ['keep-newer-files', 'newer'], - ['keepNewerFiles', 'newer'], - ['k', 'keep'], - ['keep-existing', 'keep'], - ['keepExisting', 'keep'], - ['m', 'noMtime'], - ['no-mtime', 'noMtime'], - ['p', 'preserveOwner'], - ['L', 'follow'], - ['h', 'follow'], -]) - -module.exports = opt => opt ? Object.keys(opt).map(k => [ - argmap.has(k) ? argmap.get(k) : k, opt[k], -]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {} diff --git a/lib/large-numbers.js b/lib/large-numbers.js deleted file mode 100644 index b11e72d9..00000000 --- a/lib/large-numbers.js +++ /dev/null @@ -1,104 +0,0 @@ -'use strict' -// Tar can encode large and negative numbers using a leading byte of -// 0xff for negative, and 0x80 for positive. - -const encode = (num, buf) => { - if (!Number.isSafeInteger(num)) { - // The number is so large that javascript cannot represent it with integer - // precision. - throw Error('cannot encode number outside of javascript safe integer range') - } else if (num < 0) { - encodeNegative(num, buf) - } else { - encodePositive(num, buf) - } - return buf -} - -const encodePositive = (num, buf) => { - buf[0] = 0x80 - - for (var i = buf.length; i > 1; i--) { - buf[i - 1] = num & 0xff - num = Math.floor(num / 0x100) - } -} - -const encodeNegative = (num, buf) => { - buf[0] = 0xff - var flipped = false - num = num * -1 - for (var i = buf.length; i > 1; i--) { - var byte = num & 0xff - num = Math.floor(num / 0x100) - if (flipped) { - buf[i - 1] = onesComp(byte) - } else if (byte === 0) { - buf[i - 1] = 0 - } else { - flipped = true - buf[i - 1] = twosComp(byte) - } - } -} - -const parse = (buf) => { - const pre = buf[0] - const value = pre === 0x80 ? 
pos(buf.slice(1, buf.length)) - : pre === 0xff ? twos(buf) - : null - if (value === null) { - throw Error('invalid base256 encoding') - } - - if (!Number.isSafeInteger(value)) { - // The number is so large that javascript cannot represent it with integer - // precision. - throw Error('parsed number outside of javascript safe integer range') - } - - return value -} - -const twos = (buf) => { - var len = buf.length - var sum = 0 - var flipped = false - for (var i = len - 1; i > -1; i--) { - var byte = buf[i] - var f - if (flipped) { - f = onesComp(byte) - } else if (byte === 0) { - f = byte - } else { - flipped = true - f = twosComp(byte) - } - if (f !== 0) { - sum -= f * Math.pow(256, len - i - 1) - } - } - return sum -} - -const pos = (buf) => { - var len = buf.length - var sum = 0 - for (var i = len - 1; i > -1; i--) { - var byte = buf[i] - if (byte !== 0) { - sum += byte * Math.pow(256, len - i - 1) - } - } - return sum -} - -const onesComp = byte => (0xff ^ byte) & 0xff - -const twosComp = byte => ((0xff ^ byte) + 1) & 0xff - -module.exports = { - encode, - parse, -} diff --git a/lib/list.js b/lib/list.js deleted file mode 100644 index f2358c25..00000000 --- a/lib/list.js +++ /dev/null @@ -1,139 +0,0 @@ -'use strict' - -// XXX: This shares a lot in common with extract.js -// maybe some DRY opportunity here? - -// tar -t -const hlo = require('./high-level-opt.js') -const Parser = require('./parse.js') -const fs = require('fs') -const fsm = require('fs-minipass') -const path = require('path') -const stripSlash = require('./strip-trailing-slashes.js') - -module.exports = (opt_, files, cb) => { - if (typeof opt_ === 'function') { - cb = opt_, files = null, opt_ = {} - } else if (Array.isArray(opt_)) { - files = opt_, opt_ = {} - } - - if (typeof files === 'function') { - cb = files, files = null - } - - if (!files) { - files = [] - } else { - files = Array.from(files) - } - - const opt = hlo(opt_) - - if (opt.sync && typeof cb === 'function') { - throw new TypeError('callback not supported for sync tar functions') - } - - if (!opt.file && typeof cb === 'function') { - throw new TypeError('callback only supported with file option') - } - - if (files.length) { - filesFilter(opt, files) - } - - if (!opt.noResume) { - onentryFunction(opt) - } - - return opt.file && opt.sync ? listFileSync(opt) - : opt.file ? listFile(opt, cb) - : list(opt) -} - -const onentryFunction = opt => { - const onentry = opt.onentry - opt.onentry = onentry ? e => { - onentry(e) - e.resume() - } : e => e.resume() -} - -// construct a filter that limits the file entries listed -// include child entries if a dir is included -const filesFilter = (opt, files) => { - const map = new Map(files.map(f => [stripSlash(f), true])) - const filter = opt.filter - - const mapHas = (file, r) => { - const root = r || path.parse(file).root || '.' - const ret = file === root ? false - : map.has(file) ? map.get(file) - : mapHas(path.dirname(file), root) - - map.set(file, ret) - return ret - } - - opt.filter = filter - ? 
(file, entry) => filter(file, entry) && mapHas(stripSlash(file)) - : file => mapHas(stripSlash(file)) -} - -const listFileSync = opt => { - const p = list(opt) - const file = opt.file - let threw = true - let fd - try { - const stat = fs.statSync(file) - const readSize = opt.maxReadSize || 16 * 1024 * 1024 - if (stat.size < readSize) { - p.end(fs.readFileSync(file)) - } else { - let pos = 0 - const buf = Buffer.allocUnsafe(readSize) - fd = fs.openSync(file, 'r') - while (pos < stat.size) { - const bytesRead = fs.readSync(fd, buf, 0, readSize, pos) - pos += bytesRead - p.write(buf.slice(0, bytesRead)) - } - p.end() - } - threw = false - } finally { - if (threw && fd) { - try { - fs.closeSync(fd) - } catch (er) {} - } - } -} - -const listFile = (opt, cb) => { - const parse = new Parser(opt) - const readSize = opt.maxReadSize || 16 * 1024 * 1024 - - const file = opt.file - const p = new Promise((resolve, reject) => { - parse.on('error', reject) - parse.on('end', resolve) - - fs.stat(file, (er, stat) => { - if (er) { - reject(er) - } else { - const stream = new fsm.ReadStream(file, { - readSize: readSize, - size: stat.size, - }) - stream.on('error', reject) - stream.pipe(parse) - } - }) - }) - return cb ? p.then(cb, cb) : p -} - -const list = opt => new Parser(opt) diff --git a/lib/mkdir.js b/lib/mkdir.js deleted file mode 100644 index 8ee8de78..00000000 --- a/lib/mkdir.js +++ /dev/null @@ -1,229 +0,0 @@ -'use strict' -// wrapper around mkdirp for tar's needs. - -// TODO: This should probably be a class, not functionally -// passing around state in a gazillion args. - -const mkdirp = require('mkdirp') -const fs = require('fs') -const path = require('path') -const chownr = require('chownr') -const normPath = require('./normalize-windows-path.js') - -class SymlinkError extends Error { - constructor (symlink, path) { - super('Cannot extract through symbolic link') - this.path = path - this.symlink = symlink - } - - get name () { - return 'SylinkError' - } -} - -class CwdError extends Error { - constructor (path, code) { - super(code + ': Cannot cd into \'' + path + '\'') - this.path = path - this.code = code - } - - get name () { - return 'CwdError' - } -} - -const cGet = (cache, key) => cache.get(normPath(key)) -const cSet = (cache, key, val) => cache.set(normPath(key), val) - -const checkCwd = (dir, cb) => { - fs.stat(dir, (er, st) => { - if (er || !st.isDirectory()) { - er = new CwdError(dir, er && er.code || 'ENOTDIR') - } - cb(er) - }) -} - -module.exports = (dir, opt, cb) => { - dir = normPath(dir) - - // if there's any overlap between mask and mode, - // then we'll need an explicit chmod - const umask = opt.umask - const mode = opt.mode | 0o0700 - const needChmod = (mode & umask) !== 0 - - const uid = opt.uid - const gid = opt.gid - const doChown = typeof uid === 'number' && - typeof gid === 'number' && - (uid !== opt.processUid || gid !== opt.processGid) - - const preserve = opt.preserve - const unlink = opt.unlink - const cache = opt.cache - const cwd = normPath(opt.cwd) - - const done = (er, created) => { - if (er) { - cb(er) - } else { - cSet(cache, dir, true) - if (created && doChown) { - chownr(created, uid, gid, er => done(er)) - } else if (needChmod) { - fs.chmod(dir, mode, cb) - } else { - cb() - } - } - } - - if (cache && cGet(cache, dir) === true) { - return done() - } - - if (dir === cwd) { - return checkCwd(dir, done) - } - - if (preserve) { - return mkdirp(dir, { mode }).then(made => done(null, made), done) - } - - const sub = normPath(path.relative(cwd, dir)) - const parts = 
sub.split('/') - mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done) -} - -const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { - if (!parts.length) { - return cb(null, created) - } - const p = parts.shift() - const part = normPath(path.resolve(base + '/' + p)) - if (cGet(cache, part)) { - return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) - } - fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) -} - -const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => { - if (er) { - fs.lstat(part, (statEr, st) => { - if (statEr) { - statEr.path = statEr.path && normPath(statEr.path) - cb(statEr) - } else if (st.isDirectory()) { - mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) - } else if (unlink) { - fs.unlink(part, er => { - if (er) { - return cb(er) - } - fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) - }) - } else if (st.isSymbolicLink()) { - return cb(new SymlinkError(part, part + '/' + parts.join('/'))) - } else { - cb(er) - } - }) - } else { - created = created || part - mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) - } -} - -const checkCwdSync = dir => { - let ok = false - let code = 'ENOTDIR' - try { - ok = fs.statSync(dir).isDirectory() - } catch (er) { - code = er.code - } finally { - if (!ok) { - throw new CwdError(dir, code) - } - } -} - -module.exports.sync = (dir, opt) => { - dir = normPath(dir) - // if there's any overlap between mask and mode, - // then we'll need an explicit chmod - const umask = opt.umask - const mode = opt.mode | 0o0700 - const needChmod = (mode & umask) !== 0 - - const uid = opt.uid - const gid = opt.gid - const doChown = typeof uid === 'number' && - typeof gid === 'number' && - (uid !== opt.processUid || gid !== opt.processGid) - - const preserve = opt.preserve - const unlink = opt.unlink - const cache = opt.cache - const cwd = normPath(opt.cwd) - - const done = (created) => { - cSet(cache, dir, true) - if (created && doChown) { - chownr.sync(created, uid, gid) - } - if (needChmod) { - fs.chmodSync(dir, mode) - } - } - - if (cache && cGet(cache, dir) === true) { - return done() - } - - if (dir === cwd) { - checkCwdSync(cwd) - return done() - } - - if (preserve) { - return done(mkdirp.sync(dir, mode)) - } - - const sub = normPath(path.relative(cwd, dir)) - const parts = sub.split('/') - let created = null - for (let p = parts.shift(), part = cwd; - p && (part += '/' + p); - p = parts.shift()) { - part = normPath(path.resolve(part)) - if (cGet(cache, part)) { - continue - } - - try { - fs.mkdirSync(part, mode) - created = created || part - cSet(cache, part, true) - } catch (er) { - const st = fs.lstatSync(part) - if (st.isDirectory()) { - cSet(cache, part, true) - continue - } else if (unlink) { - fs.unlinkSync(part) - fs.mkdirSync(part, mode) - created = created || part - cSet(cache, part, true) - continue - } else if (st.isSymbolicLink()) { - return new SymlinkError(part, part + '/' + parts.join('/')) - } - } - } - - return done(created) -} diff --git a/lib/mode-fix.js b/lib/mode-fix.js deleted file mode 100644 index 42f1d6e6..00000000 --- a/lib/mode-fix.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict' -module.exports = (mode, isDir, portable) => { - mode &= 0o7777 - - // in portable mode, use the minimum reasonable umask - // if this system creates files with 0o664 by default - // (as some linux distros do), then we'll write the - // archive with 0o644 instead. 
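As a worked example of the portable-mode normalization this comment describes (a standalone sketch, not part of the patch; `portableFix` is a hypothetical name for the same expression used in lib/mode-fix.js):

    // (mode | 0o600) forces owner read/write; & ~0o22 clears the group
    // and other write bits, turning a distro-default 0o664 into 0o644.
    const portableFix = mode => (mode | 0o600) & ~0o22

    console.log(portableFix(0o664).toString(8))  // '644'
    console.log(portableFix(0o400).toString(8))  // '600' (owner r/w forced on)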
Also, don't ever create - // a file that is not readable/writable by the owner. - if (portable) { - mode = (mode | 0o600) & ~0o22 - } - - // if dirs are readable, then they should be listable - if (isDir) { - if (mode & 0o400) { - mode |= 0o100 - } - if (mode & 0o40) { - mode |= 0o10 - } - if (mode & 0o4) { - mode |= 0o1 - } - } - return mode -} diff --git a/lib/normalize-unicode.js b/lib/normalize-unicode.js deleted file mode 100644 index 79e285ab..00000000 --- a/lib/normalize-unicode.js +++ /dev/null @@ -1,12 +0,0 @@ -// warning: extremely hot code path. -// This has been meticulously optimized for use -// within npm install on large package trees. -// Do not edit without careful benchmarking. -const normalizeCache = Object.create(null) -const { hasOwnProperty } = Object.prototype -module.exports = s => { - if (!hasOwnProperty.call(normalizeCache, s)) { - normalizeCache[s] = s.normalize('NFD') - } - return normalizeCache[s] -} diff --git a/lib/normalize-windows-path.js b/lib/normalize-windows-path.js deleted file mode 100644 index eb13ba01..00000000 --- a/lib/normalize-windows-path.js +++ /dev/null @@ -1,8 +0,0 @@ -// on windows, either \ or / are valid directory separators. -// on unix, \ is a valid character in filenames. -// so, on windows, and only on windows, we replace all \ chars with /, -// so that we can use / as our one and only directory separator char. - -const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform -module.exports = platform !== 'win32' ? p => p - : p => p && p.replace(/\\/g, '/') diff --git a/lib/pack.js b/lib/pack.js deleted file mode 100644 index d533a068..00000000 --- a/lib/pack.js +++ /dev/null @@ -1,432 +0,0 @@ -'use strict' - -// A readable tar stream creator -// Technically, this is a transform stream that you write paths into, -// and tar format comes out of. 
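The normalize-unicode.js helper removed just above exists because the same path can be spelled with different Unicode code points; a brief sketch of the collision it guards against (illustration only, using Node's built-in String.prototype.normalize):

    // 'café' may arrive precomposed (U+00E9) or decomposed (e + U+0301).
    // As raw strings they differ; after NFD normalization they match,
    // which is what the memoized normalize() above relies on.
    const composed = 'caf\u00e9'
    const decomposed = 'cafe\u0301'
    console.log(composed === decomposed)                                    // false
    console.log(composed.normalize('NFD') === decomposed.normalize('NFD')) // true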
-// The `add()` method is like `write()` but returns this, -// and end() return `this` as well, so you can -// do `new Pack(opt).add('files').add('dir').end().pipe(output) -// You could also do something like: -// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar')) - -class PackJob { - constructor (path, absolute) { - this.path = path || './' - this.absolute = absolute - this.entry = null - this.stat = null - this.readdir = null - this.pending = false - this.ignore = false - this.piped = false - } -} - -const { Minipass } = require('minipass') -const zlib = require('minizlib') -const ReadEntry = require('./read-entry.js') -const WriteEntry = require('./write-entry.js') -const WriteEntrySync = WriteEntry.Sync -const WriteEntryTar = WriteEntry.Tar -const Yallist = require('yallist') -const EOF = Buffer.alloc(1024) -const ONSTAT = Symbol('onStat') -const ENDED = Symbol('ended') -const QUEUE = Symbol('queue') -const CURRENT = Symbol('current') -const PROCESS = Symbol('process') -const PROCESSING = Symbol('processing') -const PROCESSJOB = Symbol('processJob') -const JOBS = Symbol('jobs') -const JOBDONE = Symbol('jobDone') -const ADDFSENTRY = Symbol('addFSEntry') -const ADDTARENTRY = Symbol('addTarEntry') -const STAT = Symbol('stat') -const READDIR = Symbol('readdir') -const ONREADDIR = Symbol('onreaddir') -const PIPE = Symbol('pipe') -const ENTRY = Symbol('entry') -const ENTRYOPT = Symbol('entryOpt') -const WRITEENTRYCLASS = Symbol('writeEntryClass') -const WRITE = Symbol('write') -const ONDRAIN = Symbol('ondrain') - -const fs = require('fs') -const path = require('path') -const warner = require('./warn-mixin.js') -const normPath = require('./normalize-windows-path.js') - -const Pack = warner(class Pack extends Minipass { - constructor (opt) { - super(opt) - opt = opt || Object.create(null) - this.opt = opt - this.file = opt.file || '' - this.cwd = opt.cwd || process.cwd() - this.maxReadSize = opt.maxReadSize - this.preservePaths = !!opt.preservePaths - this.strict = !!opt.strict - this.noPax = !!opt.noPax - this.prefix = normPath(opt.prefix || '') - this.linkCache = opt.linkCache || new Map() - this.statCache = opt.statCache || new Map() - this.readdirCache = opt.readdirCache || new Map() - - this[WRITEENTRYCLASS] = WriteEntry - if (typeof opt.onwarn === 'function') { - this.on('warn', opt.onwarn) - } - - this.portable = !!opt.portable - this.zip = null - - if (opt.gzip || opt.brotli) { - if (opt.gzip && opt.brotli) { - throw new TypeError('gzip and brotli are mutually exclusive') - } - if (opt.gzip) { - if (typeof opt.gzip !== 'object') { - opt.gzip = {} - } - if (this.portable) { - opt.gzip.portable = true - } - this.zip = new zlib.Gzip(opt.gzip) - } - if (opt.brotli) { - if (typeof opt.brotli !== 'object') { - opt.brotli = {} - } - this.zip = new zlib.BrotliCompress(opt.brotli) - } - this.zip.on('data', chunk => super.write(chunk)) - this.zip.on('end', _ => super.end()) - this.zip.on('drain', _ => this[ONDRAIN]()) - this.on('resume', _ => this.zip.resume()) - } else { - this.on('drain', this[ONDRAIN]) - } - - this.noDirRecurse = !!opt.noDirRecurse - this.follow = !!opt.follow - this.noMtime = !!opt.noMtime - this.mtime = opt.mtime || null - - this.filter = typeof opt.filter === 'function' ? 
opt.filter : _ => true - - this[QUEUE] = new Yallist() - this[JOBS] = 0 - this.jobs = +opt.jobs || 4 - this[PROCESSING] = false - this[ENDED] = false - } - - [WRITE] (chunk) { - return super.write(chunk) - } - - add (path) { - this.write(path) - return this - } - - end (path) { - if (path) { - this.write(path) - } - this[ENDED] = true - this[PROCESS]() - return this - } - - write (path) { - if (this[ENDED]) { - throw new Error('write after end') - } - - if (path instanceof ReadEntry) { - this[ADDTARENTRY](path) - } else { - this[ADDFSENTRY](path) - } - return this.flowing - } - - [ADDTARENTRY] (p) { - const absolute = normPath(path.resolve(this.cwd, p.path)) - // in this case, we don't have to wait for the stat - if (!this.filter(p.path, p)) { - p.resume() - } else { - const job = new PackJob(p.path, absolute, false) - job.entry = new WriteEntryTar(p, this[ENTRYOPT](job)) - job.entry.on('end', _ => this[JOBDONE](job)) - this[JOBS] += 1 - this[QUEUE].push(job) - } - - this[PROCESS]() - } - - [ADDFSENTRY] (p) { - const absolute = normPath(path.resolve(this.cwd, p)) - this[QUEUE].push(new PackJob(p, absolute)) - this[PROCESS]() - } - - [STAT] (job) { - job.pending = true - this[JOBS] += 1 - const stat = this.follow ? 'stat' : 'lstat' - fs[stat](job.absolute, (er, stat) => { - job.pending = false - this[JOBS] -= 1 - if (er) { - this.emit('error', er) - } else { - this[ONSTAT](job, stat) - } - }) - } - - [ONSTAT] (job, stat) { - this.statCache.set(job.absolute, stat) - job.stat = stat - - // now we have the stat, we can filter it. - if (!this.filter(job.path, stat)) { - job.ignore = true - } - - this[PROCESS]() - } - - [READDIR] (job) { - job.pending = true - this[JOBS] += 1 - fs.readdir(job.absolute, (er, entries) => { - job.pending = false - this[JOBS] -= 1 - if (er) { - return this.emit('error', er) - } - this[ONREADDIR](job, entries) - }) - } - - [ONREADDIR] (job, entries) { - this.readdirCache.set(job.absolute, entries) - job.readdir = entries - this[PROCESS]() - } - - [PROCESS] () { - if (this[PROCESSING]) { - return - } - - this[PROCESSING] = true - for (let w = this[QUEUE].head; - w !== null && this[JOBS] < this.jobs; - w = w.next) { - this[PROCESSJOB](w.value) - if (w.value.ignore) { - const p = w.next - this[QUEUE].removeNode(w) - w.next = p - } - } - - this[PROCESSING] = false - - if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) { - if (this.zip) { - this.zip.end(EOF) - } else { - super.write(EOF) - super.end() - } - } - } - - get [CURRENT] () { - return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value - } - - [JOBDONE] (job) { - this[QUEUE].shift() - this[JOBS] -= 1 - this[PROCESS]() - } - - [PROCESSJOB] (job) { - if (job.pending) { - return - } - - if (job.entry) { - if (job === this[CURRENT] && !job.piped) { - this[PIPE](job) - } - return - } - - if (!job.stat) { - if (this.statCache.has(job.absolute)) { - this[ONSTAT](job, this.statCache.get(job.absolute)) - } else { - this[STAT](job) - } - } - if (!job.stat) { - return - } - - // filtered out! 
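For context on the filter checks in the job pipeline above, a hypothetical usage sketch (the cwd and paths are invented; Pack is required from lib/pack.js as it existed before this patch):

    const Pack = require('./lib/pack.js')

    // the filter sees (path, stat) for filesystem entries and
    // (path, entry) for ReadEntry objects written into the stream
    const pack = new Pack({
      cwd: '/some/project',
      filter: path => !/node_modules/.test(path),
    })
    pack.add('src').end().pipe(process.stdout)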
- if (job.ignore) { - return - } - - if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) { - if (this.readdirCache.has(job.absolute)) { - this[ONREADDIR](job, this.readdirCache.get(job.absolute)) - } else { - this[READDIR](job) - } - if (!job.readdir) { - return - } - } - - // we know it doesn't have an entry, because that got checked above - job.entry = this[ENTRY](job) - if (!job.entry) { - job.ignore = true - return - } - - if (job === this[CURRENT] && !job.piped) { - this[PIPE](job) - } - } - - [ENTRYOPT] (job) { - return { - onwarn: (code, msg, data) => this.warn(code, msg, data), - noPax: this.noPax, - cwd: this.cwd, - absolute: job.absolute, - preservePaths: this.preservePaths, - maxReadSize: this.maxReadSize, - strict: this.strict, - portable: this.portable, - linkCache: this.linkCache, - statCache: this.statCache, - noMtime: this.noMtime, - mtime: this.mtime, - prefix: this.prefix, - } - } - - [ENTRY] (job) { - this[JOBS] += 1 - try { - return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)) - .on('end', () => this[JOBDONE](job)) - .on('error', er => this.emit('error', er)) - } catch (er) { - this.emit('error', er) - } - } - - [ONDRAIN] () { - if (this[CURRENT] && this[CURRENT].entry) { - this[CURRENT].entry.resume() - } - } - - // like .pipe() but using super, because our write() is special - [PIPE] (job) { - job.piped = true - - if (job.readdir) { - job.readdir.forEach(entry => { - const p = job.path - const base = p === './' ? '' : p.replace(/\/*$/, '/') - this[ADDFSENTRY](base + entry) - }) - } - - const source = job.entry - const zip = this.zip - - if (zip) { - source.on('data', chunk => { - if (!zip.write(chunk)) { - source.pause() - } - }) - } else { - source.on('data', chunk => { - if (!super.write(chunk)) { - source.pause() - } - }) - } - } - - pause () { - if (this.zip) { - this.zip.pause() - } - return super.pause() - } -}) - -class PackSync extends Pack { - constructor (opt) { - super(opt) - this[WRITEENTRYCLASS] = WriteEntrySync - } - - // pause/resume are no-ops in sync streams. - pause () {} - resume () {} - - [STAT] (job) { - const stat = this.follow ? 'statSync' : 'lstatSync' - this[ONSTAT](job, fs[stat](job.absolute)) - } - - [READDIR] (job, stat) { - this[ONREADDIR](job, fs.readdirSync(job.absolute)) - } - - // gotta get it all in this tick - [PIPE] (job) { - const source = job.entry - const zip = this.zip - - if (job.readdir) { - job.readdir.forEach(entry => { - const p = job.path - const base = p === './' ? '' : p.replace(/\/*$/, '/') - this[ADDFSENTRY](base + entry) - }) - } - - if (zip) { - source.on('data', chunk => { - zip.write(chunk) - }) - } else { - source.on('data', chunk => { - super[WRITE](chunk) - }) - } - } -} - -Pack.Sync = PackSync - -module.exports = Pack diff --git a/lib/parse.js b/lib/parse.js deleted file mode 100644 index 94e53042..00000000 --- a/lib/parse.js +++ /dev/null @@ -1,552 +0,0 @@ -'use strict' - -// this[BUFFER] is the remainder of a chunk if we're waiting for -// the full 512 bytes of a header to come in. We will Buffer.concat() -// it to the next write(), which is a mem copy, but a small one. -// -// this[QUEUE] is a Yallist of entries that haven't been emitted -// yet this can only get filled up if the user keeps write()ing after -// a write() returns false, or does a write() with more than one entry -// -// We don't buffer chunks, we always parse them and either create an -// entry, or push it into the active entry. 
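A minimal sketch of driving this parser directly, assuming a local archive.tar (the filename is invented; the onentry option and entry.resume() are the real hooks shown in this file):

    const Parser = require('./lib/parse.js')
    const fs = require('fs')

    const p = new Parser({
      onentry: entry => {
        console.log(entry.type, entry.path, entry.size)
        entry.resume() // drain the body so the parser can advance
      },
    })
    fs.createReadStream('archive.tar').pipe(p)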
The ReadEntry class knows -// to throw data away if .ignore=true -// -// Shift entry off the buffer when it emits 'end', and emit 'entry' for -// the next one in the list. -// -// At any time, we're pushing body chunks into the entry at WRITEENTRY, -// and waiting for 'end' on the entry at READENTRY -// -// ignored entries get .resume() called on them straight away - -const warner = require('./warn-mixin.js') -const Header = require('./header.js') -const EE = require('events') -const Yallist = require('yallist') -const maxMetaEntrySize = 1024 * 1024 -const Entry = require('./read-entry.js') -const Pax = require('./pax.js') -const zlib = require('minizlib') -const { nextTick } = require('process') - -const gzipHeader = Buffer.from([0x1f, 0x8b]) -const STATE = Symbol('state') -const WRITEENTRY = Symbol('writeEntry') -const READENTRY = Symbol('readEntry') -const NEXTENTRY = Symbol('nextEntry') -const PROCESSENTRY = Symbol('processEntry') -const EX = Symbol('extendedHeader') -const GEX = Symbol('globalExtendedHeader') -const META = Symbol('meta') -const EMITMETA = Symbol('emitMeta') -const BUFFER = Symbol('buffer') -const QUEUE = Symbol('queue') -const ENDED = Symbol('ended') -const EMITTEDEND = Symbol('emittedEnd') -const EMIT = Symbol('emit') -const UNZIP = Symbol('unzip') -const CONSUMECHUNK = Symbol('consumeChunk') -const CONSUMECHUNKSUB = Symbol('consumeChunkSub') -const CONSUMEBODY = Symbol('consumeBody') -const CONSUMEMETA = Symbol('consumeMeta') -const CONSUMEHEADER = Symbol('consumeHeader') -const CONSUMING = Symbol('consuming') -const BUFFERCONCAT = Symbol('bufferConcat') -const MAYBEEND = Symbol('maybeEnd') -const WRITING = Symbol('writing') -const ABORTED = Symbol('aborted') -const DONE = Symbol('onDone') -const SAW_VALID_ENTRY = Symbol('sawValidEntry') -const SAW_NULL_BLOCK = Symbol('sawNullBlock') -const SAW_EOF = Symbol('sawEOF') -const CLOSESTREAM = Symbol('closeStream') - -const noop = _ => true - -module.exports = warner(class Parser extends EE { - constructor (opt) { - opt = opt || {} - super(opt) - - this.file = opt.file || '' - - // set to boolean false when an entry starts. 1024 bytes of \0 - // is technically a valid tarball, albeit a boring one. - this[SAW_VALID_ENTRY] = null - - // these BADARCHIVE errors can't be detected early. listen on DONE. - this.on(DONE, _ => { - if (this[STATE] === 'begin' || this[SAW_VALID_ENTRY] === false) { - // either less than 1 block of data, or all entries were invalid. - // Either way, probably not even a tarball. - this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format') - } - }) - - if (opt.ondone) { - this.on(DONE, opt.ondone) - } else { - this.on(DONE, _ => { - this.emit('prefinish') - this.emit('finish') - this.emit('end') - }) - } - - this.strict = !!opt.strict - this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize - this.filter = typeof opt.filter === 'function' ? opt.filter : noop - // Unlike gzip, brotli doesn't have any magic bytes to identify it - // Users need to explicitly tell us they're extracting a brotli file - // Or we infer from the file extension - const isTBR = (opt.file && ( - opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'))) - // if it's a tbr file it MIGHT be brotli, but we don't know until - // we look at it and verify it's not a valid tar file. - this.brotli = !opt.gzip && opt.brotli !== undefined ? opt.brotli - : isTBR ? 
undefined - : false - - // have to set this so that streams are ok piping into it - this.writable = true - this.readable = false - - this[QUEUE] = new Yallist() - this[BUFFER] = null - this[READENTRY] = null - this[WRITEENTRY] = null - this[STATE] = 'begin' - this[META] = '' - this[EX] = null - this[GEX] = null - this[ENDED] = false - this[UNZIP] = null - this[ABORTED] = false - this[SAW_NULL_BLOCK] = false - this[SAW_EOF] = false - - this.on('end', () => this[CLOSESTREAM]()) - - if (typeof opt.onwarn === 'function') { - this.on('warn', opt.onwarn) - } - if (typeof opt.onentry === 'function') { - this.on('entry', opt.onentry) - } - } - - [CONSUMEHEADER] (chunk, position) { - if (this[SAW_VALID_ENTRY] === null) { - this[SAW_VALID_ENTRY] = false - } - let header - try { - header = new Header(chunk, position, this[EX], this[GEX]) - } catch (er) { - return this.warn('TAR_ENTRY_INVALID', er) - } - - if (header.nullBlock) { - if (this[SAW_NULL_BLOCK]) { - this[SAW_EOF] = true - // ending an archive with no entries. pointless, but legal. - if (this[STATE] === 'begin') { - this[STATE] = 'header' - } - this[EMIT]('eof') - } else { - this[SAW_NULL_BLOCK] = true - this[EMIT]('nullBlock') - } - } else { - this[SAW_NULL_BLOCK] = false - if (!header.cksumValid) { - this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header }) - } else if (!header.path) { - this.warn('TAR_ENTRY_INVALID', 'path is required', { header }) - } else { - const type = header.type - if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) { - this.warn('TAR_ENTRY_INVALID', 'linkpath required', { header }) - } else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath) { - this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { header }) - } else { - const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX]) - - // we do this for meta & ignored entries as well, because they - // are still valid tar, or else we wouldn't know to ignore them - if (!this[SAW_VALID_ENTRY]) { - if (entry.remain) { - // this might be the one! - const onend = () => { - if (!entry.invalid) { - this[SAW_VALID_ENTRY] = true - } - } - entry.on('end', onend) - } else { - this[SAW_VALID_ENTRY] = true - } - } - - if (entry.meta) { - if (entry.size > this.maxMetaEntrySize) { - entry.ignore = true - this[EMIT]('ignoredEntry', entry) - this[STATE] = 'ignore' - entry.resume() - } else if (entry.size > 0) { - this[META] = '' - entry.on('data', c => this[META] += c) - this[STATE] = 'meta' - } - } else { - this[EX] = null - entry.ignore = entry.ignore || !this.filter(entry.path, entry) - - if (entry.ignore) { - // probably valid, just not something we care about - this[EMIT]('ignoredEntry', entry) - this[STATE] = entry.remain ? 
'ignore' : 'header' - entry.resume() - } else { - if (entry.remain) { - this[STATE] = 'body' - } else { - this[STATE] = 'header' - entry.end() - } - - if (!this[READENTRY]) { - this[QUEUE].push(entry) - this[NEXTENTRY]() - } else { - this[QUEUE].push(entry) - } - } - } - } - } - } - } - - [CLOSESTREAM] () { - nextTick(() => this.emit('close')) - } - - [PROCESSENTRY] (entry) { - let go = true - - if (!entry) { - this[READENTRY] = null - go = false - } else if (Array.isArray(entry)) { - this.emit.apply(this, entry) - } else { - this[READENTRY] = entry - this.emit('entry', entry) - if (!entry.emittedEnd) { - entry.on('end', _ => this[NEXTENTRY]()) - go = false - } - } - - return go - } - - [NEXTENTRY] () { - do {} while (this[PROCESSENTRY](this[QUEUE].shift())) - - if (!this[QUEUE].length) { - // At this point, there's nothing in the queue, but we may have an - // entry which is being consumed (readEntry). - // If we don't, then we definitely can handle more data. - // If we do, and either it's flowing, or it has never had any data - // written to it, then it needs more. - // The only other possibility is that it has returned false from a - // write() call, so we wait for the next drain to continue. - const re = this[READENTRY] - const drainNow = !re || re.flowing || re.size === re.remain - if (drainNow) { - if (!this[WRITING]) { - this.emit('drain') - } - } else { - re.once('drain', _ => this.emit('drain')) - } - } - } - - [CONSUMEBODY] (chunk, position) { - // write up to but no more than writeEntry.blockRemain - const entry = this[WRITEENTRY] - const br = entry.blockRemain - const c = (br >= chunk.length && position === 0) ? chunk - : chunk.slice(position, position + br) - - entry.write(c) - - if (!entry.blockRemain) { - this[STATE] = 'header' - this[WRITEENTRY] = null - entry.end() - } - - return c.length - } - - [CONSUMEMETA] (chunk, position) { - const entry = this[WRITEENTRY] - const ret = this[CONSUMEBODY](chunk, position) - - // if we finished, then the entry is reset - if (!this[WRITEENTRY]) { - this[EMITMETA](entry) - } - - return ret - } - - [EMIT] (ev, data, extra) { - if (!this[QUEUE].length && !this[READENTRY]) { - this.emit(ev, data, extra) - } else { - this[QUEUE].push([ev, data, extra]) - } - } - - [EMITMETA] (entry) { - this[EMIT]('meta', this[META]) - switch (entry.type) { - case 'ExtendedHeader': - case 'OldExtendedHeader': - this[EX] = Pax.parse(this[META], this[EX], false) - break - - case 'GlobalExtendedHeader': - this[GEX] = Pax.parse(this[META], this[GEX], true) - break - - case 'NextFileHasLongPath': - case 'OldGnuLongPath': - this[EX] = this[EX] || Object.create(null) - this[EX].path = this[META].replace(/\0.*/, '') - break - - case 'NextFileHasLongLinkpath': - this[EX] = this[EX] || Object.create(null) - this[EX].linkpath = this[META].replace(/\0.*/, '') - break - - /* istanbul ignore next */ - default: throw new Error('unknown meta: ' + entry.type) - } - } - - abort (error) { - this[ABORTED] = true - this.emit('abort', error) - // always throws, even in non-strict mode - this.warn('TAR_ABORT', error, { recoverable: false }) - } - - write (chunk) { - if (this[ABORTED]) { - return - } - - // first write, might be gzipped - const needSniff = this[UNZIP] === null || - this.brotli === undefined && this[UNZIP] === false - if (needSniff && chunk) { - if (this[BUFFER]) { - chunk = Buffer.concat([this[BUFFER], chunk]) - this[BUFFER] = null - } - if (chunk.length < gzipHeader.length) { - this[BUFFER] = chunk - return true - } - - // look for gzip header - for (let i = 0; 
this[UNZIP] === null && i < gzipHeader.length; i++) { - if (chunk[i] !== gzipHeader[i]) { - this[UNZIP] = false - } - } - - const maybeBrotli = this.brotli === undefined - if (this[UNZIP] === false && maybeBrotli) { - // read the first header to see if it's a valid tar file. If so, - // we can safely assume that it's not actually brotli, despite the - // .tbr or .tar.br file extension. - // if we ended before getting a full chunk, yes, def brotli - if (chunk.length < 512) { - if (this[ENDED]) { - this.brotli = true - } else { - this[BUFFER] = chunk - return true - } - } else { - // if it's tar, it's pretty reliably not brotli, chances of - // that happening are astronomical. - try { - new Header(chunk.slice(0, 512)) - this.brotli = false - } catch (_) { - this.brotli = true - } - } - } - - if (this[UNZIP] === null || (this[UNZIP] === false && this.brotli)) { - const ended = this[ENDED] - this[ENDED] = false - this[UNZIP] = this[UNZIP] === null - ? new zlib.Unzip() - : new zlib.BrotliDecompress() - this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk)) - this[UNZIP].on('error', er => this.abort(er)) - this[UNZIP].on('end', _ => { - this[ENDED] = true - this[CONSUMECHUNK]() - }) - this[WRITING] = true - const ret = this[UNZIP][ended ? 'end' : 'write'](chunk) - this[WRITING] = false - return ret - } - } - - this[WRITING] = true - if (this[UNZIP]) { - this[UNZIP].write(chunk) - } else { - this[CONSUMECHUNK](chunk) - } - this[WRITING] = false - - // return false if there's a queue, or if the current entry isn't flowing - const ret = - this[QUEUE].length ? false : - this[READENTRY] ? this[READENTRY].flowing : - true - - // if we have no queue, then that means a clogged READENTRY - if (!ret && !this[QUEUE].length) { - this[READENTRY].once('drain', _ => this.emit('drain')) - } - - return ret - } - - [BUFFERCONCAT] (c) { - if (c && !this[ABORTED]) { - this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c - } - } - - [MAYBEEND] () { - if (this[ENDED] && - !this[EMITTEDEND] && - !this[ABORTED] && - !this[CONSUMING]) { - this[EMITTEDEND] = true - const entry = this[WRITEENTRY] - if (entry && entry.blockRemain) { - // truncated, likely a damaged file - const have = this[BUFFER] ? this[BUFFER].length : 0 - this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${ - entry.blockRemain} more bytes, only ${have} available)`, { entry }) - if (this[BUFFER]) { - entry.write(this[BUFFER]) - } - entry.end() - } - this[EMIT](DONE) - } - } - - [CONSUMECHUNK] (chunk) { - if (this[CONSUMING]) { - this[BUFFERCONCAT](chunk) - } else if (!chunk && !this[BUFFER]) { - this[MAYBEEND]() - } else { - this[CONSUMING] = true - if (this[BUFFER]) { - this[BUFFERCONCAT](chunk) - const c = this[BUFFER] - this[BUFFER] = null - this[CONSUMECHUNKSUB](c) - } else { - this[CONSUMECHUNKSUB](chunk) - } - - while (this[BUFFER] && - this[BUFFER].length >= 512 && - !this[ABORTED] && - !this[SAW_EOF]) { - const c = this[BUFFER] - this[BUFFER] = null - this[CONSUMECHUNKSUB](c) - } - this[CONSUMING] = false - } - - if (!this[BUFFER] || this[ENDED]) { - this[MAYBEEND]() - } - } - - [CONSUMECHUNKSUB] (chunk) { - // we know that we are in CONSUMING mode, so anything written goes into - // the buffer. Advance the position and put any remainder in the buffer. 
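To make that invariant concrete, a small arithmetic sketch (the chunk size is invented):

    // a 1300-byte chunk arriving in 'header' state is consumed in
    // 512-byte steps; the sub-block remainder is stashed in this[BUFFER]
    // and prepended to the next write()
    const chunkLength = 1300
    const consumed = Math.floor(chunkLength / 512) * 512  // 1024 (two blocks)
    const buffered = chunkLength - consumed               // 276 bytes held over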
- let position = 0 - const length = chunk.length - while (position + 512 <= length && !this[ABORTED] && !this[SAW_EOF]) { - switch (this[STATE]) { - case 'begin': - case 'header': - this[CONSUMEHEADER](chunk, position) - position += 512 - break - - case 'ignore': - case 'body': - position += this[CONSUMEBODY](chunk, position) - break - - case 'meta': - position += this[CONSUMEMETA](chunk, position) - break - - /* istanbul ignore next */ - default: - throw new Error('invalid state: ' + this[STATE]) - } - } - - if (position < length) { - if (this[BUFFER]) { - this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]]) - } else { - this[BUFFER] = chunk.slice(position) - } - } - } - - end (chunk) { - if (!this[ABORTED]) { - if (this[UNZIP]) { - this[UNZIP].end(chunk) - } else { - this[ENDED] = true - if (this.brotli === undefined) chunk = chunk || Buffer.alloc(0) - this.write(chunk) - } - } - } -}) diff --git a/lib/path-reservations.js b/lib/path-reservations.js deleted file mode 100644 index 62890060..00000000 --- a/lib/path-reservations.js +++ /dev/null @@ -1,163 +0,0 @@ -// A path exclusive reservation system -// reserve([list, of, paths], fn) -// When the fn is first in line for all its paths, it -// is called with a cb that clears the reservation. -// -// Used by async unpack to avoid clobbering paths in use, -// while still allowing maximal safe parallelization. - -const assert = require('assert') -const normalize = require('./normalize-unicode.js') -const stripSlashes = require('./strip-trailing-slashes.js') -const { join } = require('path') - -const platform = - process.env.TESTING_TAR_FAKE_PLATFORM || process.platform -const isWindows = platform === 'win32' - -module.exports = () => { - // path => [function or Set] - // A Set object means a directory reservation - // A fn is a direct reservation on that path - const queues = new Map() - - // fn => {paths:[path,...], dirs:[path, ...]} - const reservations = new Map() - - // return a set of parent dirs for a given path - // '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d'] - const getDirs = path => { - const dirs = path - .split('/') - .slice(0, -1) - .reduce((set, path) => { - if (set.length) { - path = join(set[set.length - 1], path) - } - set.push(path || '/') - return set - }, []) - return dirs - } - - // functions currently running - const running = new Set() - - // return the queues for each path the function cares about - // fn => {paths, dirs} - const getQueues = fn => { - const res = reservations.get(fn) - /* istanbul ignore if - unpossible */ - if (!res) { - throw new Error('function does not have any path reservations') - } - return { - paths: res.paths.map(path => queues.get(path)), - dirs: [...res.dirs].map(path => queues.get(path)), - } - } - - // check if fn is first in line for all its paths, and is - // included in the first set for all its dir queues - const check = fn => { - const { paths, dirs } = getQueues(fn) - return ( - paths.every(q => q && q[0] === fn) && - dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)) - ) - } - - // run the function if it's first in line and not already running - const run = fn => { - if (running.has(fn) || !check(fn)) { - return false - } - running.add(fn) - fn(() => clear(fn)) - return true - } - - const clear = fn => { - if (!running.has(fn)) { - return false - } - - const { paths, dirs } = reservations.get(fn) - const next = new Set() - - paths.forEach(path => { - const q = queues.get(path) - assert.equal(q[0], fn) - if (q.length === 1) { - queues.delete(path) - } 
else { - q.shift() - if (typeof q[0] === 'function') { - next.add(q[0]) - } else { - q[0].forEach(fn => next.add(fn)) - } - } - }) - - dirs.forEach(dir => { - const q = queues.get(dir) - assert(q[0] instanceof Set) - if (q[0].size === 1 && q.length === 1) { - queues.delete(dir) - } else if (q[0].size === 1) { - q.shift() - - next.add(q[0]) - } else { - q[0].delete(fn) - } - }) - running.delete(fn) - - next.forEach(fn => run(fn)) - return true - } - - const reserve = (paths, fn) => { - // collide on matches across case and unicode normalization - // On windows, thanks to the magic of 8.3 shortnames, it is fundamentally - // impossible to determine whether two paths refer to the same thing on - // disk, without asking the kernel for a shortname. - // So, we just pretend that every path matches every other path here, - // effectively removing all parallelization on windows. - paths = isWindows - ? ['win32 parallelization disabled'] - : paths.map(p => { - // don't need normPath, because we skip this entirely for windows - return stripSlashes(join(normalize(p))).toLowerCase() - }) - - const dirs = new Set( - paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)), - ) - reservations.set(fn, { dirs, paths }) - paths.forEach(path => { - const q = queues.get(path) - if (!q) { - queues.set(path, [fn]) - } else { - q.push(fn) - } - }) - dirs.forEach(dir => { - const q = queues.get(dir) - if (!q) { - queues.set(dir, [new Set([fn])]) - } else if (q[q.length - 1] instanceof Set) { - q[q.length - 1].add(fn) - } else { - q.push(new Set([fn])) - } - }) - - return run(fn) - } - - return { check, reserve } -} diff --git a/lib/pax.js b/lib/pax.js deleted file mode 100644 index 4a7ca853..00000000 --- a/lib/pax.js +++ /dev/null @@ -1,150 +0,0 @@ -'use strict' -const Header = require('./header.js') -const path = require('path') - -class Pax { - constructor (obj, global) { - this.atime = obj.atime || null - this.charset = obj.charset || null - this.comment = obj.comment || null - this.ctime = obj.ctime || null - this.gid = obj.gid || null - this.gname = obj.gname || null - this.linkpath = obj.linkpath || null - this.mtime = obj.mtime || null - this.path = obj.path || null - this.size = obj.size || null - this.uid = obj.uid || null - this.uname = obj.uname || null - this.dev = obj.dev || null - this.ino = obj.ino || null - this.nlink = obj.nlink || null - this.global = global || false - } - - encode () { - const body = this.encodeBody() - if (body === '') { - return null - } - - const bodyLen = Buffer.byteLength(body) - // round up to 512 bytes - // add 512 for header - const bufLen = 512 * Math.ceil(1 + bodyLen / 512) - const buf = Buffer.allocUnsafe(bufLen) - - // 0-fill the header section, it might not hit every field - for (let i = 0; i < 512; i++) { - buf[i] = 0 - } - - new Header({ - // XXX split the path - // then the path should be PaxHeader + basename, but less than 99, - // prepend with the dirname - path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99), - mode: this.mode || 0o644, - uid: this.uid || null, - gid: this.gid || null, - size: bodyLen, - mtime: this.mtime || null, - type: this.global ? 
'GlobalExtendedHeader' : 'ExtendedHeader', - linkpath: '', - uname: this.uname || '', - gname: this.gname || '', - devmaj: 0, - devmin: 0, - atime: this.atime || null, - ctime: this.ctime || null, - }).encode(buf) - - buf.write(body, 512, bodyLen, 'utf8') - - // null pad after the body - for (let i = bodyLen + 512; i < buf.length; i++) { - buf[i] = 0 - } - - return buf - } - - encodeBody () { - return ( - this.encodeField('path') + - this.encodeField('ctime') + - this.encodeField('atime') + - this.encodeField('dev') + - this.encodeField('ino') + - this.encodeField('nlink') + - this.encodeField('charset') + - this.encodeField('comment') + - this.encodeField('gid') + - this.encodeField('gname') + - this.encodeField('linkpath') + - this.encodeField('mtime') + - this.encodeField('size') + - this.encodeField('uid') + - this.encodeField('uname') - ) - } - - encodeField (field) { - if (this[field] === null || this[field] === undefined) { - return '' - } - const v = this[field] instanceof Date ? this[field].getTime() / 1000 - : this[field] - const s = ' ' + - (field === 'dev' || field === 'ino' || field === 'nlink' - ? 'SCHILY.' : '') + - field + '=' + v + '\n' - const byteLen = Buffer.byteLength(s) - // the digits includes the length of the digits in ascii base-10 - // so if it's 9 characters, then adding 1 for the 9 makes it 10 - // which makes it 11 chars. - let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1 - if (byteLen + digits >= Math.pow(10, digits)) { - digits += 1 - } - const len = digits + byteLen - return len + s - } -} - -Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g) - -const merge = (a, b) => - b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a - -const parseKV = string => - string - .replace(/\n$/, '') - .split('\n') - .reduce(parseKVLine, Object.create(null)) - -const parseKVLine = (set, line) => { - const n = parseInt(line, 10) - - // XXX Values with \n in them will fail this. - // Refactor to not be a naive line-by-line parse. - if (n !== Buffer.byteLength(line) + 1) { - return set - } - - line = line.slice((n + ' ').length) - const kv = line.split('=') - const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1') - if (!k) { - return set - } - - const v = kv.join('=') - set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) - ? new Date(v * 1000) - : /^[0-9]+$/.test(v) ? +v - : v - return set -} - -module.exports = Pax diff --git a/lib/read-entry.js b/lib/read-entry.js deleted file mode 100644 index 6186266e..00000000 --- a/lib/read-entry.js +++ /dev/null @@ -1,107 +0,0 @@ -'use strict' -const { Minipass } = require('minipass') -const normPath = require('./normalize-windows-path.js') - -const SLURP = Symbol('slurp') -module.exports = class ReadEntry extends Minipass { - constructor (header, ex, gex) { - super() - // read entries always start life paused. this is to avoid the - // situation where Minipass's auto-ending empty streams results - // in an entry ending before we're ready for it. 
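The self-referential length prefix computed by encodeField above is easy to get wrong, so here is a worked example (a sketch; `record` is a hypothetical helper reimplementing the same arithmetic):

    // a pax record is '<len> <key>=<value>\n', where <len> counts the
    // entire record in bytes, including its own decimal digits
    const record = (key, value) => {
      const s = ' ' + key + '=' + value + '\n'
      const byteLen = Buffer.byteLength(s)
      let digits = Math.floor(Math.log10(byteLen)) + 1
      if (byteLen + digits >= Math.pow(10, digits)) {
        digits += 1
      }
      return (digits + byteLen) + s
    }

    console.log(JSON.stringify(record('path', 'a'))) // "9 path=a\n": 9 bytes total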
- this.pause() - this.extended = ex - this.globalExtended = gex - this.header = header - this.startBlockSize = 512 * Math.ceil(header.size / 512) - this.blockRemain = this.startBlockSize - this.remain = header.size - this.type = header.type - this.meta = false - this.ignore = false - switch (this.type) { - case 'File': - case 'OldFile': - case 'Link': - case 'SymbolicLink': - case 'CharacterDevice': - case 'BlockDevice': - case 'Directory': - case 'FIFO': - case 'ContiguousFile': - case 'GNUDumpDir': - break - - case 'NextFileHasLongLinkpath': - case 'NextFileHasLongPath': - case 'OldGnuLongPath': - case 'GlobalExtendedHeader': - case 'ExtendedHeader': - case 'OldExtendedHeader': - this.meta = true - break - - // NOTE: gnutar and bsdtar treat unrecognized types as 'File' - // it may be worth doing the same, but with a warning. - default: - this.ignore = true - } - - this.path = normPath(header.path) - this.mode = header.mode - if (this.mode) { - this.mode = this.mode & 0o7777 - } - this.uid = header.uid - this.gid = header.gid - this.uname = header.uname - this.gname = header.gname - this.size = header.size - this.mtime = header.mtime - this.atime = header.atime - this.ctime = header.ctime - this.linkpath = normPath(header.linkpath) - this.uname = header.uname - this.gname = header.gname - - if (ex) { - this[SLURP](ex) - } - if (gex) { - this[SLURP](gex, true) - } - } - - write (data) { - const writeLen = data.length - if (writeLen > this.blockRemain) { - throw new Error('writing more to entry than is appropriate') - } - - const r = this.remain - const br = this.blockRemain - this.remain = Math.max(0, r - writeLen) - this.blockRemain = Math.max(0, br - writeLen) - if (this.ignore) { - return true - } - - if (r >= writeLen) { - return super.write(data) - } - - // r < writeLen - return super.write(data.slice(0, r)) - } - - [SLURP] (ex, global) { - for (const k in ex) { - // we slurp in everything except for the path attribute in - // a global extended header, because that's weird. - if (ex[k] !== null && ex[k] !== undefined && - !(global && k === 'path')) { - this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k] - } - } - } -} diff --git a/lib/replace.js b/lib/replace.js deleted file mode 100644 index 8db6800b..00000000 --- a/lib/replace.js +++ /dev/null @@ -1,246 +0,0 @@ -'use strict' - -// tar -r -const hlo = require('./high-level-opt.js') -const Pack = require('./pack.js') -const fs = require('fs') -const fsm = require('fs-minipass') -const t = require('./list.js') -const path = require('path') - -// starting at the head of the file, read a Header -// If the checksum is invalid, that's our position to start writing -// If it is, jump forward by the specified size (round up to 512) -// and try again. -// Write the new Pack stream starting there. - -const Header = require('./header.js') - -module.exports = (opt_, files, cb) => { - const opt = hlo(opt_) - - if (!opt.file) { - throw new TypeError('file is required') - } - - if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) { - throw new TypeError('cannot append to compressed archives') - } - - if (!files || !Array.isArray(files) || !files.length) { - throw new TypeError('no files or directories specified') - } - - files = Array.from(files) - - return opt.sync ? 
replaceSync(opt, files) - : replace(opt, files, cb) -} - -const replaceSync = (opt, files) => { - const p = new Pack.Sync(opt) - - let threw = true - let fd - let position - - try { - try { - fd = fs.openSync(opt.file, 'r+') - } catch (er) { - if (er.code === 'ENOENT') { - fd = fs.openSync(opt.file, 'w+') - } else { - throw er - } - } - - const st = fs.fstatSync(fd) - const headBuf = Buffer.alloc(512) - - POSITION: for (position = 0; position < st.size; position += 512) { - for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) { - bytes = fs.readSync( - fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos - ) - - if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) { - throw new Error('cannot append to compressed archives') - } - - if (!bytes) { - break POSITION - } - } - - const h = new Header(headBuf) - if (!h.cksumValid) { - break - } - const entryBlockSize = 512 * Math.ceil(h.size / 512) - if (position + entryBlockSize + 512 > st.size) { - break - } - // the 512 for the header we just parsed will be added as well - // also jump ahead all the blocks for the body - position += entryBlockSize - if (opt.mtimeCache) { - opt.mtimeCache.set(h.path, h.mtime) - } - } - threw = false - - streamSync(opt, p, position, fd, files) - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } - } -} - -const streamSync = (opt, p, position, fd, files) => { - const stream = new fsm.WriteStreamSync(opt.file, { - fd: fd, - start: position, - }) - p.pipe(stream) - addFilesSync(p, files) -} - -const replace = (opt, files, cb) => { - files = Array.from(files) - const p = new Pack(opt) - - const getPos = (fd, size, cb_) => { - const cb = (er, pos) => { - if (er) { - fs.close(fd, _ => cb_(er)) - } else { - cb_(null, pos) - } - } - - let position = 0 - if (size === 0) { - return cb(null, 0) - } - - let bufPos = 0 - const headBuf = Buffer.alloc(512) - const onread = (er, bytes) => { - if (er) { - return cb(er) - } - bufPos += bytes - if (bufPos < 512 && bytes) { - return fs.read( - fd, headBuf, bufPos, headBuf.length - bufPos, - position + bufPos, onread - ) - } - - if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) { - return cb(new Error('cannot append to compressed archives')) - } - - // truncated header - if (bufPos < 512) { - return cb(null, position) - } - - const h = new Header(headBuf) - if (!h.cksumValid) { - return cb(null, position) - } - - const entryBlockSize = 512 * Math.ceil(h.size / 512) - if (position + entryBlockSize + 512 > size) { - return cb(null, position) - } - - position += entryBlockSize + 512 - if (position >= size) { - return cb(null, position) - } - - if (opt.mtimeCache) { - opt.mtimeCache.set(h.path, h.mtime) - } - bufPos = 0 - fs.read(fd, headBuf, 0, 512, position, onread) - } - fs.read(fd, headBuf, 0, 512, position, onread) - } - - const promise = new Promise((resolve, reject) => { - p.on('error', reject) - let flag = 'r+' - const onopen = (er, fd) => { - if (er && er.code === 'ENOENT' && flag === 'r+') { - flag = 'w+' - return fs.open(opt.file, flag, onopen) - } - - if (er) { - return reject(er) - } - - fs.fstat(fd, (er, st) => { - if (er) { - return fs.close(fd, () => reject(er)) - } - - getPos(fd, st.size, (er, position) => { - if (er) { - return reject(er) - } - const stream = new fsm.WriteStream(opt.file, { - fd: fd, - start: position, - }) - p.pipe(stream) - stream.on('error', reject) - stream.on('close', resolve) - addFilesAsync(p, files) - }) - }) - } - fs.open(opt.file, flag, onopen) - }) - - return cb ? 
promise.then(cb, cb) : promise -} - -const addFilesSync = (p, files) => { - files.forEach(file => { - if (file.charAt(0) === '@') { - t({ - file: path.resolve(p.cwd, file.slice(1)), - sync: true, - noResume: true, - onentry: entry => p.add(entry), - }) - } else { - p.add(file) - } - }) - p.end() -} - -const addFilesAsync = (p, files) => { - while (files.length) { - const file = files.shift() - if (file.charAt(0) === '@') { - return t({ - file: path.resolve(p.cwd, file.slice(1)), - noResume: true, - onentry: entry => p.add(entry), - }).then(_ => addFilesAsync(p, files)) - } else { - p.add(file) - } - } - p.end() -} diff --git a/lib/strip-absolute-path.js b/lib/strip-absolute-path.js deleted file mode 100644 index 185e2dea..00000000 --- a/lib/strip-absolute-path.js +++ /dev/null @@ -1,24 +0,0 @@ -// unix absolute paths are also absolute on win32, so we use this for both -const { isAbsolute, parse } = require('path').win32 - -// returns [root, stripped] -// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in -// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip / -// explicitly if it's the first character. -// drive-specific relative paths on Windows get their root stripped off even -// though they are not absolute, so `c:../foo` becomes ['c:', '../foo'] -module.exports = path => { - let r = '' - - let parsed = parse(path) - while (isAbsolute(path) || parsed.root) { - // windows will think that //x/y/z has a "root" of //x/y/ - // but strip the //?/C:/ off of //?/C:/path - const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? '/' - : parsed.root - path = path.slice(root.length) - r += root - parsed = parse(path) - } - return [r, path] -} diff --git a/lib/strip-trailing-slashes.js b/lib/strip-trailing-slashes.js deleted file mode 100644 index 3e3ecec5..00000000 --- a/lib/strip-trailing-slashes.js +++ /dev/null @@ -1,13 +0,0 @@ -// warning: extremely hot code path. -// This has been meticulously optimized for use -// within npm install on large package trees. -// Do not edit without careful benchmarking. -module.exports = str => { - let i = str.length - 1 - let slashesStart = -1 - while (i > -1 && str.charAt(i) === '/') { - slashesStart = i - i-- - } - return slashesStart === -1 ? 
str : str.slice(0, slashesStart) -} diff --git a/lib/types.js b/lib/types.js deleted file mode 100644 index 7bfc2546..00000000 --- a/lib/types.js +++ /dev/null @@ -1,44 +0,0 @@ -'use strict' -// map types from key to human-friendly name -exports.name = new Map([ - ['0', 'File'], - // same as File - ['', 'OldFile'], - ['1', 'Link'], - ['2', 'SymbolicLink'], - // Devices and FIFOs aren't fully supported - // they are parsed, but skipped when unpacking - ['3', 'CharacterDevice'], - ['4', 'BlockDevice'], - ['5', 'Directory'], - ['6', 'FIFO'], - // same as File - ['7', 'ContiguousFile'], - // pax headers - ['g', 'GlobalExtendedHeader'], - ['x', 'ExtendedHeader'], - // vendor-specific stuff - // skip - ['A', 'SolarisACL'], - // like 5, but with data, which should be skipped - ['D', 'GNUDumpDir'], - // metadata only, skip - ['I', 'Inode'], - // data = link path of next file - ['K', 'NextFileHasLongLinkpath'], - // data = path of next file - ['L', 'NextFileHasLongPath'], - // skip - ['M', 'ContinuationFile'], - // like L - ['N', 'OldGnuLongPath'], - // skip - ['S', 'SparseFile'], - // skip - ['V', 'TapeVolumeHeader'], - // like x - ['X', 'OldExtendedHeader'], -]) - -// map the other direction -exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]])) diff --git a/lib/unpack.js b/lib/unpack.js deleted file mode 100644 index 03172e2c..00000000 --- a/lib/unpack.js +++ /dev/null @@ -1,923 +0,0 @@ -'use strict' - -// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet. -// but the path reservations are required to avoid race conditions where -// parallelized unpack ops may mess with one another, due to dependencies -// (like a Link depending on its target) or destructive operations (like -// clobbering an fs object to create one of a different type.) - -const assert = require('assert') -const Parser = require('./parse.js') -const fs = require('fs') -const fsm = require('fs-minipass') -const path = require('path') -const mkdir = require('./mkdir.js') -const wc = require('./winchars.js') -const pathReservations = require('./path-reservations.js') -const stripAbsolutePath = require('./strip-absolute-path.js') -const normPath = require('./normalize-windows-path.js') -const stripSlash = require('./strip-trailing-slashes.js') -const normalize = require('./normalize-unicode.js') - -const ONENTRY = Symbol('onEntry') -const CHECKFS = Symbol('checkFs') -const CHECKFS2 = Symbol('checkFs2') -const PRUNECACHE = Symbol('pruneCache') -const ISREUSABLE = Symbol('isReusable') -const MAKEFS = Symbol('makeFs') -const FILE = Symbol('file') -const DIRECTORY = Symbol('directory') -const LINK = Symbol('link') -const SYMLINK = Symbol('symlink') -const HARDLINK = Symbol('hardlink') -const UNSUPPORTED = Symbol('unsupported') -const CHECKPATH = Symbol('checkPath') -const MKDIR = Symbol('mkdir') -const ONERROR = Symbol('onError') -const PENDING = Symbol('pending') -const PEND = Symbol('pend') -const UNPEND = Symbol('unpend') -const ENDED = Symbol('ended') -const MAYBECLOSE = Symbol('maybeClose') -const SKIP = Symbol('skip') -const DOCHOWN = Symbol('doChown') -const UID = Symbol('uid') -const GID = Symbol('gid') -const CHECKED_CWD = Symbol('checkedCwd') -const crypto = require('crypto') -const getFlag = require('./get-write-flag.js') -const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform -const isWindows = platform === 'win32' -const DEFAULT_MAX_DEPTH = 1024 - -// Unlinks on Windows are not atomic. 
-// -// This means that if you have a file entry, followed by another -// file entry with an identical name, and you cannot re-use the file -// (because it's a hardlink, or because unlink:true is set, or it's -// Windows, which does not have useful nlink values), then the unlink -// will be committed to the disk AFTER the new file has been written -// over the old one, deleting the new file. -// -// To work around this, on Windows systems, we rename the file and then -// delete the renamed file. It's a sloppy kludge, but frankly, I do not -// know of a better way to do this, given windows' non-atomic unlink -// semantics. -// -// See: https://github.com/npm/node-tar/issues/183 -/* istanbul ignore next */ -const unlinkFile = (path, cb) => { - if (!isWindows) { - return fs.unlink(path, cb) - } - - const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') - fs.rename(path, name, er => { - if (er) { - return cb(er) - } - fs.unlink(name, cb) - }) -} - -/* istanbul ignore next */ -const unlinkFileSync = path => { - if (!isWindows) { - return fs.unlinkSync(path) - } - - const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') - fs.renameSync(path, name) - fs.unlinkSync(name) -} - -// this.gid, entry.gid, this.processUid -const uint32 = (a, b, c) => - a === a >>> 0 ? a - : b === b >>> 0 ? b - : c - -// clear the cache if it's a case-insensitive unicode-squashing match. -// we can't know if the current file system is case-sensitive or supports -// unicode fully, so we check for similarity on the maximally compatible -// representation. Err on the side of pruning, since all it's doing is -// preventing lstats, and it's not the end of the world if we get a false -// positive. -// Note that on windows, we always drop the entire cache whenever a -// symbolic link is encountered, because 8.3 filenames are impossible -// to reason about, and collisions are hazards rather than just failures. -const cacheKeyNormalize = path => stripSlash(normPath(normalize(path))) - .toLowerCase() - -const pruneCache = (cache, abs) => { - abs = cacheKeyNormalize(abs) - for (const path of cache.keys()) { - const pnorm = cacheKeyNormalize(path) - if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) { - cache.delete(path) - } - } -} - -const dropCache = cache => { - for (const key of cache.keys()) { - cache.delete(key) - } -} - -class Unpack extends Parser { - constructor (opt) { - if (!opt) { - opt = {} - } - - opt.ondone = _ => { - this[ENDED] = true - this[MAYBECLOSE]() - } - - super(opt) - - this[CHECKED_CWD] = false - - this.reservations = pathReservations() - - this.transform = typeof opt.transform === 'function' ? 
opt.transform : null
-
-    this.writable = true
-    this.readable = false
-
-    this[PENDING] = 0
-    this[ENDED] = false
-
-    this.dirCache = opt.dirCache || new Map()
-
-    if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
-      // need both or neither
-      if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number') {
-        throw new TypeError('cannot set owner without number uid and gid')
-      }
-      if (opt.preserveOwner) {
-        throw new TypeError(
-          'cannot preserve owner in archive and also set owner explicitly')
-      }
-      this.uid = opt.uid
-      this.gid = opt.gid
-      this.setOwner = true
-    } else {
-      this.uid = null
-      this.gid = null
-      this.setOwner = false
-    }
-
-    // default true for root
-    if (opt.preserveOwner === undefined && typeof opt.uid !== 'number') {
-      this.preserveOwner = process.getuid && process.getuid() === 0
-    } else {
-      this.preserveOwner = !!opt.preserveOwner
-    }
-
-    this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
-      process.getuid() : null
-    this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
-      process.getgid() : null
-
-    // prevent excessively deep nesting of subfolders
-    // set to `Infinity` to remove this restriction
-    this.maxDepth = typeof opt.maxDepth === 'number'
-      ? opt.maxDepth
-      : DEFAULT_MAX_DEPTH
-
-    // mostly just for testing, but useful in some cases.
-    // Forcibly trigger a chown on every entry, no matter what
-    this.forceChown = opt.forceChown === true
-
-    // turn ><?| in filenames into 0xf000-higher encoded forms
-    this.win32 = !!opt.win32 || isWindows
-
-    // do not unpack over files that are newer than what's in the archive
-    this.newer = !!opt.newer
-
-    // do not unpack over ANY files
-    this.keep = !!opt.keep
-
-    // do not set mtime/atime of extracted entries
-    this.noMtime = !!opt.noMtime
-
-    // allow .. in path entries
-    this.preservePaths = !!opt.preservePaths
-
-    // unlink files and links before writing. This breaks existing hard
-    // links, and removes symlink directories rather than erroring
-    this.unlink = !!opt.unlink
-
-    this.cwd = normPath(path.resolve(opt.cwd || process.cwd()))
-    this.strip = +opt.strip || 0
-    // if we're not chmodding, then we don't need the process umask
-    this.processUmask = opt.noChmod ? 0 : process.umask()
-    this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
-
-    // default mode for dirs created as parents
-    this.dmode = opt.dmode || (0o0777 & (~this.umask))
-    this.fmode = opt.fmode || (0o0666 & (~this.umask))
-
-    this.on('entry', entry => this[ONENTRY](entry))
-  }
-
-  // a bad or damaged archive is a warning for Parser, but an error
-  // when extracting. Mark those errors as unrecoverable, because
-  // the Unpack contract cannot be met.
-  warn (code, msg, data = {}) {
-    if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
-      data.recoverable = false
-    }
-    return super.warn(code, msg, data)
-  }
-
-  [MAYBECLOSE] () {
-    if (this[ENDED] && this[PENDING] === 0) {
-      this.emit('prefinish')
-      this.emit('finish')
-      this.emit('end')
-    }
-  }
-
-  [CHECKPATH] (entry) {
-    const p = normPath(entry.path)
-    const parts = p.split('/')
-
-    if (this.strip) {
-      if (parts.length < this.strip) {
-        return false
-      }
-      if (entry.type === 'Link') {
-        const linkparts = normPath(entry.linkpath).split('/')
-        if (linkparts.length >= this.strip) {
-          entry.linkpath = linkparts.slice(this.strip).join('/')
-        } else {
-          return false
-        }
-      }
-      parts.splice(0, this.strip)
-      entry.path = parts.join('/')
-    }
-
-    if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
-      this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
-        entry,
-        path: p,
-        depth: parts.length,
-        maxDepth: this.maxDepth,
-      })
-      return false
-    }
-
-    if (!this.preservePaths) {
-      if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) {
-        this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
-          entry,
-          path: p,
-        })
-        return false
-      }
-
-      // strip off the root
-      const [root, stripped] = stripAbsolutePath(p)
-      if (root) {
-        entry.path = stripped
-        this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
-          entry,
-          path: p,
-        })
-      }
-    }
-
-    if (path.isAbsolute(entry.path)) {
-      entry.absolute = normPath(path.resolve(entry.path))
-    } else {
-      entry.absolute = normPath(path.resolve(this.cwd, entry.path))
-    }
-
-    // if we somehow ended up with a path that escapes the cwd, and we are
-    // not in preservePaths mode, then something is fishy! This should have
-    // been prevented above, so ignore this for coverage.
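// [editorial note: not part of the deleted source] A worked example of the
// escape check that follows, assuming a hypothetical cwd of '/extract':
//   '/extract/a/b'.indexOf('/extract' + '/') === 0  // inside cwd: allowed
//   '/extract2/x'.indexOf('/extract' + '/') !== 0   // escapes: rejected
// The extra `entry.absolute !== this.cwd` clause allows the entry that is
// the extraction target itself (for example, a bare '.' directory entry).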
- /* istanbul ignore if - defense in depth */ - if (!this.preservePaths && - entry.absolute.indexOf(this.cwd + '/') !== 0 && - entry.absolute !== this.cwd) { - this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', { - entry, - path: normPath(entry.path), - resolvedPath: entry.absolute, - cwd: this.cwd, - }) - return false - } - - // an archive can set properties on the extraction directory, but it - // may not replace the cwd with a different kind of thing entirely. - if (entry.absolute === this.cwd && - entry.type !== 'Directory' && - entry.type !== 'GNUDumpDir') { - return false - } - - // only encode : chars that aren't drive letter indicators - if (this.win32) { - const { root: aRoot } = path.win32.parse(entry.absolute) - entry.absolute = aRoot + wc.encode(entry.absolute.slice(aRoot.length)) - const { root: pRoot } = path.win32.parse(entry.path) - entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length)) - } - - return true - } - - [ONENTRY] (entry) { - if (!this[CHECKPATH](entry)) { - return entry.resume() - } - - assert.equal(typeof entry.absolute, 'string') - - switch (entry.type) { - case 'Directory': - case 'GNUDumpDir': - if (entry.mode) { - entry.mode = entry.mode | 0o700 - } - - // eslint-disable-next-line no-fallthrough - case 'File': - case 'OldFile': - case 'ContiguousFile': - case 'Link': - case 'SymbolicLink': - return this[CHECKFS](entry) - - case 'CharacterDevice': - case 'BlockDevice': - case 'FIFO': - default: - return this[UNSUPPORTED](entry) - } - } - - [ONERROR] (er, entry) { - // Cwd has to exist, or else nothing works. That's serious. - // Other errors are warnings, which raise the error in strict - // mode, but otherwise continue on. - if (er.name === 'CwdError') { - this.emit('error', er) - } else { - this.warn('TAR_ENTRY_ERROR', er, { entry }) - this[UNPEND]() - entry.resume() - } - } - - [MKDIR] (dir, mode, cb) { - mkdir(normPath(dir), { - uid: this.uid, - gid: this.gid, - processUid: this.processUid, - processGid: this.processGid, - umask: this.processUmask, - preserve: this.preservePaths, - unlink: this.unlink, - cache: this.dirCache, - cwd: this.cwd, - mode: mode, - noChmod: this.noChmod, - }, cb) - } - - [DOCHOWN] (entry) { - // in preserve owner mode, chown if the entry doesn't match process - // in set owner mode, chown if setting doesn't match process - return this.forceChown || - this.preserveOwner && - (typeof entry.uid === 'number' && entry.uid !== this.processUid || - typeof entry.gid === 'number' && entry.gid !== this.processGid) - || - (typeof this.uid === 'number' && this.uid !== this.processUid || - typeof this.gid === 'number' && this.gid !== this.processGid) - } - - [UID] (entry) { - return uint32(this.uid, entry.uid, this.processUid) - } - - [GID] (entry) { - return uint32(this.gid, entry.gid, this.processGid) - } - - [FILE] (entry, fullyDone) { - const mode = entry.mode & 0o7777 || this.fmode - const stream = new fsm.WriteStream(entry.absolute, { - flags: getFlag(entry.size), - mode: mode, - autoClose: false, - }) - stream.on('error', er => { - if (stream.fd) { - fs.close(stream.fd, () => {}) - } - - // flush all the data out so that we aren't left hanging - // if the error wasn't actually fatal. otherwise the parse - // is blocked, and we never proceed. 
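// [editorial note: not part of the deleted source] The line below swaps in a
// no-op write, so data still flowing from the parser is accepted and
// discarded; because it returns true, the stream never signals backpressure,
// and the paused parse can always continue. The pattern in isolation, with a
// hypothetical stream variable:
//   deadStream.write = () => true // accept and drop all further chunks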
- stream.write = () => true - this[ONERROR](er, entry) - fullyDone() - }) - - let actions = 1 - const done = er => { - if (er) { - /* istanbul ignore else - we should always have a fd by now */ - if (stream.fd) { - fs.close(stream.fd, () => {}) - } - - this[ONERROR](er, entry) - fullyDone() - return - } - - if (--actions === 0) { - fs.close(stream.fd, er => { - if (er) { - this[ONERROR](er, entry) - } else { - this[UNPEND]() - } - fullyDone() - }) - } - } - - stream.on('finish', _ => { - // if futimes fails, try utimes - // if utimes fails, fail with the original error - // same for fchown/chown - const abs = entry.absolute - const fd = stream.fd - - if (entry.mtime && !this.noMtime) { - actions++ - const atime = entry.atime || new Date() - const mtime = entry.mtime - fs.futimes(fd, atime, mtime, er => - er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er)) - : done()) - } - - if (this[DOCHOWN](entry)) { - actions++ - const uid = this[UID](entry) - const gid = this[GID](entry) - fs.fchown(fd, uid, gid, er => - er ? fs.chown(abs, uid, gid, er2 => done(er2 && er)) - : done()) - } - - done() - }) - - const tx = this.transform ? this.transform(entry) || entry : entry - if (tx !== entry) { - tx.on('error', er => { - this[ONERROR](er, entry) - fullyDone() - }) - entry.pipe(tx) - } - tx.pipe(stream) - } - - [DIRECTORY] (entry, fullyDone) { - const mode = entry.mode & 0o7777 || this.dmode - this[MKDIR](entry.absolute, mode, er => { - if (er) { - this[ONERROR](er, entry) - fullyDone() - return - } - - let actions = 1 - const done = _ => { - if (--actions === 0) { - fullyDone() - this[UNPEND]() - entry.resume() - } - } - - if (entry.mtime && !this.noMtime) { - actions++ - fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done) - } - - if (this[DOCHOWN](entry)) { - actions++ - fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done) - } - - done() - }) - } - - [UNSUPPORTED] (entry) { - entry.unsupported = true - this.warn('TAR_ENTRY_UNSUPPORTED', - `unsupported entry type: ${entry.type}`, { entry }) - entry.resume() - } - - [SYMLINK] (entry, done) { - this[LINK](entry, entry.linkpath, 'symlink', done) - } - - [HARDLINK] (entry, done) { - const linkpath = normPath(path.resolve(this.cwd, entry.linkpath)) - this[LINK](entry, linkpath, 'link', done) - } - - [PEND] () { - this[PENDING]++ - } - - [UNPEND] () { - this[PENDING]-- - this[MAYBECLOSE]() - } - - [SKIP] (entry) { - this[UNPEND]() - entry.resume() - } - - // Check if we can reuse an existing filesystem entry safely and - // overwrite it, rather than unlinking and recreating - // Windows doesn't report a useful nlink, so we just never reuse entries - [ISREUSABLE] (entry, st) { - return entry.type === 'File' && - !this.unlink && - st.isFile() && - st.nlink <= 1 && - !isWindows - } - - // check if a thing is there, and if so, try to clobber it - [CHECKFS] (entry) { - this[PEND]() - const paths = [entry.path] - if (entry.linkpath) { - paths.push(entry.linkpath) - } - this.reservations.reserve(paths, done => this[CHECKFS2](entry, done)) - } - - [PRUNECACHE] (entry) { - // if we are not creating a directory, and the path is in the dirCache, - // then that means we are about to delete the directory we created - // previously, and it is no longer going to be a directory, and neither - // is any of its children. - // If a symbolic link is encountered, all bets are off. There is no - // reasonable way to sanitize the cache in such a way we will be able to - // avoid having filesystem collisions. 
If this happens with a non-symlink - // entry, it'll just fail to unpack, but a symlink to a directory, using an - // 8.3 shortname or certain unicode attacks, can evade detection and lead - // to arbitrary writes to anywhere on the system. - if (entry.type === 'SymbolicLink') { - dropCache(this.dirCache) - } else if (entry.type !== 'Directory') { - pruneCache(this.dirCache, entry.absolute) - } - } - - [CHECKFS2] (entry, fullyDone) { - this[PRUNECACHE](entry) - - const done = er => { - this[PRUNECACHE](entry) - fullyDone(er) - } - - const checkCwd = () => { - this[MKDIR](this.cwd, this.dmode, er => { - if (er) { - this[ONERROR](er, entry) - done() - return - } - this[CHECKED_CWD] = true - start() - }) - } - - const start = () => { - if (entry.absolute !== this.cwd) { - const parent = normPath(path.dirname(entry.absolute)) - if (parent !== this.cwd) { - return this[MKDIR](parent, this.dmode, er => { - if (er) { - this[ONERROR](er, entry) - done() - return - } - afterMakeParent() - }) - } - } - afterMakeParent() - } - - const afterMakeParent = () => { - fs.lstat(entry.absolute, (lstatEr, st) => { - if (st && (this.keep || this.newer && st.mtime > entry.mtime)) { - this[SKIP](entry) - done() - return - } - if (lstatEr || this[ISREUSABLE](entry, st)) { - return this[MAKEFS](null, entry, done) - } - - if (st.isDirectory()) { - if (entry.type === 'Directory') { - const needChmod = !this.noChmod && - entry.mode && - (st.mode & 0o7777) !== entry.mode - const afterChmod = er => this[MAKEFS](er, entry, done) - if (!needChmod) { - return afterChmod() - } - return fs.chmod(entry.absolute, entry.mode, afterChmod) - } - // Not a dir entry, have to remove it. - // NB: the only way to end up with an entry that is the cwd - // itself, in such a way that == does not detect, is a - // tricky windows absolute path with UNC or 8.3 parts (and - // preservePaths:true, or else it will have been stripped). - // In that case, the user has opted out of path protections - // explicitly, so if they blow away the cwd, c'est la vie. 
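// [editorial note: not part of the deleted source] Concretely: if an archive
// holds a directory entry 'a/' followed by a non-directory entry 'a'
// (hypothetical example), the directory created for the first entry must be
// rmdir'ed below before the second entry can be written in its place; only
// the cwd itself is exempt, as described above.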
- if (entry.absolute !== this.cwd) { - return fs.rmdir(entry.absolute, er => - this[MAKEFS](er, entry, done)) - } - } - - // not a dir, and not reusable - // don't remove if the cwd, we want that error - if (entry.absolute === this.cwd) { - return this[MAKEFS](null, entry, done) - } - - unlinkFile(entry.absolute, er => - this[MAKEFS](er, entry, done)) - }) - } - - if (this[CHECKED_CWD]) { - start() - } else { - checkCwd() - } - } - - [MAKEFS] (er, entry, done) { - if (er) { - this[ONERROR](er, entry) - done() - return - } - - switch (entry.type) { - case 'File': - case 'OldFile': - case 'ContiguousFile': - return this[FILE](entry, done) - - case 'Link': - return this[HARDLINK](entry, done) - - case 'SymbolicLink': - return this[SYMLINK](entry, done) - - case 'Directory': - case 'GNUDumpDir': - return this[DIRECTORY](entry, done) - } - } - - [LINK] (entry, linkpath, link, done) { - // XXX: get the type ('symlink' or 'junction') for windows - fs[link](linkpath, entry.absolute, er => { - if (er) { - this[ONERROR](er, entry) - } else { - this[UNPEND]() - entry.resume() - } - done() - }) - } -} - -const callSync = fn => { - try { - return [null, fn()] - } catch (er) { - return [er, null] - } -} -class UnpackSync extends Unpack { - [MAKEFS] (er, entry) { - return super[MAKEFS](er, entry, () => {}) - } - - [CHECKFS] (entry) { - this[PRUNECACHE](entry) - - if (!this[CHECKED_CWD]) { - const er = this[MKDIR](this.cwd, this.dmode) - if (er) { - return this[ONERROR](er, entry) - } - this[CHECKED_CWD] = true - } - - // don't bother to make the parent if the current entry is the cwd, - // we've already checked it. - if (entry.absolute !== this.cwd) { - const parent = normPath(path.dirname(entry.absolute)) - if (parent !== this.cwd) { - const mkParent = this[MKDIR](parent, this.dmode) - if (mkParent) { - return this[ONERROR](mkParent, entry) - } - } - } - - const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute)) - if (st && (this.keep || this.newer && st.mtime > entry.mtime)) { - return this[SKIP](entry) - } - - if (lstatEr || this[ISREUSABLE](entry, st)) { - return this[MAKEFS](null, entry) - } - - if (st.isDirectory()) { - if (entry.type === 'Directory') { - const needChmod = !this.noChmod && - entry.mode && - (st.mode & 0o7777) !== entry.mode - const [er] = needChmod ? callSync(() => { - fs.chmodSync(entry.absolute, entry.mode) - }) : [] - return this[MAKEFS](er, entry) - } - // not a dir entry, have to remove it - const [er] = callSync(() => fs.rmdirSync(entry.absolute)) - this[MAKEFS](er, entry) - } - - // not a dir, and not reusable. - // don't remove if it's the cwd, since we want that error. - const [er] = entry.absolute === this.cwd ? [] - : callSync(() => unlinkFileSync(entry.absolute)) - this[MAKEFS](er, entry) - } - - [FILE] (entry, done) { - const mode = entry.mode & 0o7777 || this.fmode - - const oner = er => { - let closeError - try { - fs.closeSync(fd) - } catch (e) { - closeError = e - } - if (er || closeError) { - this[ONERROR](er || closeError, entry) - } - done() - } - - let fd - try { - fd = fs.openSync(entry.absolute, getFlag(entry.size), mode) - } catch (er) { - return oner(er) - } - const tx = this.transform ? 
this.transform(entry) || entry : entry - if (tx !== entry) { - tx.on('error', er => this[ONERROR](er, entry)) - entry.pipe(tx) - } - - tx.on('data', chunk => { - try { - fs.writeSync(fd, chunk, 0, chunk.length) - } catch (er) { - oner(er) - } - }) - - tx.on('end', _ => { - let er = null - // try both, falling futimes back to utimes - // if either fails, handle the first error - if (entry.mtime && !this.noMtime) { - const atime = entry.atime || new Date() - const mtime = entry.mtime - try { - fs.futimesSync(fd, atime, mtime) - } catch (futimeser) { - try { - fs.utimesSync(entry.absolute, atime, mtime) - } catch (utimeser) { - er = futimeser - } - } - } - - if (this[DOCHOWN](entry)) { - const uid = this[UID](entry) - const gid = this[GID](entry) - - try { - fs.fchownSync(fd, uid, gid) - } catch (fchowner) { - try { - fs.chownSync(entry.absolute, uid, gid) - } catch (chowner) { - er = er || fchowner - } - } - } - - oner(er) - }) - } - - [DIRECTORY] (entry, done) { - const mode = entry.mode & 0o7777 || this.dmode - const er = this[MKDIR](entry.absolute, mode) - if (er) { - this[ONERROR](er, entry) - done() - return - } - if (entry.mtime && !this.noMtime) { - try { - fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime) - } catch (er) {} - } - if (this[DOCHOWN](entry)) { - try { - fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry)) - } catch (er) {} - } - done() - entry.resume() - } - - [MKDIR] (dir, mode) { - try { - return mkdir.sync(normPath(dir), { - uid: this.uid, - gid: this.gid, - processUid: this.processUid, - processGid: this.processGid, - umask: this.processUmask, - preserve: this.preservePaths, - unlink: this.unlink, - cache: this.dirCache, - cwd: this.cwd, - mode: mode, - }) - } catch (er) { - return er - } - } - - [LINK] (entry, linkpath, link, done) { - try { - fs[link + 'Sync'](linkpath, entry.absolute) - done() - entry.resume() - } catch (er) { - return this[ONERROR](er, entry) - } - } -} - -Unpack.Sync = UnpackSync -module.exports = Unpack diff --git a/lib/update.js b/lib/update.js deleted file mode 100644 index 4d328543..00000000 --- a/lib/update.js +++ /dev/null @@ -1,40 +0,0 @@ -'use strict' - -// tar -u - -const hlo = require('./high-level-opt.js') -const r = require('./replace.js') -// just call tar.r with the filter and mtimeCache - -module.exports = (opt_, files, cb) => { - const opt = hlo(opt_) - - if (!opt.file) { - throw new TypeError('file is required') - } - - if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) { - throw new TypeError('cannot append to compressed archives') - } - - if (!files || !Array.isArray(files) || !files.length) { - throw new TypeError('no files or directories specified') - } - - files = Array.from(files) - - mtimeFilter(opt) - return r(opt, files, cb) -} - -const mtimeFilter = opt => { - const filter = opt.filter - - if (!opt.mtimeCache) { - opt.mtimeCache = new Map() - } - - opt.filter = filter ? 
(path, stat) => - filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime) - : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime) -} diff --git a/lib/warn-mixin.js b/lib/warn-mixin.js deleted file mode 100644 index a9406396..00000000 --- a/lib/warn-mixin.js +++ /dev/null @@ -1,24 +0,0 @@ -'use strict' -module.exports = Base => class extends Base { - warn (code, message, data = {}) { - if (this.file) { - data.file = this.file - } - if (this.cwd) { - data.cwd = this.cwd - } - data.code = message instanceof Error && message.code || code - data.tarCode = code - if (!this.strict && data.recoverable !== false) { - if (message instanceof Error) { - data = Object.assign(message, data) - message = message.message - } - this.emit('warn', data.tarCode, message, data) - } else if (message instanceof Error) { - this.emit('error', Object.assign(message, data)) - } else { - this.emit('error', Object.assign(new Error(`${code}: ${message}`), data)) - } - } -} diff --git a/lib/winchars.js b/lib/winchars.js deleted file mode 100644 index ebcab4ae..00000000 --- a/lib/winchars.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict' - -// When writing files on Windows, translate the characters to their -// 0xf000 higher-encoded versions. - -const raw = [ - '|', - '<', - '>', - '?', - ':', -] - -const win = raw.map(char => - String.fromCharCode(0xf000 + char.charCodeAt(0))) - -const toWin = new Map(raw.map((char, i) => [char, win[i]])) -const toRaw = new Map(win.map((char, i) => [char, raw[i]])) - -module.exports = { - encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s), - decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s), -} diff --git a/lib/write-entry.js b/lib/write-entry.js deleted file mode 100644 index 7d2f3eb1..00000000 --- a/lib/write-entry.js +++ /dev/null @@ -1,546 +0,0 @@ -'use strict' -const { Minipass } = require('minipass') -const Pax = require('./pax.js') -const Header = require('./header.js') -const fs = require('fs') -const path = require('path') -const normPath = require('./normalize-windows-path.js') -const stripSlash = require('./strip-trailing-slashes.js') - -const prefixPath = (path, prefix) => { - if (!prefix) { - return normPath(path) - } - path = normPath(path).replace(/^\.(\/|$)/, '') - return stripSlash(prefix) + '/' + path -} - -const maxReadSize = 16 * 1024 * 1024 -const PROCESS = Symbol('process') -const FILE = Symbol('file') -const DIRECTORY = Symbol('directory') -const SYMLINK = Symbol('symlink') -const HARDLINK = Symbol('hardlink') -const HEADER = Symbol('header') -const READ = Symbol('read') -const LSTAT = Symbol('lstat') -const ONLSTAT = Symbol('onlstat') -const ONREAD = Symbol('onread') -const ONREADLINK = Symbol('onreadlink') -const OPENFILE = Symbol('openfile') -const ONOPENFILE = Symbol('onopenfile') -const CLOSE = Symbol('close') -const MODE = Symbol('mode') -const AWAITDRAIN = Symbol('awaitDrain') -const ONDRAIN = Symbol('ondrain') -const PREFIX = Symbol('prefix') -const HAD_ERROR = Symbol('hadError') -const warner = require('./warn-mixin.js') -const winchars = require('./winchars.js') -const stripAbsolutePath = require('./strip-absolute-path.js') - -const modeFix = require('./mode-fix.js') - -const WriteEntry = warner(class WriteEntry extends Minipass { - constructor (p, opt) { - opt = opt || {} - super(opt) - if (typeof p !== 'string') { - throw new TypeError('path is required') - } - this.path = normPath(p) - // suppress atime, ctime, uid, gid, uname, gname - this.portable = !!opt.portable - // until node has builtin pwnam 
functions, this'll have to do - this.myuid = process.getuid && process.getuid() || 0 - this.myuser = process.env.USER || '' - this.maxReadSize = opt.maxReadSize || maxReadSize - this.linkCache = opt.linkCache || new Map() - this.statCache = opt.statCache || new Map() - this.preservePaths = !!opt.preservePaths - this.cwd = normPath(opt.cwd || process.cwd()) - this.strict = !!opt.strict - this.noPax = !!opt.noPax - this.noMtime = !!opt.noMtime - this.mtime = opt.mtime || null - this.prefix = opt.prefix ? normPath(opt.prefix) : null - - this.fd = null - this.blockLen = null - this.blockRemain = null - this.buf = null - this.offset = null - this.length = null - this.pos = null - this.remain = null - - if (typeof opt.onwarn === 'function') { - this.on('warn', opt.onwarn) - } - - let pathWarn = false - if (!this.preservePaths) { - const [root, stripped] = stripAbsolutePath(this.path) - if (root) { - this.path = stripped - pathWarn = root - } - } - - this.win32 = !!opt.win32 || process.platform === 'win32' - if (this.win32) { - // force the \ to / normalization, since we might not *actually* - // be on windows, but want \ to be considered a path separator. - this.path = winchars.decode(this.path.replace(/\\/g, '/')) - p = p.replace(/\\/g, '/') - } - - this.absolute = normPath(opt.absolute || path.resolve(this.cwd, p)) - - if (this.path === '') { - this.path = './' - } - - if (pathWarn) { - this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { - entry: this, - path: pathWarn + this.path, - }) - } - - if (this.statCache.has(this.absolute)) { - this[ONLSTAT](this.statCache.get(this.absolute)) - } else { - this[LSTAT]() - } - } - - emit (ev, ...data) { - if (ev === 'error') { - this[HAD_ERROR] = true - } - return super.emit(ev, ...data) - } - - [LSTAT] () { - fs.lstat(this.absolute, (er, stat) => { - if (er) { - return this.emit('error', er) - } - this[ONLSTAT](stat) - }) - } - - [ONLSTAT] (stat) { - this.statCache.set(this.absolute, stat) - this.stat = stat - if (!stat.isFile()) { - stat.size = 0 - } - this.type = getType(stat) - this.emit('stat', stat) - this[PROCESS]() - } - - [PROCESS] () { - switch (this.type) { - case 'File': return this[FILE]() - case 'Directory': return this[DIRECTORY]() - case 'SymbolicLink': return this[SYMLINK]() - // unsupported types are ignored. - default: return this.end() - } - } - - [MODE] (mode) { - return modeFix(mode, this.type === 'Directory', this.portable) - } - - [PREFIX] (path) { - return prefixPath(path, this.prefix) - } - - [HEADER] () { - if (this.type === 'Directory' && this.portable) { - this.noMtime = true - } - - this.header = new Header({ - path: this[PREFIX](this.path), - // only apply the prefix to hard links. - linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) - : this.linkpath, - // only the permissions and setuid/setgid/sticky bitflags - // not the higher-order bits that specify file type - mode: this[MODE](this.stat.mode), - uid: this.portable ? null : this.stat.uid, - gid: this.portable ? null : this.stat.gid, - size: this.stat.size, - mtime: this.noMtime ? null : this.mtime || this.stat.mtime, - type: this.type, - uname: this.portable ? null : - this.stat.uid === this.myuid ? this.myuser : '', - atime: this.portable ? null : this.stat.atime, - ctime: this.portable ? null : this.stat.ctime, - }) - - if (this.header.encode() && !this.noPax) { - super.write(new Pax({ - atime: this.portable ? null : this.header.atime, - ctime: this.portable ? null : this.header.ctime, - gid: this.portable ? 
null : this.header.gid, - mtime: this.noMtime ? null : this.mtime || this.header.mtime, - path: this[PREFIX](this.path), - linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) - : this.linkpath, - size: this.header.size, - uid: this.portable ? null : this.header.uid, - uname: this.portable ? null : this.header.uname, - dev: this.portable ? null : this.stat.dev, - ino: this.portable ? null : this.stat.ino, - nlink: this.portable ? null : this.stat.nlink, - }).encode()) - } - super.write(this.header.block) - } - - [DIRECTORY] () { - if (this.path.slice(-1) !== '/') { - this.path += '/' - } - this.stat.size = 0 - this[HEADER]() - this.end() - } - - [SYMLINK] () { - fs.readlink(this.absolute, (er, linkpath) => { - if (er) { - return this.emit('error', er) - } - this[ONREADLINK](linkpath) - }) - } - - [ONREADLINK] (linkpath) { - this.linkpath = normPath(linkpath) - this[HEADER]() - this.end() - } - - [HARDLINK] (linkpath) { - this.type = 'Link' - this.linkpath = normPath(path.relative(this.cwd, linkpath)) - this.stat.size = 0 - this[HEADER]() - this.end() - } - - [FILE] () { - if (this.stat.nlink > 1) { - const linkKey = this.stat.dev + ':' + this.stat.ino - if (this.linkCache.has(linkKey)) { - const linkpath = this.linkCache.get(linkKey) - if (linkpath.indexOf(this.cwd) === 0) { - return this[HARDLINK](linkpath) - } - } - this.linkCache.set(linkKey, this.absolute) - } - - this[HEADER]() - if (this.stat.size === 0) { - return this.end() - } - - this[OPENFILE]() - } - - [OPENFILE] () { - fs.open(this.absolute, 'r', (er, fd) => { - if (er) { - return this.emit('error', er) - } - this[ONOPENFILE](fd) - }) - } - - [ONOPENFILE] (fd) { - this.fd = fd - if (this[HAD_ERROR]) { - return this[CLOSE]() - } - - this.blockLen = 512 * Math.ceil(this.stat.size / 512) - this.blockRemain = this.blockLen - const bufLen = Math.min(this.blockLen, this.maxReadSize) - this.buf = Buffer.allocUnsafe(bufLen) - this.offset = 0 - this.pos = 0 - this.remain = this.stat.size - this.length = this.buf.length - this[READ]() - } - - [READ] () { - const { fd, buf, offset, length, pos } = this - fs.read(fd, buf, offset, length, pos, (er, bytesRead) => { - if (er) { - // ignoring the error from close(2) is a bad practice, but at - // this point we already have an error, don't need another one - return this[CLOSE](() => this.emit('error', er)) - } - this[ONREAD](bytesRead) - }) - } - - [CLOSE] (cb) { - fs.close(this.fd, cb) - } - - [ONREAD] (bytesRead) { - if (bytesRead <= 0 && this.remain > 0) { - const er = new Error('encountered unexpected EOF') - er.path = this.absolute - er.syscall = 'read' - er.code = 'EOF' - return this[CLOSE](() => this.emit('error', er)) - } - - if (bytesRead > this.remain) { - const er = new Error('did not encounter expected EOF') - er.path = this.absolute - er.syscall = 'read' - er.code = 'EOF' - return this[CLOSE](() => this.emit('error', er)) - } - - // null out the rest of the buffer, if we could fit the block padding - // at the end of this loop, we've incremented bytesRead and this.remain - // to be incremented up to the blockRemain level, as if we had expected - // to get a null-padded file, and read it until the end. then we will - // decrement both remain and blockRemain by bytesRead, and know that we - // reached the expected EOF, without any null buffer to append. 
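// [editorial note: not part of the deleted source] A worked example of the
// padding math, assuming a hypothetical 700-byte file: blockLen is
// 512 * Math.ceil(700 / 512) === 1024, so blockRemain starts at 1024 while
// remain starts at 700. On the final read (bytesRead === remain), the loop
// below zero-fills as much of the 324 padding bytes as fits in the buffer;
// whatever padding does not fit is emitted later by [ONDRAIN] as
// Buffer.alloc(this.blockRemain).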
- if (bytesRead === this.remain) { - for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) { - this.buf[i + this.offset] = 0 - bytesRead++ - this.remain++ - } - } - - const writeBuf = this.offset === 0 && bytesRead === this.buf.length ? - this.buf : this.buf.slice(this.offset, this.offset + bytesRead) - - const flushed = this.write(writeBuf) - if (!flushed) { - this[AWAITDRAIN](() => this[ONDRAIN]()) - } else { - this[ONDRAIN]() - } - } - - [AWAITDRAIN] (cb) { - this.once('drain', cb) - } - - write (writeBuf) { - if (this.blockRemain < writeBuf.length) { - const er = new Error('writing more data than expected') - er.path = this.absolute - return this.emit('error', er) - } - this.remain -= writeBuf.length - this.blockRemain -= writeBuf.length - this.pos += writeBuf.length - this.offset += writeBuf.length - return super.write(writeBuf) - } - - [ONDRAIN] () { - if (!this.remain) { - if (this.blockRemain) { - super.write(Buffer.alloc(this.blockRemain)) - } - return this[CLOSE](er => er ? this.emit('error', er) : this.end()) - } - - if (this.offset >= this.length) { - // if we only have a smaller bit left to read, alloc a smaller buffer - // otherwise, keep it the same length it was before. - this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length)) - this.offset = 0 - } - this.length = this.buf.length - this.offset - this[READ]() - } -}) - -class WriteEntrySync extends WriteEntry { - [LSTAT] () { - this[ONLSTAT](fs.lstatSync(this.absolute)) - } - - [SYMLINK] () { - this[ONREADLINK](fs.readlinkSync(this.absolute)) - } - - [OPENFILE] () { - this[ONOPENFILE](fs.openSync(this.absolute, 'r')) - } - - [READ] () { - let threw = true - try { - const { fd, buf, offset, length, pos } = this - const bytesRead = fs.readSync(fd, buf, offset, length, pos) - this[ONREAD](bytesRead) - threw = false - } finally { - // ignoring the error from close(2) is a bad practice, but at - // this point we already have an error, don't need another one - if (threw) { - try { - this[CLOSE](() => {}) - } catch (er) {} - } - } - } - - [AWAITDRAIN] (cb) { - cb() - } - - [CLOSE] (cb) { - fs.closeSync(this.fd) - cb() - } -} - -const WriteEntryTar = warner(class WriteEntryTar extends Minipass { - constructor (readEntry, opt) { - opt = opt || {} - super(opt) - this.preservePaths = !!opt.preservePaths - this.portable = !!opt.portable - this.strict = !!opt.strict - this.noPax = !!opt.noPax - this.noMtime = !!opt.noMtime - - this.readEntry = readEntry - this.type = readEntry.type - if (this.type === 'Directory' && this.portable) { - this.noMtime = true - } - - this.prefix = opt.prefix || null - - this.path = normPath(readEntry.path) - this.mode = this[MODE](readEntry.mode) - this.uid = this.portable ? null : readEntry.uid - this.gid = this.portable ? null : readEntry.gid - this.uname = this.portable ? null : readEntry.uname - this.gname = this.portable ? null : readEntry.gname - this.size = readEntry.size - this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime - this.atime = this.portable ? null : readEntry.atime - this.ctime = this.portable ? 
null : readEntry.ctime - this.linkpath = normPath(readEntry.linkpath) - - if (typeof opt.onwarn === 'function') { - this.on('warn', opt.onwarn) - } - - let pathWarn = false - if (!this.preservePaths) { - const [root, stripped] = stripAbsolutePath(this.path) - if (root) { - this.path = stripped - pathWarn = root - } - } - - this.remain = readEntry.size - this.blockRemain = readEntry.startBlockSize - - this.header = new Header({ - path: this[PREFIX](this.path), - linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) - : this.linkpath, - // only the permissions and setuid/setgid/sticky bitflags - // not the higher-order bits that specify file type - mode: this.mode, - uid: this.portable ? null : this.uid, - gid: this.portable ? null : this.gid, - size: this.size, - mtime: this.noMtime ? null : this.mtime, - type: this.type, - uname: this.portable ? null : this.uname, - atime: this.portable ? null : this.atime, - ctime: this.portable ? null : this.ctime, - }) - - if (pathWarn) { - this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { - entry: this, - path: pathWarn + this.path, - }) - } - - if (this.header.encode() && !this.noPax) { - super.write(new Pax({ - atime: this.portable ? null : this.atime, - ctime: this.portable ? null : this.ctime, - gid: this.portable ? null : this.gid, - mtime: this.noMtime ? null : this.mtime, - path: this[PREFIX](this.path), - linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) - : this.linkpath, - size: this.size, - uid: this.portable ? null : this.uid, - uname: this.portable ? null : this.uname, - dev: this.portable ? null : this.readEntry.dev, - ino: this.portable ? null : this.readEntry.ino, - nlink: this.portable ? null : this.readEntry.nlink, - }).encode()) - } - - super.write(this.header.block) - readEntry.pipe(this) - } - - [PREFIX] (path) { - return prefixPath(path, this.prefix) - } - - [MODE] (mode) { - return modeFix(mode, this.type === 'Directory', this.portable) - } - - write (data) { - const writeLen = data.length - if (writeLen > this.blockRemain) { - throw new Error('writing more to entry than is appropriate') - } - this.blockRemain -= writeLen - return super.write(data) - } - - end () { - if (this.blockRemain) { - super.write(Buffer.alloc(this.blockRemain)) - } - return super.end() - } -}) - -WriteEntry.Sync = WriteEntrySync -WriteEntry.Tar = WriteEntryTar - -const getType = stat => - stat.isFile() ? 'File' - : stat.isDirectory() ? 'Directory' - : stat.isSymbolicLink() ? 'SymbolicLink' - : 'Unsupported' - -module.exports = WriteEntry
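
Editorial addendum, not part of the patch: a brief consumer sketch of the
WriteEntry stream deleted above, written against the pre-refactor lib/
layout. The filename is an illustrative assumption. A WriteEntry is a
Minipass stream that emits the encoded 512-byte header block (plus a pax
extended header when needed), followed by the file body, zero-padded so the
total output is always block-aligned.

    // sketch: consume a WriteEntry for one on-disk file (hypothetical name)
    const WriteEntry = require('./lib/write-entry.js')
    const entry = new WriteEntry('some-file.txt', { cwd: process.cwd() })
    let total = 0
    entry.on('data', chunk => {
      total += chunk.length // header block(s) first, then null-padded body
    })
    entry.on('end', () => {
      // every WriteEntry emits a multiple of 512 bytes
      console.log(total % 512 === 0) // true
    })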