diff --git a/canary-publish/README.md b/canary-publish/README.md
index c59bcbb..d12bed4 100644
--- a/canary-publish/README.md
+++ b/canary-publish/README.md
@@ -45,7 +45,8 @@ jobs:
           npm_tag: canary # Specify the tag to attach when publishing to npm
           npm_token: ${{ secrets.NPM_TOKEN }} # Provide the publish token required to publish to npm
           publish_script: pnpm run deploy:canary # Provide the script that runs the canary publish
-          packages_dir: packages # Add the folder names of the packages to watch for changes (default: packages,share)
+          packages_dir: packages # Add the folder names of the packages to watch for changes (default: packages,share)
+          excludes: ".turbo,.github" # File or folder paths to exclude from change detection
 ```

 ## Execution result
diff --git a/canary-publish/action.yml b/canary-publish/action.yml
index 54150a8..1a41668 100644
--- a/canary-publish/action.yml
+++ b/canary-publish/action.yml
@@ -21,3 +21,7 @@ inputs:
     description: "Package directories"
     required: false
    default: "packages,share"
+  excludes:
+    description: "Paths to exclude"
+    required: false
+    default: ".github,.changeset"
diff --git a/canary-publish/dist/index.js b/canary-publish/dist/index.js
index 9bd9723..0dafd25 100644
--- a/canary-publish/dist/index.js
+++ b/canary-publish/dist/index.js
@@ -53253,12 +53253,14 @@ const read_1 = __importDefault(__nccwpck_require__(1746));
 const fs_extra_1 = __importDefault(__nccwpck_require__(77));
 const resolve_from_1 = __importDefault(__nccwpck_require__(1345));
 const apis_1 = __importDefault(__nccwpck_require__(6500));
+const utils_1 = __nccwpck_require__(3927);
 const file_1 = __nccwpck_require__(398);
 const npm_1 = __nccwpck_require__(6824);
 const publish_1 = __nccwpck_require__(9459);
 const cwd = process.cwd();
 function main() {
     return __awaiter(this, void 0, void 0, function* () {
+        var _a;
         // Configure npmrc
         yield (0, npm_1.setNpmRc)();
         const { pullFetchers, issueFetchers } = (0, apis_1.default)();
@@ -53271,18 +53273,27 @@ function main() {
             yield issueFetchers.addComment('Please specify a detect version so the correct canary version can be published');
             return;
         }
+        const changedFiles = yield (0, utils_1.getChangedAllFiles)({
+            pullNumber: pullRequestInfo.number,
+        });
         // Get the changed package files
         const packagesDir = core.getInput('packages_dir');
+        const excludes = (_a = core.getInput('excludes')) !== null && _a !== void 0 ? _a : '';
         const changedPackageInfos = yield (0, file_1.getChangedPackages)({
-            pullNumber: pullRequestInfo.number,
             packagesDir: packagesDir.split(','),
+            excludes: excludes.split(','),
+            changedFiles,
         });
         if (changedPackageInfos.length === 0) {
             core.info('There are no changed packages.');
             return;
         }
-        // Prevent publishing of packages unrelated to the changes.
-        yield (0, file_1.protectUnchangedPackages)(changedPackageInfos);
+        yield Promise.all([
+            // Remove all .changeset/*.md files unrelated to this change.
+            (0, file_1.removeChangesetMdFiles)({ changedFiles }),
+            // Prevent publishing of packages unrelated to the changes.
+            (0, file_1.protectUnchangedPackages)(changedPackageInfos),
+        ]);
         // Apply the package version changes
         yield (0, exec_1.exec)('node', [(0, resolve_from_1.default)(cwd, '@changesets/cli/bin.js'), 'version'], {
             cwd,
@@ -53383,26 +53394,31 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.getChangedPackages = getChangedPackages;
 exports.getAllPackageJSON = getAllPackageJSON;
 exports.protectUnchangedPackages = protectUnchangedPackages;
+exports.removeChangesetMdFiles = removeChangesetMdFiles;
 const core = __importStar(__nccwpck_require__(6108));
 const fast_glob_1 = __importDefault(__nccwpck_require__(6014));
 const fs_extra_1 = __importDefault(__nccwpck_require__(77));
 const utils_1 = __nccwpck_require__(3927);
 function getChangedPackages(_a) {
-    return __awaiter(this, arguments, void 0, function* ({ pullNumber, packagesDir }) {
-        const changedFiles = yield (0, utils_1.getChangedAllFiles)({
-            pullNumber,
-        });
+    return __awaiter(this, arguments, void 0, function* ({ changedFiles, packagesDir, excludes, }) {
+        const isIncludedRoot = packagesDir.includes('.') === true;
+        const targetDirectories = packagesDir.filter((packagename) => packagename !== '.');
         const changedPackages = changedFiles.reduce((acc, { filename }) => {
-            const isTargetDirectories = packagesDir.some((packageDir) => filename.includes(`${packageDir}/`));
-            const isMarkdownFile = filename.endsWith('.md');
-            if (isTargetDirectories && !isMarkdownFile) {
-                const [packageRoot, packageName] = filename.split('/');
-                const packageJsonPath = [packageRoot, packageName, 'package.json'].join('/');
-                acc.push(packageJsonPath);
+            const 패키지대상인가 = isIncludedRoot || targetDirectories.some((packageDir) => filename.includes(`${packageDir}/`));
+            const 마크다운파일인가 = filename.endsWith('.md');
+            const 제외대상인가 = excludes.some((exclude) => {
+                return filename === exclude || filename.startsWith(`${exclude}`);
+            });
+            if (패키지대상인가 && !마크다운파일인가 && !제외대상인가) {
+                const packageJsonPath = isIncludedRoot ? 'package.json' : (0, utils_1.findNearestPackageJson)(filename);
+                if (packageJsonPath != null) {
+                    acc.add(packageJsonPath);
+                }
             }
             return acc;
-        }, []);
-        return [...new Set(changedPackages)];
+        }, new Set());
+        console.log('Filtered packages', Array.from(changedPackages)); // eslint-disable-line
+        return Array.from(changedPackages);
     });
 }
 function getAllPackageJSON() {
@@ -53426,6 +53442,17 @@ function protectUnchangedPackages(changedPackages) {
         }
     });
 }
+function removeChangesetMdFiles(_a) {
+    return __awaiter(this, arguments, void 0, function* ({ changedFiles, }) {
+        const markdownPaths = yield (0, fast_glob_1.default)('.changeset/*.md');
+        return Promise.all(markdownPaths.map((markdownPath) => __awaiter(this, void 0, void 0, function* () {
+            if (changedFiles.find(({ filename }) => filename === markdownPath) == null) {
+                console.log(`Removing ${markdownPath}, which is unrelated to this PR`); // eslint-disable-line
+                yield fs_extra_1.default.remove(markdownPath);
+            }
+        })));
+    });
+}


 /***/ }),
@@ -53818,10 +53845,16 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
     function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
     function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
 };
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.getChangedAllFiles = getChangedAllFiles;
+exports.findNearestPackageJson = findNearestPackageJson;
+const path_1 = __importDefault(__nccwpck_require__(1017));
 const core = __importStar(__nccwpck_require__(6108));
 const github = __importStar(__nccwpck_require__(1645));
+const fs_extra_1 = __importDefault(__nccwpck_require__(77));
 const utils_1 = __nccwpck_require__(3927);
 function getChangedAllFiles(_a) {
     return __awaiter(this, arguments, void 0, function* ({ pullNumber }) {
@@ -53853,6 +53886,17 @@ function getChangedAllFiles(_a) {
         return changedFiles;
     });
 }
+function findNearestPackageJson(filePath) {
+    let currentDir = path_1.default.dirname(filePath);
+    while (currentDir !== path_1.default.parse(currentDir).root) {
+        const packageJsonPath = path_1.default.join(currentDir, 'package.json');
+        if (fs_extra_1.default.existsSync(packageJsonPath)) {
+            return packageJsonPath;
+        }
+        currentDir = path_1.default.dirname(currentDir);
+    }
+    return undefined;
+}


 /***/ }),
diff --git a/canary-publish/package.json b/canary-publish/package.json
index 02a99ae..51b0105 100644
--- a/canary-publish/package.json
+++ b/canary-publish/package.json
@@ -16,11 +16,9 @@
     "@changesets/read": "^0.6.0",
     "@vercel/ncc": "^0.38.1",
     "fast-glob": "^3.3.2",
-    "fs-extra": "^8.1.0",
     "resolve-from": "^5.0.0"
   },
   "devDependencies": {
-    "@types/fs-extra": "^8.0.0",
     "@types/node": "^20.14.9"
   }
 }
diff --git a/canary-publish/src/index.ts b/canary-publish/src/index.ts
index 342bea7..39a644c 100644
--- a/canary-publish/src/index.ts
+++ b/canary-publish/src/index.ts
@@ -5,8 +5,9 @@ import fs from 'fs-extra'
 import resolveFrom from 'resolve-from'

 import createFetchers from '$actions/apis'
+import {getChangedAllFiles} from '$actions/utils'

-import {getChangedPackages, protectUnchangedPackages} from './utils/file'
+import {getChangedPackages, protectUnchangedPackages, removeChangesetMdFiles} from './utils/file'
 import {setNpmRc} from './utils/npm'
 import {getPublishedPackageInfos} from './utils/publish'

@@ -30,11 +31,18 @@ async function main() {
     return
   }

+  const changedFiles = await getChangedAllFiles({
+    pullNumber: pullRequestInfo.number,
+  })
+
   // Get the changed package files
   const packagesDir = core.getInput('packages_dir')
+  const excludes = core.getInput('excludes') ?? ''
+
   const changedPackageInfos = await getChangedPackages({
-    pullNumber: pullRequestInfo.number,
     packagesDir: packagesDir.split(',') as string[],
+    excludes: excludes.split(',') as string[],
+    changedFiles,
   })

   if (changedPackageInfos.length === 0) {
@@ -42,8 +50,12 @@
     return
   }

-  // Prevent publishing of packages unrelated to the changes.
-  await protectUnchangedPackages(changedPackageInfos)
+  await Promise.all([
+    // Remove all .changeset/*.md files unrelated to this change.
+    removeChangesetMdFiles({changedFiles}),
+    // Prevent publishing of packages unrelated to the changes.
+    protectUnchangedPackages(changedPackageInfos),
+  ])

   // Apply the package version changes
   await exec('node', [resolveFrom(cwd, '@changesets/cli/bin.js'), 'version'], {
diff --git a/canary-publish/src/utils/file.ts b/canary-publish/src/utils/file.ts
index 45355be..765e8ff 100644
--- a/canary-publish/src/utils/file.ts
+++ b/canary-publish/src/utils/file.ts
@@ -2,27 +2,43 @@ import * as core from '@actions/core'
 import fg from 'fast-glob'
 import fs from 'fs-extra'

-import {getChangedAllFiles} from '$actions/utils'
+import {findNearestPackageJson, getChangedAllFiles} from '$actions/utils'

-export async function getChangedPackages({pullNumber, packagesDir}: {pullNumber: number; packagesDir: string[]}) {
-  const changedFiles = await getChangedAllFiles({
-    pullNumber,
-  })
+export async function getChangedPackages({
+  changedFiles,
+  packagesDir,
+  excludes,
+}: {
+  changedFiles: Awaited<ReturnType<typeof getChangedAllFiles>>
+  packagesDir: string[]
+  excludes: string[]
+}) {
+  const isIncludedRoot = packagesDir.includes('.') === true
+  const targetDirectories = packagesDir.filter((packagename) => packagename !== '.')

   const changedPackages = changedFiles.reduce((acc, {filename}) => {
-    const isTargetDirectories = packagesDir.some((packageDir) => filename.includes(`${packageDir}/`))
-    const isMarkdownFile = filename.endsWith('.md')
+    const 패키지대상인가 =
+      isIncludedRoot || targetDirectories.some((packageDir) => filename.includes(`${packageDir}/`))
+
+    const 마크다운파일인가 = filename.endsWith('.md')
+    const 제외대상인가 = excludes.some((exclude) => {
+      return filename === exclude || filename.startsWith(`${exclude}`)
+    })

-    if (isTargetDirectories && !isMarkdownFile) {
-      const [packageRoot, packageName] = filename.split('/')
-      const packageJsonPath = [packageRoot, packageName, 'package.json'].join('/')
+    if (패키지대상인가 && !마크다운파일인가 && !제외대상인가) {
+      const packageJsonPath = isIncludedRoot ? 'package.json' : findNearestPackageJson(filename)

-      acc.push(packageJsonPath)
+      if (packageJsonPath != null) {
+        acc.add(packageJsonPath)
+      }
     }
+
     return acc
-  }, [] as string[])
+  }, new Set())
+
+  console.log('Filtered packages', Array.from(changedPackages)) // eslint-disable-line

-  return [...new Set(changedPackages)]
+  return Array.from(changedPackages)
 }

 export async function getAllPackageJSON() {
@@ -48,3 +64,21 @@ export async function protectUnchangedPackages(changedPackages: string[]) {
     }
   }
 }
+
+export async function removeChangesetMdFiles({
+  changedFiles,
+}: {
+  changedFiles: Awaited<ReturnType<typeof getChangedAllFiles>>
+}) {
+  const markdownPaths = await fg('.changeset/*.md')
+
+  return Promise.all(
+    markdownPaths.map(async (markdownPath) => {
+      if (changedFiles.find(({filename}) => filename === markdownPath) == null) {
+        console.log(`Removing ${markdownPath}, which is unrelated to this PR`) // eslint-disable-line
+
+        await fs.remove(markdownPath)
+      }
+    }),
+  )
+}
diff --git a/detect-add/README.md b/detect-add/README.md
index 03e7bdc..e715c0c 100644
--- a/detect-add/README.md
+++ b/detect-add/README.md
@@ -31,6 +31,7 @@ jobs:
           skip_label: skip_detect_label # 해당 액션을 skip할 label의 이름을 적어주세요. (default: skip-detect-change)
           packages_dir: packages # 변경을 탐지할 패키지들의 폴더명을 추가해주세요.
(default: packages,share) formatting_script: pnpm run markdownlint:fix # 생성되는 md 파일의 formatting이 필요하다면 추가해주세요 + excludes: ".turbo,.github" # 변경감지를 제외하고싶은 파일 또는 폴더 경로 ``` ## 실행 결과 diff --git a/detect-add/action.yml b/detect-add/action.yml index 261b905..157945e 100644 --- a/detect-add/action.yml +++ b/detect-add/action.yml @@ -22,3 +22,7 @@ inputs: formatting_script: description: ".changeset/*.md 파일 포매팅이 필요하다면 script 명령어를 추가해주세요" required: false + excludes: + description: "제외할 경로" + required: false + default: ".github,.changeset" diff --git a/detect-add/dist/index.js b/detect-add/dist/index.js index f4c9bf6..5be5da5 100644 --- a/detect-add/dist/index.js +++ b/detect-add/dist/index.js @@ -7229,6 +7229,3123 @@ class Deprecation extends Error { exports.Deprecation = Deprecation; +/***/ }), + +/***/ 1518: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const fs = __nccwpck_require__(494) +const path = __nccwpck_require__(1017) +const mkdirpSync = (__nccwpck_require__(2544).mkdirsSync) +const utimesSync = (__nccwpck_require__(7113).utimesMillisSync) +const stat = __nccwpck_require__(5799) + +function copySync (src, dest, opts) { + if (typeof opts === 'function') { + opts = { filter: opts } + } + + opts = opts || {} + opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now + opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber + + // Warn about using preserveTimestamps on 32-bit node + if (opts.preserveTimestamps && process.arch === 'ia32') { + console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n + see https://github.com/jprichardson/node-fs-extra/issues/269`) + } + + const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy') + stat.checkParentPathsSync(src, srcStat, dest, 'copy') + return handleFilterAndCopy(destStat, src, dest, opts) +} + +function handleFilterAndCopy (destStat, src, dest, opts) { + if (opts.filter && !opts.filter(src, dest)) return + const destParent = path.dirname(dest) + if (!fs.existsSync(destParent)) mkdirpSync(destParent) + return startCopy(destStat, src, dest, opts) +} + +function startCopy (destStat, src, dest, opts) { + if (opts.filter && !opts.filter(src, dest)) return + return getStats(destStat, src, dest, opts) +} + +function getStats (destStat, src, dest, opts) { + const statSync = opts.dereference ? 
fs.statSync : fs.lstatSync + const srcStat = statSync(src) + + if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts) + else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts) + else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts) +} + +function onFile (srcStat, destStat, src, dest, opts) { + if (!destStat) return copyFile(srcStat, src, dest, opts) + return mayCopyFile(srcStat, src, dest, opts) +} + +function mayCopyFile (srcStat, src, dest, opts) { + if (opts.overwrite) { + fs.unlinkSync(dest) + return copyFile(srcStat, src, dest, opts) + } else if (opts.errorOnExist) { + throw new Error(`'${dest}' already exists`) + } +} + +function copyFile (srcStat, src, dest, opts) { + if (typeof fs.copyFileSync === 'function') { + fs.copyFileSync(src, dest) + fs.chmodSync(dest, srcStat.mode) + if (opts.preserveTimestamps) { + return utimesSync(dest, srcStat.atime, srcStat.mtime) + } + return + } + return copyFileFallback(srcStat, src, dest, opts) +} + +function copyFileFallback (srcStat, src, dest, opts) { + const BUF_LENGTH = 64 * 1024 + const _buff = __nccwpck_require__(9956)(BUF_LENGTH) + + const fdr = fs.openSync(src, 'r') + const fdw = fs.openSync(dest, 'w', srcStat.mode) + let pos = 0 + + while (pos < srcStat.size) { + const bytesRead = fs.readSync(fdr, _buff, 0, BUF_LENGTH, pos) + fs.writeSync(fdw, _buff, 0, bytesRead) + pos += bytesRead + } + + if (opts.preserveTimestamps) fs.futimesSync(fdw, srcStat.atime, srcStat.mtime) + + fs.closeSync(fdr) + fs.closeSync(fdw) +} + +function onDir (srcStat, destStat, src, dest, opts) { + if (!destStat) return mkDirAndCopy(srcStat, src, dest, opts) + if (destStat && !destStat.isDirectory()) { + throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) + } + return copyDir(src, dest, opts) +} + +function mkDirAndCopy (srcStat, src, dest, opts) { + fs.mkdirSync(dest) + copyDir(src, dest, opts) + return fs.chmodSync(dest, srcStat.mode) +} + +function copyDir (src, dest, opts) { + fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts)) +} + +function copyDirItem (item, src, dest, opts) { + const srcItem = path.join(src, item) + const destItem = path.join(dest, item) + const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy') + return startCopy(destStat, srcItem, destItem, opts) +} + +function onLink (destStat, src, dest, opts) { + let resolvedSrc = fs.readlinkSync(src) + if (opts.dereference) { + resolvedSrc = path.resolve(process.cwd(), resolvedSrc) + } + + if (!destStat) { + return fs.symlinkSync(resolvedSrc, dest) + } else { + let resolvedDest + try { + resolvedDest = fs.readlinkSync(dest) + } catch (err) { + // dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest) + throw err + } + if (opts.dereference) { + resolvedDest = path.resolve(process.cwd(), resolvedDest) + } + if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { + throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) + } + + // prevent copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. 
+ if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { + throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) + } + return copyLink(resolvedSrc, dest) + } +} + +function copyLink (resolvedSrc, dest) { + fs.unlinkSync(dest) + return fs.symlinkSync(resolvedSrc, dest) +} + +module.exports = copySync + + +/***/ }), + +/***/ 9772: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +module.exports = { + copySync: __nccwpck_require__(1518) +} + + +/***/ }), + +/***/ 2228: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const fs = __nccwpck_require__(494) +const path = __nccwpck_require__(1017) +const mkdirp = (__nccwpck_require__(2544).mkdirs) +const pathExists = (__nccwpck_require__(489).pathExists) +const utimes = (__nccwpck_require__(7113).utimesMillis) +const stat = __nccwpck_require__(5799) + +function copy (src, dest, opts, cb) { + if (typeof opts === 'function' && !cb) { + cb = opts + opts = {} + } else if (typeof opts === 'function') { + opts = { filter: opts } + } + + cb = cb || function () {} + opts = opts || {} + + opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now + opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber + + // Warn about using preserveTimestamps on 32-bit node + if (opts.preserveTimestamps && process.arch === 'ia32') { + console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n + see https://github.com/jprichardson/node-fs-extra/issues/269`) + } + + stat.checkPaths(src, dest, 'copy', (err, stats) => { + if (err) return cb(err) + const { srcStat, destStat } = stats + stat.checkParentPaths(src, srcStat, dest, 'copy', err => { + if (err) return cb(err) + if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb) + return checkParentDir(destStat, src, dest, opts, cb) + }) + }) +} + +function checkParentDir (destStat, src, dest, opts, cb) { + const destParent = path.dirname(dest) + pathExists(destParent, (err, dirExists) => { + if (err) return cb(err) + if (dirExists) return startCopy(destStat, src, dest, opts, cb) + mkdirp(destParent, err => { + if (err) return cb(err) + return startCopy(destStat, src, dest, opts, cb) + }) + }) +} + +function handleFilter (onInclude, destStat, src, dest, opts, cb) { + Promise.resolve(opts.filter(src, dest)).then(include => { + if (include) return onInclude(destStat, src, dest, opts, cb) + return cb() + }, error => cb(error)) +} + +function startCopy (destStat, src, dest, opts, cb) { + if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb) + return getStats(destStat, src, dest, opts, cb) +} + +function getStats (destStat, src, dest, opts, cb) { + const stat = opts.dereference ? 
fs.stat : fs.lstat + stat(src, (err, srcStat) => { + if (err) return cb(err) + + if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb) + else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb) + else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb) + }) +} + +function onFile (srcStat, destStat, src, dest, opts, cb) { + if (!destStat) return copyFile(srcStat, src, dest, opts, cb) + return mayCopyFile(srcStat, src, dest, opts, cb) +} + +function mayCopyFile (srcStat, src, dest, opts, cb) { + if (opts.overwrite) { + fs.unlink(dest, err => { + if (err) return cb(err) + return copyFile(srcStat, src, dest, opts, cb) + }) + } else if (opts.errorOnExist) { + return cb(new Error(`'${dest}' already exists`)) + } else return cb() +} + +function copyFile (srcStat, src, dest, opts, cb) { + if (typeof fs.copyFile === 'function') { + return fs.copyFile(src, dest, err => { + if (err) return cb(err) + return setDestModeAndTimestamps(srcStat, dest, opts, cb) + }) + } + return copyFileFallback(srcStat, src, dest, opts, cb) +} + +function copyFileFallback (srcStat, src, dest, opts, cb) { + const rs = fs.createReadStream(src) + rs.on('error', err => cb(err)).once('open', () => { + const ws = fs.createWriteStream(dest, { mode: srcStat.mode }) + ws.on('error', err => cb(err)) + .on('open', () => rs.pipe(ws)) + .once('close', () => setDestModeAndTimestamps(srcStat, dest, opts, cb)) + }) +} + +function setDestModeAndTimestamps (srcStat, dest, opts, cb) { + fs.chmod(dest, srcStat.mode, err => { + if (err) return cb(err) + if (opts.preserveTimestamps) { + return utimes(dest, srcStat.atime, srcStat.mtime, cb) + } + return cb() + }) +} + +function onDir (srcStat, destStat, src, dest, opts, cb) { + if (!destStat) return mkDirAndCopy(srcStat, src, dest, opts, cb) + if (destStat && !destStat.isDirectory()) { + return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)) + } + return copyDir(src, dest, opts, cb) +} + +function mkDirAndCopy (srcStat, src, dest, opts, cb) { + fs.mkdir(dest, err => { + if (err) return cb(err) + copyDir(src, dest, opts, err => { + if (err) return cb(err) + return fs.chmod(dest, srcStat.mode, cb) + }) + }) +} + +function copyDir (src, dest, opts, cb) { + fs.readdir(src, (err, items) => { + if (err) return cb(err) + return copyDirItems(items, src, dest, opts, cb) + }) +} + +function copyDirItems (items, src, dest, opts, cb) { + const item = items.pop() + if (!item) return cb() + return copyDirItem(items, item, src, dest, opts, cb) +} + +function copyDirItem (items, item, src, dest, opts, cb) { + const srcItem = path.join(src, item) + const destItem = path.join(dest, item) + stat.checkPaths(srcItem, destItem, 'copy', (err, stats) => { + if (err) return cb(err) + const { destStat } = stats + startCopy(destStat, srcItem, destItem, opts, err => { + if (err) return cb(err) + return copyDirItems(items, src, dest, opts, cb) + }) + }) +} + +function onLink (destStat, src, dest, opts, cb) { + fs.readlink(src, (err, resolvedSrc) => { + if (err) return cb(err) + if (opts.dereference) { + resolvedSrc = path.resolve(process.cwd(), resolvedSrc) + } + + if (!destStat) { + return fs.symlink(resolvedSrc, dest, cb) + } else { + fs.readlink(dest, (err, resolvedDest) => { + if (err) { + // dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. 
If dest already exists, + // fs throws error anyway, so no need to guard against it here. + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb) + return cb(err) + } + if (opts.dereference) { + resolvedDest = path.resolve(process.cwd(), resolvedDest) + } + if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { + return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)) + } + + // do not copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. + if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { + return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)) + } + return copyLink(resolvedSrc, dest, cb) + }) + } + }) +} + +function copyLink (resolvedSrc, dest, cb) { + fs.unlink(dest, err => { + if (err) return cb(err) + return fs.symlink(resolvedSrc, dest, cb) + }) +} + +module.exports = copy + + +/***/ }), + +/***/ 349: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const u = (__nccwpck_require__(6350)/* .fromCallback */ .E) +module.exports = { + copy: u(__nccwpck_require__(2228)) +} + + +/***/ }), + +/***/ 8280: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const u = (__nccwpck_require__(6350)/* .fromCallback */ .E) +const fs = __nccwpck_require__(494) +const path = __nccwpck_require__(1017) +const mkdir = __nccwpck_require__(2544) +const remove = __nccwpck_require__(9033) + +const emptyDir = u(function emptyDir (dir, callback) { + callback = callback || function () {} + fs.readdir(dir, (err, items) => { + if (err) return mkdir.mkdirs(dir, callback) + + items = items.map(item => path.join(dir, item)) + + deleteItem() + + function deleteItem () { + const item = items.pop() + if (!item) return callback() + remove.remove(item, err => { + if (err) return callback(err) + deleteItem() + }) + } + }) +}) + +function emptyDirSync (dir) { + let items + try { + items = fs.readdirSync(dir) + } catch (err) { + return mkdir.mkdirsSync(dir) + } + + items.forEach(item => { + item = path.join(dir, item) + remove.removeSync(item) + }) +} + +module.exports = { + emptyDirSync, + emptydirSync: emptyDirSync, + emptyDir, + emptydir: emptyDir +} + + +/***/ }), + +/***/ 4445: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const u = (__nccwpck_require__(6350)/* .fromCallback */ .E) +const path = __nccwpck_require__(1017) +const fs = __nccwpck_require__(494) +const mkdir = __nccwpck_require__(2544) +const pathExists = (__nccwpck_require__(489).pathExists) + +function createFile (file, callback) { + function makeFile () { + fs.writeFile(file, '', err => { + if (err) return callback(err) + callback() + }) + } + + fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err + if (!err && stats.isFile()) return callback() + const dir = path.dirname(file) + pathExists(dir, (err, dirExists) => { + if (err) return callback(err) + if (dirExists) return makeFile() + mkdir.mkdirs(dir, err => { + if (err) return callback(err) + makeFile() + }) + }) + }) +} + +function createFileSync (file) { + let stats + try { + stats = fs.statSync(file) + } catch (e) {} + if (stats && stats.isFile()) return + + const dir = path.dirname(file) + if (!fs.existsSync(dir)) { + mkdir.mkdirsSync(dir) + } + + fs.writeFileSync(file, '') +} + +module.exports = { + createFile: 
u(createFile), + createFileSync +} + + +/***/ }), + +/***/ 4747: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const file = __nccwpck_require__(4445) +const link = __nccwpck_require__(128) +const symlink = __nccwpck_require__(9664) + +module.exports = { + // file + createFile: file.createFile, + createFileSync: file.createFileSync, + ensureFile: file.createFile, + ensureFileSync: file.createFileSync, + // link + createLink: link.createLink, + createLinkSync: link.createLinkSync, + ensureLink: link.createLink, + ensureLinkSync: link.createLinkSync, + // symlink + createSymlink: symlink.createSymlink, + createSymlinkSync: symlink.createSymlinkSync, + ensureSymlink: symlink.createSymlink, + ensureSymlinkSync: symlink.createSymlinkSync +} + + +/***/ }), + +/***/ 128: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const u = (__nccwpck_require__(6350)/* .fromCallback */ .E) +const path = __nccwpck_require__(1017) +const fs = __nccwpck_require__(494) +const mkdir = __nccwpck_require__(2544) +const pathExists = (__nccwpck_require__(489).pathExists) + +function createLink (srcpath, dstpath, callback) { + function makeLink (srcpath, dstpath) { + fs.link(srcpath, dstpath, err => { + if (err) return callback(err) + callback(null) + }) + } + + pathExists(dstpath, (err, destinationExists) => { + if (err) return callback(err) + if (destinationExists) return callback(null) + fs.lstat(srcpath, (err) => { + if (err) { + err.message = err.message.replace('lstat', 'ensureLink') + return callback(err) + } + + const dir = path.dirname(dstpath) + pathExists(dir, (err, dirExists) => { + if (err) return callback(err) + if (dirExists) return makeLink(srcpath, dstpath) + mkdir.mkdirs(dir, err => { + if (err) return callback(err) + makeLink(srcpath, dstpath) + }) + }) + }) + }) +} + +function createLinkSync (srcpath, dstpath) { + const destinationExists = fs.existsSync(dstpath) + if (destinationExists) return undefined + + try { + fs.lstatSync(srcpath) + } catch (err) { + err.message = err.message.replace('lstat', 'ensureLink') + throw err + } + + const dir = path.dirname(dstpath) + const dirExists = fs.existsSync(dir) + if (dirExists) return fs.linkSync(srcpath, dstpath) + mkdir.mkdirsSync(dir) + + return fs.linkSync(srcpath, dstpath) +} + +module.exports = { + createLink: u(createLink), + createLinkSync +} + + +/***/ }), + +/***/ 8891: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const path = __nccwpck_require__(1017) +const fs = __nccwpck_require__(494) +const pathExists = (__nccwpck_require__(489).pathExists) + +/** + * Function that returns two types of paths, one relative to symlink, and one + * relative to the current working directory. Checks if path is absolute or + * relative. If the path is relative, this function checks if the path is + * relative to symlink or relative to current working directory. This is an + * initiative to find a smarter `srcpath` to supply when building symlinks. + * This allows you to determine which path to use out of one of three possible + * types of source paths. The first is an absolute path. This is detected by + * `path.isAbsolute()`. When an absolute path is provided, it is checked to + * see if it exists. If it does it's used, if not an error is returned + * (callback)/ thrown (sync). The other two options for `srcpath` are a + * relative url. 
By default Node's `fs.symlink` works by creating a symlink + * using `dstpath` and expects the `srcpath` to be relative to the newly + * created symlink. If you provide a `srcpath` that does not exist on the file + * system it results in a broken symlink. To minimize this, the function + * checks to see if the 'relative to symlink' source file exists, and if it + * does it will use it. If it does not, it checks if there's a file that + * exists that is relative to the current working directory, if does its used. + * This preserves the expectations of the original fs.symlink spec and adds + * the ability to pass in `relative to current working direcotry` paths. + */ + +function symlinkPaths (srcpath, dstpath, callback) { + if (path.isAbsolute(srcpath)) { + return fs.lstat(srcpath, (err) => { + if (err) { + err.message = err.message.replace('lstat', 'ensureSymlink') + return callback(err) + } + return callback(null, { + 'toCwd': srcpath, + 'toDst': srcpath + }) + }) + } else { + const dstdir = path.dirname(dstpath) + const relativeToDst = path.join(dstdir, srcpath) + return pathExists(relativeToDst, (err, exists) => { + if (err) return callback(err) + if (exists) { + return callback(null, { + 'toCwd': relativeToDst, + 'toDst': srcpath + }) + } else { + return fs.lstat(srcpath, (err) => { + if (err) { + err.message = err.message.replace('lstat', 'ensureSymlink') + return callback(err) + } + return callback(null, { + 'toCwd': srcpath, + 'toDst': path.relative(dstdir, srcpath) + }) + }) + } + }) + } +} + +function symlinkPathsSync (srcpath, dstpath) { + let exists + if (path.isAbsolute(srcpath)) { + exists = fs.existsSync(srcpath) + if (!exists) throw new Error('absolute srcpath does not exist') + return { + 'toCwd': srcpath, + 'toDst': srcpath + } + } else { + const dstdir = path.dirname(dstpath) + const relativeToDst = path.join(dstdir, srcpath) + exists = fs.existsSync(relativeToDst) + if (exists) { + return { + 'toCwd': relativeToDst, + 'toDst': srcpath + } + } else { + exists = fs.existsSync(srcpath) + if (!exists) throw new Error('relative srcpath does not exist') + return { + 'toCwd': srcpath, + 'toDst': path.relative(dstdir, srcpath) + } + } + } +} + +module.exports = { + symlinkPaths, + symlinkPathsSync +} + + +/***/ }), + +/***/ 8025: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const fs = __nccwpck_require__(494) + +function symlinkType (srcpath, type, callback) { + callback = (typeof type === 'function') ? type : callback + type = (typeof type === 'function') ? false : type + if (type) return callback(null, type) + fs.lstat(srcpath, (err, stats) => { + if (err) return callback(null, 'file') + type = (stats && stats.isDirectory()) ? 'dir' : 'file' + callback(null, type) + }) +} + +function symlinkTypeSync (srcpath, type) { + let stats + + if (type) return type + try { + stats = fs.lstatSync(srcpath) + } catch (e) { + return 'file' + } + return (stats && stats.isDirectory()) ? 
'dir' : 'file' +} + +module.exports = { + symlinkType, + symlinkTypeSync +} + + +/***/ }), + +/***/ 9664: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const u = (__nccwpck_require__(6350)/* .fromCallback */ .E) +const path = __nccwpck_require__(1017) +const fs = __nccwpck_require__(494) +const _mkdirs = __nccwpck_require__(2544) +const mkdirs = _mkdirs.mkdirs +const mkdirsSync = _mkdirs.mkdirsSync + +const _symlinkPaths = __nccwpck_require__(8891) +const symlinkPaths = _symlinkPaths.symlinkPaths +const symlinkPathsSync = _symlinkPaths.symlinkPathsSync + +const _symlinkType = __nccwpck_require__(8025) +const symlinkType = _symlinkType.symlinkType +const symlinkTypeSync = _symlinkType.symlinkTypeSync + +const pathExists = (__nccwpck_require__(489).pathExists) + +function createSymlink (srcpath, dstpath, type, callback) { + callback = (typeof type === 'function') ? type : callback + type = (typeof type === 'function') ? false : type + + pathExists(dstpath, (err, destinationExists) => { + if (err) return callback(err) + if (destinationExists) return callback(null) + symlinkPaths(srcpath, dstpath, (err, relative) => { + if (err) return callback(err) + srcpath = relative.toDst + symlinkType(relative.toCwd, type, (err, type) => { + if (err) return callback(err) + const dir = path.dirname(dstpath) + pathExists(dir, (err, dirExists) => { + if (err) return callback(err) + if (dirExists) return fs.symlink(srcpath, dstpath, type, callback) + mkdirs(dir, err => { + if (err) return callback(err) + fs.symlink(srcpath, dstpath, type, callback) + }) + }) + }) + }) + }) +} + +function createSymlinkSync (srcpath, dstpath, type) { + const destinationExists = fs.existsSync(dstpath) + if (destinationExists) return undefined + + const relative = symlinkPathsSync(srcpath, dstpath) + srcpath = relative.toDst + type = symlinkTypeSync(relative.toCwd, type) + const dir = path.dirname(dstpath) + const exists = fs.existsSync(dir) + if (exists) return fs.symlinkSync(srcpath, dstpath, type) + mkdirsSync(dir) + return fs.symlinkSync(srcpath, dstpath, type) +} + +module.exports = { + createSymlink: u(createSymlink), + createSymlinkSync +} + + +/***/ }), + +/***/ 754: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// This is adapted from https://github.com/normalize/mz +// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors +const u = (__nccwpck_require__(6350)/* .fromCallback */ .E) +const fs = __nccwpck_require__(494) + +const api = [ + 'access', + 'appendFile', + 'chmod', + 'chown', + 'close', + 'copyFile', + 'fchmod', + 'fchown', + 'fdatasync', + 'fstat', + 'fsync', + 'ftruncate', + 'futimes', + 'lchown', + 'lchmod', + 'link', + 'lstat', + 'mkdir', + 'mkdtemp', + 'open', + 'readFile', + 'readdir', + 'readlink', + 'realpath', + 'rename', + 'rmdir', + 'stat', + 'symlink', + 'truncate', + 'unlink', + 'utimes', + 'writeFile' +].filter(key => { + // Some commands are not available on some systems. 
Ex: + // fs.copyFile was added in Node.js v8.5.0 + // fs.mkdtemp was added in Node.js v5.10.0 + // fs.lchown is not available on at least some Linux + return typeof fs[key] === 'function' +}) + +// Export all keys: +Object.keys(fs).forEach(key => { + if (key === 'promises') { + // fs.promises is a getter property that triggers ExperimentalWarning + // Don't re-export it here, the getter is defined in "lib/index.js" + return + } + exports[key] = fs[key] +}) + +// Universalify async methods: +api.forEach(method => { + exports[method] = u(fs[method]) +}) + +// We differ from mz/fs in that we still ship the old, broken, fs.exists() +// since we are a drop-in replacement for the native module +exports.exists = function (filename, callback) { + if (typeof callback === 'function') { + return fs.exists(filename, callback) + } + return new Promise(resolve => { + return fs.exists(filename, resolve) + }) +} + +// fs.read() & fs.write need special treatment due to multiple callback args + +exports.read = function (fd, buffer, offset, length, position, callback) { + if (typeof callback === 'function') { + return fs.read(fd, buffer, offset, length, position, callback) + } + return new Promise((resolve, reject) => { + fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => { + if (err) return reject(err) + resolve({ bytesRead, buffer }) + }) + }) +} + +// Function signature can be +// fs.write(fd, buffer[, offset[, length[, position]]], callback) +// OR +// fs.write(fd, string[, position[, encoding]], callback) +// We need to handle both cases, so we use ...args +exports.write = function (fd, buffer, ...args) { + if (typeof args[args.length - 1] === 'function') { + return fs.write(fd, buffer, ...args) + } + + return new Promise((resolve, reject) => { + fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => { + if (err) return reject(err) + resolve({ bytesWritten, buffer }) + }) + }) +} + +// fs.realpath.native only available in Node v9.2+ +if (typeof fs.realpath.native === 'function') { + exports.realpath.native = u(fs.realpath.native) +} + + +/***/ }), + +/***/ 77: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +module.exports = Object.assign( + {}, + // Export promiseified graceful-fs: + __nccwpck_require__(754), + // Export extra methods: + __nccwpck_require__(9772), + __nccwpck_require__(349), + __nccwpck_require__(8280), + __nccwpck_require__(4747), + __nccwpck_require__(3727), + __nccwpck_require__(2544), + __nccwpck_require__(6365), + __nccwpck_require__(2858), + __nccwpck_require__(4480), + __nccwpck_require__(489), + __nccwpck_require__(9033) +) + +// Export fs.promises as a getter property so that we don't trigger +// ExperimentalWarning before fs.promises is actually accessed. 
+const fs = __nccwpck_require__(7147) +if (Object.getOwnPropertyDescriptor(fs, 'promises')) { + Object.defineProperty(module.exports, "promises", ({ + get () { return fs.promises } + })) +} + + +/***/ }), + +/***/ 3727: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const u = (__nccwpck_require__(6350)/* .fromCallback */ .E) +const jsonFile = __nccwpck_require__(2209) + +jsonFile.outputJson = u(__nccwpck_require__(764)) +jsonFile.outputJsonSync = __nccwpck_require__(3525) +// aliases +jsonFile.outputJSON = jsonFile.outputJson +jsonFile.outputJSONSync = jsonFile.outputJsonSync +jsonFile.writeJSON = jsonFile.writeJson +jsonFile.writeJSONSync = jsonFile.writeJsonSync +jsonFile.readJSON = jsonFile.readJson +jsonFile.readJSONSync = jsonFile.readJsonSync + +module.exports = jsonFile + + +/***/ }), + +/***/ 2209: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const u = (__nccwpck_require__(6350)/* .fromCallback */ .E) +const jsonFile = __nccwpck_require__(5822) + +module.exports = { + // jsonfile exports + readJson: u(jsonFile.readFile), + readJsonSync: jsonFile.readFileSync, + writeJson: u(jsonFile.writeFile), + writeJsonSync: jsonFile.writeFileSync +} + + +/***/ }), + +/***/ 3525: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const fs = __nccwpck_require__(494) +const path = __nccwpck_require__(1017) +const mkdir = __nccwpck_require__(2544) +const jsonFile = __nccwpck_require__(2209) + +function outputJsonSync (file, data, options) { + const dir = path.dirname(file) + + if (!fs.existsSync(dir)) { + mkdir.mkdirsSync(dir) + } + + jsonFile.writeJsonSync(file, data, options) +} + +module.exports = outputJsonSync + + +/***/ }), + +/***/ 764: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const path = __nccwpck_require__(1017) +const mkdir = __nccwpck_require__(2544) +const pathExists = (__nccwpck_require__(489).pathExists) +const jsonFile = __nccwpck_require__(2209) + +function outputJson (file, data, options, callback) { + if (typeof options === 'function') { + callback = options + options = {} + } + + const dir = path.dirname(file) + + pathExists(dir, (err, itDoes) => { + if (err) return callback(err) + if (itDoes) return jsonFile.writeJson(file, data, options, callback) + + mkdir.mkdirs(dir, err => { + if (err) return callback(err) + jsonFile.writeJson(file, data, options, callback) + }) + }) +} + +module.exports = outputJson + + +/***/ }), + +/***/ 2544: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const u = (__nccwpck_require__(6350)/* .fromCallback */ .E) +const mkdirs = u(__nccwpck_require__(4147)) +const mkdirsSync = __nccwpck_require__(3643) + +module.exports = { + mkdirs, + mkdirsSync, + // alias + mkdirp: mkdirs, + mkdirpSync: mkdirsSync, + ensureDir: mkdirs, + ensureDirSync: mkdirsSync +} + + +/***/ }), + +/***/ 3643: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const fs = __nccwpck_require__(494) +const path = __nccwpck_require__(1017) +const invalidWin32Path = (__nccwpck_require__(3415).invalidWin32Path) + +const o777 = parseInt('0777', 8) + +function mkdirsSync (p, opts, made) { + if (!opts || typeof opts !== 'object') { + opts = { mode: opts } + } + + let mode = opts.mode + const xfs = opts.fs || fs + + if (process.platform === 'win32' && invalidWin32Path(p)) { + const errInval = new Error(p + ' contains invalid WIN32 
path characters.') + errInval.code = 'EINVAL' + throw errInval + } + + if (mode === undefined) { + mode = o777 & (~process.umask()) + } + if (!made) made = null + + p = path.resolve(p) + + try { + xfs.mkdirSync(p, mode) + made = made || p + } catch (err0) { + if (err0.code === 'ENOENT') { + if (path.dirname(p) === p) throw err0 + made = mkdirsSync(path.dirname(p), opts, made) + mkdirsSync(p, opts, made) + } else { + // In the case of any other error, just see if there's a dir there + // already. If so, then hooray! If not, then something is borked. + let stat + try { + stat = xfs.statSync(p) + } catch (err1) { + throw err0 + } + if (!stat.isDirectory()) throw err0 + } + } + + return made +} + +module.exports = mkdirsSync + + +/***/ }), + +/***/ 4147: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const fs = __nccwpck_require__(494) +const path = __nccwpck_require__(1017) +const invalidWin32Path = (__nccwpck_require__(3415).invalidWin32Path) + +const o777 = parseInt('0777', 8) + +function mkdirs (p, opts, callback, made) { + if (typeof opts === 'function') { + callback = opts + opts = {} + } else if (!opts || typeof opts !== 'object') { + opts = { mode: opts } + } + + if (process.platform === 'win32' && invalidWin32Path(p)) { + const errInval = new Error(p + ' contains invalid WIN32 path characters.') + errInval.code = 'EINVAL' + return callback(errInval) + } + + let mode = opts.mode + const xfs = opts.fs || fs + + if (mode === undefined) { + mode = o777 & (~process.umask()) + } + if (!made) made = null + + callback = callback || function () {} + p = path.resolve(p) + + xfs.mkdir(p, mode, er => { + if (!er) { + made = made || p + return callback(null, made) + } + switch (er.code) { + case 'ENOENT': + if (path.dirname(p) === p) return callback(er) + mkdirs(path.dirname(p), opts, (er, made) => { + if (er) callback(er, made) + else mkdirs(p, opts, callback, made) + }) + break + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + xfs.stat(p, (er2, stat) => { + // if the stat fails, then that's super weird. + // let the original error be the failure reason. 
+ if (er2 || !stat.isDirectory()) callback(er, made) + else callback(null, made) + }) + break + } + }) +} + +module.exports = mkdirs + + +/***/ }), + +/***/ 3415: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const path = __nccwpck_require__(1017) + +// get drive on windows +function getRootPath (p) { + p = path.normalize(path.resolve(p)).split(path.sep) + if (p.length > 0) return p[0] + return null +} + +// http://stackoverflow.com/a/62888/10333 contains more accurate +// TODO: expand to include the rest +const INVALID_PATH_CHARS = /[<>:"|?*]/ + +function invalidWin32Path (p) { + const rp = getRootPath(p) + p = p.replace(rp, '') + return INVALID_PATH_CHARS.test(p) +} + +module.exports = { + getRootPath, + invalidWin32Path +} + + +/***/ }), + +/***/ 6365: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +module.exports = { + moveSync: __nccwpck_require__(4203) +} + + +/***/ }), + +/***/ 4203: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const fs = __nccwpck_require__(494) +const path = __nccwpck_require__(1017) +const copySync = (__nccwpck_require__(9772).copySync) +const removeSync = (__nccwpck_require__(9033).removeSync) +const mkdirpSync = (__nccwpck_require__(2544).mkdirpSync) +const stat = __nccwpck_require__(5799) + +function moveSync (src, dest, opts) { + opts = opts || {} + const overwrite = opts.overwrite || opts.clobber || false + + const { srcStat } = stat.checkPathsSync(src, dest, 'move') + stat.checkParentPathsSync(src, srcStat, dest, 'move') + mkdirpSync(path.dirname(dest)) + return doRename(src, dest, overwrite) +} + +function doRename (src, dest, overwrite) { + if (overwrite) { + removeSync(dest) + return rename(src, dest, overwrite) + } + if (fs.existsSync(dest)) throw new Error('dest already exists.') + return rename(src, dest, overwrite) +} + +function rename (src, dest, overwrite) { + try { + fs.renameSync(src, dest) + } catch (err) { + if (err.code !== 'EXDEV') throw err + return moveAcrossDevice(src, dest, overwrite) + } +} + +function moveAcrossDevice (src, dest, overwrite) { + const opts = { + overwrite, + errorOnExist: true + } + copySync(src, dest, opts) + return removeSync(src) +} + +module.exports = moveSync + + +/***/ }), + +/***/ 2858: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const u = (__nccwpck_require__(6350)/* .fromCallback */ .E) +module.exports = { + move: u(__nccwpck_require__(5283)) +} + + +/***/ }), + +/***/ 5283: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const fs = __nccwpck_require__(494) +const path = __nccwpck_require__(1017) +const copy = (__nccwpck_require__(349).copy) +const remove = (__nccwpck_require__(9033).remove) +const mkdirp = (__nccwpck_require__(2544).mkdirp) +const pathExists = (__nccwpck_require__(489).pathExists) +const stat = __nccwpck_require__(5799) + +function move (src, dest, opts, cb) { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + + const overwrite = opts.overwrite || opts.clobber || false + + stat.checkPaths(src, dest, 'move', (err, stats) => { + if (err) return cb(err) + const { srcStat } = stats + stat.checkParentPaths(src, srcStat, dest, 'move', err => { + if (err) return cb(err) + mkdirp(path.dirname(dest), err => { + if (err) return cb(err) + return doRename(src, dest, overwrite, cb) + }) + }) + }) +} + +function doRename (src, dest, overwrite, cb) { + if (overwrite) { + 
return remove(dest, err => { + if (err) return cb(err) + return rename(src, dest, overwrite, cb) + }) + } + pathExists(dest, (err, destExists) => { + if (err) return cb(err) + if (destExists) return cb(new Error('dest already exists.')) + return rename(src, dest, overwrite, cb) + }) +} + +function rename (src, dest, overwrite, cb) { + fs.rename(src, dest, err => { + if (!err) return cb() + if (err.code !== 'EXDEV') return cb(err) + return moveAcrossDevice(src, dest, overwrite, cb) + }) +} + +function moveAcrossDevice (src, dest, overwrite, cb) { + const opts = { + overwrite, + errorOnExist: true + } + copy(src, dest, opts, err => { + if (err) return cb(err) + return remove(src, cb) + }) +} + +module.exports = move + + +/***/ }), + +/***/ 4480: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const u = (__nccwpck_require__(6350)/* .fromCallback */ .E) +const fs = __nccwpck_require__(494) +const path = __nccwpck_require__(1017) +const mkdir = __nccwpck_require__(2544) +const pathExists = (__nccwpck_require__(489).pathExists) + +function outputFile (file, data, encoding, callback) { + if (typeof encoding === 'function') { + callback = encoding + encoding = 'utf8' + } + + const dir = path.dirname(file) + pathExists(dir, (err, itDoes) => { + if (err) return callback(err) + if (itDoes) return fs.writeFile(file, data, encoding, callback) + + mkdir.mkdirs(dir, err => { + if (err) return callback(err) + + fs.writeFile(file, data, encoding, callback) + }) + }) +} + +function outputFileSync (file, ...args) { + const dir = path.dirname(file) + if (fs.existsSync(dir)) { + return fs.writeFileSync(file, ...args) + } + mkdir.mkdirsSync(dir) + fs.writeFileSync(file, ...args) +} + +module.exports = { + outputFile: u(outputFile), + outputFileSync +} + + +/***/ }), + +/***/ 489: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const u = (__nccwpck_require__(6350)/* .fromPromise */ .p) +const fs = __nccwpck_require__(754) + +function pathExists (path) { + return fs.access(path).then(() => true).catch(() => false) +} + +module.exports = { + pathExists: u(pathExists), + pathExistsSync: fs.existsSync +} + + +/***/ }), + +/***/ 9033: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const u = (__nccwpck_require__(6350)/* .fromCallback */ .E) +const rimraf = __nccwpck_require__(2836) + +module.exports = { + remove: u(rimraf), + removeSync: rimraf.sync +} + + +/***/ }), + +/***/ 2836: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const fs = __nccwpck_require__(494) +const path = __nccwpck_require__(1017) +const assert = __nccwpck_require__(9491) + +const isWindows = (process.platform === 'win32') + +function defaults (options) { + const methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(m => { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) + + options.maxBusyTries = options.maxBusyTries || 3 +} + +function rimraf (p, options, cb) { + let busyTries = 0 + + if (typeof options === 'function') { + cb = options + options = {} + } + + assert(p, 'rimraf: missing path') + assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') + assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.strictEqual(typeof options, 'object', 'rimraf: options should be 
object') + + defaults(options) + + rimraf_(p, options, function CB (er) { + if (er) { + if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') && + busyTries < options.maxBusyTries) { + busyTries++ + const time = busyTries * 100 + // try again, with the same exact callback as this one. + return setTimeout(() => rimraf_(p, options, CB), time) + } + + // already gone + if (er.code === 'ENOENT') er = null + } + + cb(er) + }) +} + +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +function rimraf_ (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, (er, st) => { + if (er && er.code === 'ENOENT') { + return cb(null) + } + + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === 'EPERM' && isWindows) { + return fixWinEPERM(p, options, er, cb) + } + + if (st && st.isDirectory()) { + return rmdir(p, options, er, cb) + } + + options.unlink(p, er => { + if (er) { + if (er.code === 'ENOENT') { + return cb(null) + } + if (er.code === 'EPERM') { + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + } + if (er.code === 'EISDIR') { + return rmdir(p, options, er, cb) + } + } + return cb(er) + }) + }) +} + +function fixWinEPERM (p, options, er, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + if (er) { + assert(er instanceof Error) + } + + options.chmod(p, 0o666, er2 => { + if (er2) { + cb(er2.code === 'ENOENT' ? null : er) + } else { + options.stat(p, (er3, stats) => { + if (er3) { + cb(er3.code === 'ENOENT' ? null : er) + } else if (stats.isDirectory()) { + rmdir(p, options, er, cb) + } else { + options.unlink(p, cb) + } + }) + } + }) +} + +function fixWinEPERMSync (p, options, er) { + let stats + + assert(p) + assert(options) + if (er) { + assert(er instanceof Error) + } + + try { + options.chmodSync(p, 0o666) + } catch (er2) { + if (er2.code === 'ENOENT') { + return + } else { + throw er + } + } + + try { + stats = options.statSync(p) + } catch (er3) { + if (er3.code === 'ENOENT') { + return + } else { + throw er + } + } + + if (stats.isDirectory()) { + rmdirSync(p, options, er) + } else { + options.unlinkSync(p) + } +} + +function rmdir (p, options, originalEr, cb) { + assert(p) + assert(options) + if (originalEr) { + assert(originalEr instanceof Error) + } + assert(typeof cb === 'function') + + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. 
+ options.rmdir(p, er => { + if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) { + rmkids(p, options, cb) + } else if (er && er.code === 'ENOTDIR') { + cb(originalEr) + } else { + cb(er) + } + }) +} + +function rmkids (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.readdir(p, (er, files) => { + if (er) return cb(er) + + let n = files.length + let errState + + if (n === 0) return options.rmdir(p, cb) + + files.forEach(f => { + rimraf(path.join(p, f), options, er => { + if (errState) { + return + } + if (er) return cb(errState = er) + if (--n === 0) { + options.rmdir(p, cb) + } + }) + }) + }) +} + +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +function rimrafSync (p, options) { + let st + + options = options || {} + defaults(options) + + assert(p, 'rimraf: missing path') + assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') + + try { + st = options.lstatSync(p) + } catch (er) { + if (er.code === 'ENOENT') { + return + } + + // Windows can EPERM on stat. Life is suffering. + if (er.code === 'EPERM' && isWindows) { + fixWinEPERMSync(p, options, er) + } + } + + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) { + rmdirSync(p, options, null) + } else { + options.unlinkSync(p) + } + } catch (er) { + if (er.code === 'ENOENT') { + return + } else if (er.code === 'EPERM') { + return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + } else if (er.code !== 'EISDIR') { + throw er + } + rmdirSync(p, options, er) + } +} + +function rmdirSync (p, options, originalEr) { + assert(p) + assert(options) + if (originalEr) { + assert(originalEr instanceof Error) + } + + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === 'ENOTDIR') { + throw originalEr + } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') { + rmkidsSync(p, options) + } else if (er.code !== 'ENOENT') { + throw er + } + } +} + +function rmkidsSync (p, options) { + assert(p) + assert(options) + options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) + + if (isWindows) { + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. 
+ const startTime = Date.now() + do { + try { + const ret = options.rmdirSync(p, options) + return ret + } catch (er) { } + } while (Date.now() - startTime < 500) // give up after 500ms + } else { + const ret = options.rmdirSync(p, options) + return ret + } +} + +module.exports = rimraf +rimraf.sync = rimrafSync + + +/***/ }), + +/***/ 9956: +/***/ ((module) => { + +"use strict"; + +/* eslint-disable node/no-deprecated-api */ +module.exports = function (size) { + if (typeof Buffer.allocUnsafe === 'function') { + try { + return Buffer.allocUnsafe(size) + } catch (e) { + return new Buffer(size) + } + } + return new Buffer(size) +} + + +/***/ }), + +/***/ 5799: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const fs = __nccwpck_require__(494) +const path = __nccwpck_require__(1017) + +const NODE_VERSION_MAJOR_WITH_BIGINT = 10 +const NODE_VERSION_MINOR_WITH_BIGINT = 5 +const NODE_VERSION_PATCH_WITH_BIGINT = 0 +const nodeVersion = process.versions.node.split('.') +const nodeVersionMajor = Number.parseInt(nodeVersion[0], 10) +const nodeVersionMinor = Number.parseInt(nodeVersion[1], 10) +const nodeVersionPatch = Number.parseInt(nodeVersion[2], 10) + +function nodeSupportsBigInt () { + if (nodeVersionMajor > NODE_VERSION_MAJOR_WITH_BIGINT) { + return true + } else if (nodeVersionMajor === NODE_VERSION_MAJOR_WITH_BIGINT) { + if (nodeVersionMinor > NODE_VERSION_MINOR_WITH_BIGINT) { + return true + } else if (nodeVersionMinor === NODE_VERSION_MINOR_WITH_BIGINT) { + if (nodeVersionPatch >= NODE_VERSION_PATCH_WITH_BIGINT) { + return true + } + } + } + return false +} + +function getStats (src, dest, cb) { + if (nodeSupportsBigInt()) { + fs.stat(src, { bigint: true }, (err, srcStat) => { + if (err) return cb(err) + fs.stat(dest, { bigint: true }, (err, destStat) => { + if (err) { + if (err.code === 'ENOENT') return cb(null, { srcStat, destStat: null }) + return cb(err) + } + return cb(null, { srcStat, destStat }) + }) + }) + } else { + fs.stat(src, (err, srcStat) => { + if (err) return cb(err) + fs.stat(dest, (err, destStat) => { + if (err) { + if (err.code === 'ENOENT') return cb(null, { srcStat, destStat: null }) + return cb(err) + } + return cb(null, { srcStat, destStat }) + }) + }) + } +} + +function getStatsSync (src, dest) { + let srcStat, destStat + if (nodeSupportsBigInt()) { + srcStat = fs.statSync(src, { bigint: true }) + } else { + srcStat = fs.statSync(src) + } + try { + if (nodeSupportsBigInt()) { + destStat = fs.statSync(dest, { bigint: true }) + } else { + destStat = fs.statSync(dest) + } + } catch (err) { + if (err.code === 'ENOENT') return { srcStat, destStat: null } + throw err + } + return { srcStat, destStat } +} + +function checkPaths (src, dest, funcName, cb) { + getStats(src, dest, (err, stats) => { + if (err) return cb(err) + const { srcStat, destStat } = stats + if (destStat && destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) { + return cb(new Error('Source and destination must not be the same.')) + } + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + return cb(new Error(errMsg(src, dest, funcName))) + } + return cb(null, { srcStat, destStat }) + }) +} + +function checkPathsSync (src, dest, funcName) { + const { srcStat, destStat } = getStatsSync(src, dest) + if (destStat && destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) { + throw new Error('Source and destination must not be the same.') + } + if (srcStat.isDirectory() && 
isSrcSubdir(src, dest)) { + throw new Error(errMsg(src, dest, funcName)) + } + return { srcStat, destStat } +} + +// recursively check if dest parent is a subdirectory of src. +// It works for all file types including symlinks since it +// checks the src and dest inodes. It starts from the deepest +// parent and stops once it reaches the src parent or the root path. +function checkParentPaths (src, srcStat, dest, funcName, cb) { + const srcParent = path.resolve(path.dirname(src)) + const destParent = path.resolve(path.dirname(dest)) + if (destParent === srcParent || destParent === path.parse(destParent).root) return cb() + if (nodeSupportsBigInt()) { + fs.stat(destParent, { bigint: true }, (err, destStat) => { + if (err) { + if (err.code === 'ENOENT') return cb() + return cb(err) + } + if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) { + return cb(new Error(errMsg(src, dest, funcName))) + } + return checkParentPaths(src, srcStat, destParent, funcName, cb) + }) + } else { + fs.stat(destParent, (err, destStat) => { + if (err) { + if (err.code === 'ENOENT') return cb() + return cb(err) + } + if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) { + return cb(new Error(errMsg(src, dest, funcName))) + } + return checkParentPaths(src, srcStat, destParent, funcName, cb) + }) + } +} + +function checkParentPathsSync (src, srcStat, dest, funcName) { + const srcParent = path.resolve(path.dirname(src)) + const destParent = path.resolve(path.dirname(dest)) + if (destParent === srcParent || destParent === path.parse(destParent).root) return + let destStat + try { + if (nodeSupportsBigInt()) { + destStat = fs.statSync(destParent, { bigint: true }) + } else { + destStat = fs.statSync(destParent) + } + } catch (err) { + if (err.code === 'ENOENT') return + throw err + } + if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) { + throw new Error(errMsg(src, dest, funcName)) + } + return checkParentPathsSync(src, srcStat, destParent, funcName) +} + +// return true if dest is a subdir of src, otherwise false. +// It only checks the path strings. 
+function isSrcSubdir (src, dest) { + const srcArr = path.resolve(src).split(path.sep).filter(i => i) + const destArr = path.resolve(dest).split(path.sep).filter(i => i) + return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true) +} + +function errMsg (src, dest, funcName) { + return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.` +} + +module.exports = { + checkPaths, + checkPathsSync, + checkParentPaths, + checkParentPathsSync, + isSrcSubdir +} + + +/***/ }), + +/***/ 7113: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const fs = __nccwpck_require__(494) +const os = __nccwpck_require__(2037) +const path = __nccwpck_require__(1017) + +// HFS, ext{2,3}, FAT do not, Node.js v0.10 does not +function hasMillisResSync () { + let tmpfile = path.join('millis-test-sync' + Date.now().toString() + Math.random().toString().slice(2)) + tmpfile = path.join(os.tmpdir(), tmpfile) + + // 550 millis past UNIX epoch + const d = new Date(1435410243862) + fs.writeFileSync(tmpfile, 'https://github.com/jprichardson/node-fs-extra/pull/141') + const fd = fs.openSync(tmpfile, 'r+') + fs.futimesSync(fd, d, d) + fs.closeSync(fd) + return fs.statSync(tmpfile).mtime > 1435410243000 +} + +function hasMillisRes (callback) { + let tmpfile = path.join('millis-test' + Date.now().toString() + Math.random().toString().slice(2)) + tmpfile = path.join(os.tmpdir(), tmpfile) + + // 550 millis past UNIX epoch + const d = new Date(1435410243862) + fs.writeFile(tmpfile, 'https://github.com/jprichardson/node-fs-extra/pull/141', err => { + if (err) return callback(err) + fs.open(tmpfile, 'r+', (err, fd) => { + if (err) return callback(err) + fs.futimes(fd, d, d, err => { + if (err) return callback(err) + fs.close(fd, err => { + if (err) return callback(err) + fs.stat(tmpfile, (err, stats) => { + if (err) return callback(err) + callback(null, stats.mtime > 1435410243000) + }) + }) + }) + }) + }) +} + +function timeRemoveMillis (timestamp) { + if (typeof timestamp === 'number') { + return Math.floor(timestamp / 1000) * 1000 + } else if (timestamp instanceof Date) { + return new Date(Math.floor(timestamp.getTime() / 1000) * 1000) + } else { + throw new Error('fs-extra: timeRemoveMillis() unknown parameter type') + } +} + +function utimesMillis (path, atime, mtime, callback) { + // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback) + fs.open(path, 'r+', (err, fd) => { + if (err) return callback(err) + fs.futimes(fd, atime, mtime, futimesErr => { + fs.close(fd, closeErr => { + if (callback) callback(futimesErr || closeErr) + }) + }) + }) +} + +function utimesMillisSync (path, atime, mtime) { + const fd = fs.openSync(path, 'r+') + fs.futimesSync(fd, atime, mtime) + return fs.closeSync(fd) +} + +module.exports = { + hasMillisRes, + hasMillisResSync, + timeRemoveMillis, + utimesMillis, + utimesMillisSync +} + + +/***/ }), + +/***/ 3889: +/***/ ((module) => { + +"use strict"; + + +module.exports = clone + +var getPrototypeOf = Object.getPrototypeOf || function (obj) { + return obj.__proto__ +} + +function clone (obj) { + if (obj === null || typeof obj !== 'object') + return obj + + if (obj instanceof Object) + var copy = { __proto__: getPrototypeOf(obj) } + else + var copy = Object.create(null) + + Object.getOwnPropertyNames(obj).forEach(function (key) { + Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)) + }) + + return copy +} + + +/***/ }), + +/***/ 494: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) 
=> { + +var fs = __nccwpck_require__(7147) +var polyfills = __nccwpck_require__(9154) +var legacy = __nccwpck_require__(2723) +var clone = __nccwpck_require__(3889) + +var util = __nccwpck_require__(3837) + +/* istanbul ignore next - node 0.x polyfill */ +var gracefulQueue +var previousSymbol + +/* istanbul ignore else - node 0.x polyfill */ +if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { + gracefulQueue = Symbol.for('graceful-fs.queue') + // This is used in testing by future versions + previousSymbol = Symbol.for('graceful-fs.previous') +} else { + gracefulQueue = '___graceful-fs.queue' + previousSymbol = '___graceful-fs.previous' +} + +function noop () {} + +function publishQueue(context, queue) { + Object.defineProperty(context, gracefulQueue, { + get: function() { + return queue + } + }) +} + +var debug = noop +if (util.debuglog) + debug = util.debuglog('gfs4') +else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) + debug = function() { + var m = util.format.apply(util, arguments) + m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') + console.error(m) + } + +// Once time initialization +if (!fs[gracefulQueue]) { + // This queue can be shared by multiple loaded instances + var queue = global[gracefulQueue] || [] + publishQueue(fs, queue) + + // Patch fs.close/closeSync to shared queue version, because we need + // to retry() whenever a close happens *anywhere* in the program. + // This is essential when multiple graceful-fs instances are + // in play at the same time. + fs.close = (function (fs$close) { + function close (fd, cb) { + return fs$close.call(fs, fd, function (err) { + // This function uses the graceful-fs shared queue + if (!err) { + resetQueue() + } + + if (typeof cb === 'function') + cb.apply(this, arguments) + }) + } + + Object.defineProperty(close, previousSymbol, { + value: fs$close + }) + return close + })(fs.close) + + fs.closeSync = (function (fs$closeSync) { + function closeSync (fd) { + // This function uses the graceful-fs shared queue + fs$closeSync.apply(fs, arguments) + resetQueue() + } + + Object.defineProperty(closeSync, previousSymbol, { + value: fs$closeSync + }) + return closeSync + })(fs.closeSync) + + if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { + process.on('exit', function() { + debug(fs[gracefulQueue]) + __nccwpck_require__(9491).equal(fs[gracefulQueue].length, 0) + }) + } +} + +if (!global[gracefulQueue]) { + publishQueue(global, fs[gracefulQueue]); +} + +module.exports = patch(clone(fs)) +if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { + module.exports = patch(fs) + fs.__patched = true; +} + +function patch (fs) { + // Everything that references the open() function needs to be in here + polyfills(fs) + fs.gracefulify = patch + + fs.createReadStream = createReadStream + fs.createWriteStream = createWriteStream + var fs$readFile = fs.readFile + fs.readFile = readFile + function readFile (path, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$readFile(path, options, cb) + + function go$readFile (path, options, cb, startTime) { + return fs$readFile(path, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + var fs$writeFile = fs.writeFile + fs.writeFile = writeFile + function writeFile (path, data, options, cb) { + if (typeof options === 
'function') + cb = options, options = null + + return go$writeFile(path, data, options, cb) + + function go$writeFile (path, data, options, cb, startTime) { + return fs$writeFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + var fs$appendFile = fs.appendFile + if (fs$appendFile) + fs.appendFile = appendFile + function appendFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$appendFile(path, data, options, cb) + + function go$appendFile (path, data, options, cb, startTime) { + return fs$appendFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + var fs$copyFile = fs.copyFile + if (fs$copyFile) + fs.copyFile = copyFile + function copyFile (src, dest, flags, cb) { + if (typeof flags === 'function') { + cb = flags + flags = 0 + } + return go$copyFile(src, dest, flags, cb) + + function go$copyFile (src, dest, flags, cb, startTime) { + return fs$copyFile(src, dest, flags, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + var fs$readdir = fs.readdir + fs.readdir = readdir + var noReaddirOptionVersions = /^v[0-5]\./ + function readdir (path, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + var go$readdir = noReaddirOptionVersions.test(process.version) + ? 
function go$readdir (path, options, cb, startTime) { + return fs$readdir(path, fs$readdirCallback( + path, options, cb, startTime + )) + } + : function go$readdir (path, options, cb, startTime) { + return fs$readdir(path, options, fs$readdirCallback( + path, options, cb, startTime + )) + } + + return go$readdir(path, options, cb) + + function fs$readdirCallback (path, options, cb, startTime) { + return function (err, files) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([ + go$readdir, + [path, options, cb], + err, + startTime || Date.now(), + Date.now() + ]) + else { + if (files && files.sort) + files.sort() + + if (typeof cb === 'function') + cb.call(this, err, files) + } + } + } + } + + if (process.version.substr(0, 4) === 'v0.8') { + var legStreams = legacy(fs) + ReadStream = legStreams.ReadStream + WriteStream = legStreams.WriteStream + } + + var fs$ReadStream = fs.ReadStream + if (fs$ReadStream) { + ReadStream.prototype = Object.create(fs$ReadStream.prototype) + ReadStream.prototype.open = ReadStream$open + } + + var fs$WriteStream = fs.WriteStream + if (fs$WriteStream) { + WriteStream.prototype = Object.create(fs$WriteStream.prototype) + WriteStream.prototype.open = WriteStream$open + } + + Object.defineProperty(fs, 'ReadStream', { + get: function () { + return ReadStream + }, + set: function (val) { + ReadStream = val + }, + enumerable: true, + configurable: true + }) + Object.defineProperty(fs, 'WriteStream', { + get: function () { + return WriteStream + }, + set: function (val) { + WriteStream = val + }, + enumerable: true, + configurable: true + }) + + // legacy names + var FileReadStream = ReadStream + Object.defineProperty(fs, 'FileReadStream', { + get: function () { + return FileReadStream + }, + set: function (val) { + FileReadStream = val + }, + enumerable: true, + configurable: true + }) + var FileWriteStream = WriteStream + Object.defineProperty(fs, 'FileWriteStream', { + get: function () { + return FileWriteStream + }, + set: function (val) { + FileWriteStream = val + }, + enumerable: true, + configurable: true + }) + + function ReadStream (path, options) { + if (this instanceof ReadStream) + return fs$ReadStream.apply(this, arguments), this + else + return ReadStream.apply(Object.create(ReadStream.prototype), arguments) + } + + function ReadStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + if (that.autoClose) + that.destroy() + + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) + that.read() + } + }) + } + + function WriteStream (path, options) { + if (this instanceof WriteStream) + return fs$WriteStream.apply(this, arguments), this + else + return WriteStream.apply(Object.create(WriteStream.prototype), arguments) + } + + function WriteStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + that.destroy() + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) + } + }) + } + + function createReadStream (path, options) { + return new fs.ReadStream(path, options) + } + + function createWriteStream (path, options) { + return new fs.WriteStream(path, options) + } + + var fs$open = fs.open + fs.open = open + function open (path, flags, mode, cb) { + if (typeof mode === 'function') + cb = mode, mode = null + + return go$open(path, flags, mode, cb) + + function go$open (path, flags, mode, cb, startTime) { + return fs$open(path, flags, mode, function (err, fd) { + if (err && (err.code === 
'EMFILE' || err.code === 'ENFILE')) + enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + return fs +} + +function enqueue (elem) { + debug('ENQUEUE', elem[0].name, elem[1]) + fs[gracefulQueue].push(elem) + retry() +} + +// keep track of the timeout between retry() calls +var retryTimer + +// reset the startTime and lastTime to now +// this resets the start of the 60 second overall timeout as well as the +// delay between attempts so that we'll retry these jobs sooner +function resetQueue () { + var now = Date.now() + for (var i = 0; i < fs[gracefulQueue].length; ++i) { + // entries that are only a length of 2 are from an older version, don't + // bother modifying those since they'll be retried anyway. + if (fs[gracefulQueue][i].length > 2) { + fs[gracefulQueue][i][3] = now // startTime + fs[gracefulQueue][i][4] = now // lastTime + } + } + // call retry to make sure we're actively processing the queue + retry() +} + +function retry () { + // clear the timer and remove it to help prevent unintended concurrency + clearTimeout(retryTimer) + retryTimer = undefined + + if (fs[gracefulQueue].length === 0) + return + + var elem = fs[gracefulQueue].shift() + var fn = elem[0] + var args = elem[1] + // these items may be unset if they were added by an older graceful-fs + var err = elem[2] + var startTime = elem[3] + var lastTime = elem[4] + + // if we don't have a startTime we have no way of knowing if we've waited + // long enough, so go ahead and retry this item now + if (startTime === undefined) { + debug('RETRY', fn.name, args) + fn.apply(null, args) + } else if (Date.now() - startTime >= 60000) { + // it's been more than 60 seconds total, bail now + debug('TIMEOUT', fn.name, args) + var cb = args.pop() + if (typeof cb === 'function') + cb.call(null, err) + } else { + // the amount of time between the last attempt and right now + var sinceAttempt = Date.now() - lastTime + // the amount of time between when we first tried, and when we last tried + // rounded up to at least 1 + var sinceStart = Math.max(lastTime - startTime, 1) + // backoff. 
wait longer than the total time we've been retrying, but only + // up to a maximum of 100ms + var desiredDelay = Math.min(sinceStart * 1.2, 100) + // it's been long enough since the last retry, do it again + if (sinceAttempt >= desiredDelay) { + debug('RETRY', fn.name, args) + fn.apply(null, args.concat([startTime])) + } else { + // if we can't do this job yet, push it to the end of the queue + // and let the next iteration check again + fs[gracefulQueue].push(elem) + } + } + + // schedule our next run if one isn't already scheduled + if (retryTimer === undefined) { + retryTimer = setTimeout(retry, 0) + } +} + + +/***/ }), + +/***/ 2723: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var Stream = (__nccwpck_require__(2781).Stream) + +module.exports = legacy + +function legacy (fs) { + return { + ReadStream: ReadStream, + WriteStream: WriteStream + } + + function ReadStream (path, options) { + if (!(this instanceof ReadStream)) return new ReadStream(path, options); + + Stream.call(this); + + var self = this; + + this.path = path; + this.fd = null; + this.readable = true; + this.paused = false; + + this.flags = 'r'; + this.mode = 438; /*=0666*/ + this.bufferSize = 64 * 1024; + + options = options || {}; + + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } + + if (this.encoding) this.setEncoding(this.encoding); + + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.end === undefined) { + this.end = Infinity; + } else if ('number' !== typeof this.end) { + throw TypeError('end must be a Number'); + } + + if (this.start > this.end) { + throw new Error('start must be <= end'); + } + + this.pos = this.start; + } + + if (this.fd !== null) { + process.nextTick(function() { + self._read(); + }); + return; + } + + fs.open(this.path, this.flags, this.mode, function (err, fd) { + if (err) { + self.emit('error', err); + self.readable = false; + return; + } + + self.fd = fd; + self.emit('open', fd); + self._read(); + }) + } + + function WriteStream (path, options) { + if (!(this instanceof WriteStream)) return new WriteStream(path, options); + + Stream.call(this); + + this.path = path; + this.fd = null; + this.writable = true; + + this.flags = 'w'; + this.encoding = 'binary'; + this.mode = 438; /*=0666*/ + this.bytesWritten = 0; + + options = options || {}; + + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } + + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.start < 0) { + throw new Error('start must be >= zero'); + } + + this.pos = this.start; + } + + this.busy = false; + this._queue = []; + + if (this.fd === null) { + this._open = fs.open; + this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); + this.flush(); + } + } +} + + +/***/ }), + +/***/ 9154: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var constants = __nccwpck_require__(2057) + +var origCwd = process.cwd +var cwd = null + +var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform + +process.cwd = function() { + if (!cwd) + cwd = origCwd.call(process) + return cwd +} +try { + process.cwd() +} catch (er) {} + +// This 
check is needed until node.js 12 is required +if (typeof process.chdir === 'function') { + var chdir = process.chdir + process.chdir = function (d) { + cwd = null + chdir.call(process, d) + } + if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir) +} + +module.exports = patch + +function patch (fs) { + // (re-)implement some things that are known busted or missing. + + // lchmod, broken prior to 0.6.2 + // back-port the fix here. + if (constants.hasOwnProperty('O_SYMLINK') && + process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { + patchLchmod(fs) + } + + // lutimes implementation, or no-op + if (!fs.lutimes) { + patchLutimes(fs) + } + + // https://github.com/isaacs/node-graceful-fs/issues/4 + // Chown should not fail on einval or eperm if non-root. + // It should not fail on enosys ever, as this just indicates + // that a fs doesn't support the intended operation. + + fs.chown = chownFix(fs.chown) + fs.fchown = chownFix(fs.fchown) + fs.lchown = chownFix(fs.lchown) + + fs.chmod = chmodFix(fs.chmod) + fs.fchmod = chmodFix(fs.fchmod) + fs.lchmod = chmodFix(fs.lchmod) + + fs.chownSync = chownFixSync(fs.chownSync) + fs.fchownSync = chownFixSync(fs.fchownSync) + fs.lchownSync = chownFixSync(fs.lchownSync) + + fs.chmodSync = chmodFixSync(fs.chmodSync) + fs.fchmodSync = chmodFixSync(fs.fchmodSync) + fs.lchmodSync = chmodFixSync(fs.lchmodSync) + + fs.stat = statFix(fs.stat) + fs.fstat = statFix(fs.fstat) + fs.lstat = statFix(fs.lstat) + + fs.statSync = statFixSync(fs.statSync) + fs.fstatSync = statFixSync(fs.fstatSync) + fs.lstatSync = statFixSync(fs.lstatSync) + + // if lchmod/lchown do not exist, then make them no-ops + if (fs.chmod && !fs.lchmod) { + fs.lchmod = function (path, mode, cb) { + if (cb) process.nextTick(cb) + } + fs.lchmodSync = function () {} + } + if (fs.chown && !fs.lchown) { + fs.lchown = function (path, uid, gid, cb) { + if (cb) process.nextTick(cb) + } + fs.lchownSync = function () {} + } + + // on Windows, A/V software can lock the directory, causing this + // to fail with an EACCES or EPERM if the directory contains newly + // created files. Try again on failure, for up to 60 seconds. + + // Set the timeout this long because some Windows Anti-Virus, such as Parity + // bit9, may lock files for up to a minute, causing npm package install + // failures. Also, take care to yield the scheduler. Windows scheduling gives + // CPU to a busy looping process, which can cause the program causing the lock + // contention to be starved of CPU by node, so the contention doesn't resolve. + if (platform === "win32") { + fs.rename = typeof fs.rename !== 'function' ? fs.rename + : (function (fs$rename) { + function rename (from, to, cb) { + var start = Date.now() + var backoff = 0; + fs$rename(from, to, function CB (er) { + if (er + && (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY") + && Date.now() - start < 60000) { + setTimeout(function() { + fs.stat(to, function (stater, st) { + if (stater && stater.code === "ENOENT") + fs$rename(from, to, CB); + else + cb(er) + }) + }, backoff) + if (backoff < 100) + backoff += 10; + return; + } + if (cb) cb(er) + }) + } + if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename) + return rename + })(fs.rename) + } + + // if read() returns EAGAIN, then just try it again. + fs.read = typeof fs.read !== 'function' ? 
fs.read + : (function (fs$read) { + function read (fd, buffer, offset, length, position, callback_) { + var callback + if (callback_ && typeof callback_ === 'function') { + var eagCounter = 0 + callback = function (er, _, __) { + if (er && er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + callback_.apply(this, arguments) + } + } + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + + // This ensures `util.promisify` works as it does for native `fs.read`. + if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) + return read + })(fs.read) + + fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync + : (function (fs$readSync) { return function (fd, buffer, offset, length, position) { + var eagCounter = 0 + while (true) { + try { + return fs$readSync.call(fs, fd, buffer, offset, length, position) + } catch (er) { + if (er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + continue + } + throw er + } + } + }})(fs.readSync) + + function patchLchmod (fs) { + fs.lchmod = function (path, mode, callback) { + fs.open( path + , constants.O_WRONLY | constants.O_SYMLINK + , mode + , function (err, fd) { + if (err) { + if (callback) callback(err) + return + } + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + fs.fchmod(fd, mode, function (err) { + fs.close(fd, function(err2) { + if (callback) callback(err || err2) + }) + }) + }) + } + + fs.lchmodSync = function (path, mode) { + var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) + + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. 
+ var threw = true + var ret + try { + ret = fs.fchmodSync(fd, mode) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } + } + + function patchLutimes (fs) { + if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) { + fs.lutimes = function (path, at, mt, cb) { + fs.open(path, constants.O_SYMLINK, function (er, fd) { + if (er) { + if (cb) cb(er) + return + } + fs.futimes(fd, at, mt, function (er) { + fs.close(fd, function (er2) { + if (cb) cb(er || er2) + }) + }) + }) + } + + fs.lutimesSync = function (path, at, mt) { + var fd = fs.openSync(path, constants.O_SYMLINK) + var ret + var threw = true + try { + ret = fs.futimesSync(fd, at, mt) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } + + } else if (fs.futimes) { + fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } + fs.lutimesSync = function () {} + } + } + + function chmodFix (orig) { + if (!orig) return orig + return function (target, mode, cb) { + return orig.call(fs, target, mode, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } + } + + function chmodFixSync (orig) { + if (!orig) return orig + return function (target, mode) { + try { + return orig.call(fs, target, mode) + } catch (er) { + if (!chownErOk(er)) throw er + } + } + } + + + function chownFix (orig) { + if (!orig) return orig + return function (target, uid, gid, cb) { + return orig.call(fs, target, uid, gid, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } + } + + function chownFixSync (orig) { + if (!orig) return orig + return function (target, uid, gid) { + try { + return orig.call(fs, target, uid, gid) + } catch (er) { + if (!chownErOk(er)) throw er + } + } + } + + function statFix (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + function callback (er, stats) { + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + if (cb) cb.apply(this, arguments) + } + return options ? orig.call(fs, target, options, callback) + : orig.call(fs, target, callback) + } + } + + function statFixSync (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options) { + var stats = options ? orig.call(fs, target, options) + : orig.call(fs, target) + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + return stats; + } + } + + // ENOSYS means that the fs doesn't support the op. Just ignore + // that, because it doesn't matter. + // + // if there's no getuid, or if getuid() is something other + // than 0, and the error is EINVAL or EPERM, then just ignore + // it. + // + // This specific case is a silent failure in cp, install, tar, + // and most other unix tools that manage permissions. + // + // When running as root, or if other types of errors are + // encountered, then it's strict. 
+ function chownErOk (er) { + if (!er) + return true + + if (er.code === "ENOSYS") + return true + + var nonroot = !process.getuid || process.getuid() !== 0 + if (nonroot) { + if (er.code === "EINVAL" || er.code === "EPERM") + return true + } + + return false + } +} + + /***/ }), /***/ 6044: @@ -7303,308 +10420,143 @@ exports["default"] = humanId; /***/ }), -/***/ 9471: -/***/ (function(module) { +/***/ 5822: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// -// THIS FILE IS AUTOMATICALLY GENERATED! DO NOT EDIT BY HAND! -// -; -(function (global, factory) { - true - ? module.exports = factory() - : 0; -}((typeof self !== 'undefined' ? self - : typeof window !== 'undefined' ? window - : typeof global !== 'undefined' ? global - : this), function () { - 'use strict'; - /** - * base64.ts - * - * Licensed under the BSD 3-Clause License. - * http://opensource.org/licenses/BSD-3-Clause - * - * References: - * http://en.wikipedia.org/wiki/Base64 - * - * @author Dan Kogai (https://github.com/dankogai) - */ - var version = '3.7.7'; - /** - * @deprecated use lowercase `version`. - */ - var VERSION = version; - var _hasBuffer = typeof Buffer === 'function'; - var _TD = typeof TextDecoder === 'function' ? new TextDecoder() : undefined; - var _TE = typeof TextEncoder === 'function' ? new TextEncoder() : undefined; - var b64ch = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/='; - var b64chs = Array.prototype.slice.call(b64ch); - var b64tab = (function (a) { - var tab = {}; - a.forEach(function (c, i) { return tab[c] = i; }); - return tab; - })(b64chs); - var b64re = /^(?:[A-Za-z\d+\/]{4})*?(?:[A-Za-z\d+\/]{2}(?:==)?|[A-Za-z\d+\/]{3}=?)?$/; - var _fromCC = String.fromCharCode.bind(String); - var _U8Afrom = typeof Uint8Array.from === 'function' - ? Uint8Array.from.bind(Uint8Array) - : function (it) { return new Uint8Array(Array.prototype.slice.call(it, 0)); }; - var _mkUriSafe = function (src) { return src - .replace(/=/g, '').replace(/[+\/]/g, function (m0) { return m0 == '+' ? '-' : '_'; }); }; - var _tidyB64 = function (s) { return s.replace(/[^A-Za-z0-9\+\/]/g, ''); }; - /** - * polyfill version of `btoa` - */ - var btoaPolyfill = function (bin) { - // console.log('polyfilled'); - var u32, c0, c1, c2, asc = ''; - var pad = bin.length % 3; - for (var i = 0; i < bin.length;) { - if ((c0 = bin.charCodeAt(i++)) > 255 || - (c1 = bin.charCodeAt(i++)) > 255 || - (c2 = bin.charCodeAt(i++)) > 255) - throw new TypeError('invalid character found'); - u32 = (c0 << 16) | (c1 << 8) | c2; - asc += b64chs[u32 >> 18 & 63] - + b64chs[u32 >> 12 & 63] - + b64chs[u32 >> 6 & 63] - + b64chs[u32 & 63]; - } - return pad ? asc.slice(0, pad - 3) + "===".substring(pad) : asc; - }; - /** - * does what `window.btoa` of web browsers do. - * @param {String} bin binary string - * @returns {string} Base64-encoded string - */ - var _btoa = typeof btoa === 'function' ? function (bin) { return btoa(bin); } - : _hasBuffer ? function (bin) { return Buffer.from(bin, 'binary').toString('base64'); } - : btoaPolyfill; - var _fromUint8Array = _hasBuffer - ? function (u8a) { return Buffer.from(u8a).toString('base64'); } - : function (u8a) { - // cf. 
https://stackoverflow.com/questions/12710001/how-to-convert-uint8-array-to-base64-encoded-string/12713326#12713326 - var maxargs = 0x1000; - var strs = []; - for (var i = 0, l = u8a.length; i < l; i += maxargs) { - strs.push(_fromCC.apply(null, u8a.subarray(i, i + maxargs))); - } - return _btoa(strs.join('')); - }; - /** - * converts a Uint8Array to a Base64 string. - * @param {boolean} [urlsafe] URL-and-filename-safe a la RFC4648 §5 - * @returns {string} Base64 string - */ - var fromUint8Array = function (u8a, urlsafe) { - if (urlsafe === void 0) { urlsafe = false; } - return urlsafe ? _mkUriSafe(_fromUint8Array(u8a)) : _fromUint8Array(u8a); - }; - // This trick is found broken https://github.com/dankogai/js-base64/issues/130 - // const utob = (src: string) => unescape(encodeURIComponent(src)); - // reverting good old fationed regexp - var cb_utob = function (c) { - if (c.length < 2) { - var cc = c.charCodeAt(0); - return cc < 0x80 ? c - : cc < 0x800 ? (_fromCC(0xc0 | (cc >>> 6)) - + _fromCC(0x80 | (cc & 0x3f))) - : (_fromCC(0xe0 | ((cc >>> 12) & 0x0f)) - + _fromCC(0x80 | ((cc >>> 6) & 0x3f)) - + _fromCC(0x80 | (cc & 0x3f))); - } - else { - var cc = 0x10000 - + (c.charCodeAt(0) - 0xD800) * 0x400 - + (c.charCodeAt(1) - 0xDC00); - return (_fromCC(0xf0 | ((cc >>> 18) & 0x07)) - + _fromCC(0x80 | ((cc >>> 12) & 0x3f)) - + _fromCC(0x80 | ((cc >>> 6) & 0x3f)) - + _fromCC(0x80 | (cc & 0x3f))); - } - }; - var re_utob = /[\uD800-\uDBFF][\uDC00-\uDFFFF]|[^\x00-\x7F]/g; - /** - * @deprecated should have been internal use only. - * @param {string} src UTF-8 string - * @returns {string} UTF-16 string - */ - var utob = function (u) { return u.replace(re_utob, cb_utob); }; - // - var _encode = _hasBuffer - ? function (s) { return Buffer.from(s, 'utf8').toString('base64'); } - : _TE - ? function (s) { return _fromUint8Array(_TE.encode(s)); } - : function (s) { return _btoa(utob(s)); }; - /** - * converts a UTF-8-encoded string to a Base64 string. - * @param {boolean} [urlsafe] if `true` make the result URL-safe - * @returns {string} Base64 string - */ - var encode = function (src, urlsafe) { - if (urlsafe === void 0) { urlsafe = false; } - return urlsafe - ? _mkUriSafe(_encode(src)) - : _encode(src); - }; - /** - * converts a UTF-8-encoded string to URL-safe Base64 RFC4648 §5. - * @returns {string} Base64 string - */ - var encodeURI = function (src) { return encode(src, true); }; - // This trick is found broken https://github.com/dankogai/js-base64/issues/130 - // const btou = (src: string) => decodeURIComponent(escape(src)); - // reverting good old fationed regexp - var re_btou = /[\xC0-\xDF][\x80-\xBF]|[\xE0-\xEF][\x80-\xBF]{2}|[\xF0-\xF7][\x80-\xBF]{3}/g; - var cb_btou = function (cccc) { - switch (cccc.length) { - case 4: - var cp = ((0x07 & cccc.charCodeAt(0)) << 18) - | ((0x3f & cccc.charCodeAt(1)) << 12) - | ((0x3f & cccc.charCodeAt(2)) << 6) - | (0x3f & cccc.charCodeAt(3)), offset = cp - 0x10000; - return (_fromCC((offset >>> 10) + 0xD800) - + _fromCC((offset & 0x3FF) + 0xDC00)); - case 3: - return _fromCC(((0x0f & cccc.charCodeAt(0)) << 12) - | ((0x3f & cccc.charCodeAt(1)) << 6) - | (0x3f & cccc.charCodeAt(2))); - default: - return _fromCC(((0x1f & cccc.charCodeAt(0)) << 6) - | (0x3f & cccc.charCodeAt(1))); - } - }; - /** - * @deprecated should have been internal use only. 
- * @param {string} src UTF-16 string - * @returns {string} UTF-8 string - */ - var btou = function (b) { return b.replace(re_btou, cb_btou); }; - /** - * polyfill version of `atob` - */ - var atobPolyfill = function (asc) { - // console.log('polyfilled'); - asc = asc.replace(/\s+/g, ''); - if (!b64re.test(asc)) - throw new TypeError('malformed base64.'); - asc += '=='.slice(2 - (asc.length & 3)); - var u24, bin = '', r1, r2; - for (var i = 0; i < asc.length;) { - u24 = b64tab[asc.charAt(i++)] << 18 - | b64tab[asc.charAt(i++)] << 12 - | (r1 = b64tab[asc.charAt(i++)]) << 6 - | (r2 = b64tab[asc.charAt(i++)]); - bin += r1 === 64 ? _fromCC(u24 >> 16 & 255) - : r2 === 64 ? _fromCC(u24 >> 16 & 255, u24 >> 8 & 255) - : _fromCC(u24 >> 16 & 255, u24 >> 8 & 255, u24 & 255); - } - return bin; - }; - /** - * does what `window.atob` of web browsers do. - * @param {String} asc Base64-encoded string - * @returns {string} binary string - */ - var _atob = typeof atob === 'function' ? function (asc) { return atob(_tidyB64(asc)); } - : _hasBuffer ? function (asc) { return Buffer.from(asc, 'base64').toString('binary'); } - : atobPolyfill; - // - var _toUint8Array = _hasBuffer - ? function (a) { return _U8Afrom(Buffer.from(a, 'base64')); } - : function (a) { return _U8Afrom(_atob(a).split('').map(function (c) { return c.charCodeAt(0); })); }; - /** - * converts a Base64 string to a Uint8Array. - */ - var toUint8Array = function (a) { return _toUint8Array(_unURI(a)); }; - // - var _decode = _hasBuffer - ? function (a) { return Buffer.from(a, 'base64').toString('utf8'); } - : _TD - ? function (a) { return _TD.decode(_toUint8Array(a)); } - : function (a) { return btou(_atob(a)); }; - var _unURI = function (a) { return _tidyB64(a.replace(/[-_]/g, function (m0) { return m0 == '-' ? '+' : '/'; })); }; - /** - * converts a Base64 string to a UTF-8 string. - * @param {String} src Base64 string. 
Both normal and URL-safe are supported - * @returns {string} UTF-8 string - */ - var decode = function (src) { return _decode(_unURI(src)); }; - /** - * check if a value is a valid Base64 string - * @param {String} src a value to check - */ - var isValid = function (src) { - if (typeof src !== 'string') - return false; - var s = src.replace(/\s+/g, '').replace(/={0,2}$/, ''); - return !/[^\s0-9a-zA-Z\+/]/.test(s) || !/[^\s0-9a-zA-Z\-_]/.test(s); - }; - // - var _noEnum = function (v) { - return { - value: v, enumerable: false, writable: true, configurable: true - }; - }; - /** - * extend String.prototype with relevant methods - */ - var extendString = function () { - var _add = function (name, body) { return Object.defineProperty(String.prototype, name, _noEnum(body)); }; - _add('fromBase64', function () { return decode(this); }); - _add('toBase64', function (urlsafe) { return encode(this, urlsafe); }); - _add('toBase64URI', function () { return encode(this, true); }); - _add('toBase64URL', function () { return encode(this, true); }); - _add('toUint8Array', function () { return toUint8Array(this); }); - }; - /** - * extend Uint8Array.prototype with relevant methods - */ - var extendUint8Array = function () { - var _add = function (name, body) { return Object.defineProperty(Uint8Array.prototype, name, _noEnum(body)); }; - _add('toBase64', function (urlsafe) { return fromUint8Array(this, urlsafe); }); - _add('toBase64URI', function () { return fromUint8Array(this, true); }); - _add('toBase64URL', function () { return fromUint8Array(this, true); }); - }; - /** - * extend Builtin prototypes with relevant methods - */ - var extendBuiltins = function () { - extendString(); - extendUint8Array(); - }; - var gBase64 = { - version: version, - VERSION: VERSION, - atob: _atob, - atobPolyfill: atobPolyfill, - btoa: _btoa, - btoaPolyfill: btoaPolyfill, - fromBase64: decode, - toBase64: encode, - encode: encode, - encodeURI: encodeURI, - encodeURL: encodeURI, - utob: utob, - btou: btou, - decode: decode, - isValid: isValid, - fromUint8Array: fromUint8Array, - toUint8Array: toUint8Array, - extendString: extendString, - extendUint8Array: extendUint8Array, - extendBuiltins: extendBuiltins - }; - // - // export Base64 to the namespace - // - // ES5 is yet to have Object.assign() that may make transpilers unhappy. - // gBase64.Base64 = Object.assign({}, gBase64); - gBase64.Base64 = {}; - Object.keys(gBase64).forEach(function (k) { return gBase64.Base64[k] = gBase64[k]; }); - return gBase64; -})); +var _fs +try { + _fs = __nccwpck_require__(494) +} catch (_) { + _fs = __nccwpck_require__(7147) +} + +function readFile (file, options, callback) { + if (callback == null) { + callback = options + options = {} + } + + if (typeof options === 'string') { + options = {encoding: options} + } + + options = options || {} + var fs = options.fs || _fs + + var shouldThrow = true + if ('throws' in options) { + shouldThrow = options.throws + } + + fs.readFile(file, options, function (err, data) { + if (err) return callback(err) + + data = stripBom(data) + + var obj + try { + obj = JSON.parse(data, options ? 
options.reviver : null) + } catch (err2) { + if (shouldThrow) { + err2.message = file + ': ' + err2.message + return callback(err2) + } else { + return callback(null, null) + } + } + + callback(null, obj) + }) +} + +function readFileSync (file, options) { + options = options || {} + if (typeof options === 'string') { + options = {encoding: options} + } + + var fs = options.fs || _fs + + var shouldThrow = true + if ('throws' in options) { + shouldThrow = options.throws + } + + try { + var content = fs.readFileSync(file, options) + content = stripBom(content) + return JSON.parse(content, options.reviver) + } catch (err) { + if (shouldThrow) { + err.message = file + ': ' + err.message + throw err + } else { + return null + } + } +} + +function stringify (obj, options) { + var spaces + var EOL = '\n' + if (typeof options === 'object' && options !== null) { + if (options.spaces) { + spaces = options.spaces + } + if (options.EOL) { + EOL = options.EOL + } + } + + var str = JSON.stringify(obj, options ? options.replacer : null, spaces) + + return str.replace(/\n/g, EOL) + EOL +} + +function writeFile (file, obj, options, callback) { + if (callback == null) { + callback = options + options = {} + } + options = options || {} + var fs = options.fs || _fs + + var str = '' + try { + str = stringify(obj, options) + } catch (err) { + // Need to return whether a callback was passed or not + if (callback) callback(err, null) + return + } + + fs.writeFile(file, str, options, callback) +} + +function writeFileSync (file, obj, options) { + options = options || {} + var fs = options.fs || _fs + + var str = stringify(obj, options) + // not sure if fs.writeFileSync returns anything, but just in case + return fs.writeFileSync(file, str, options) +} + +function stripBom (content) { + // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified + if (Buffer.isBuffer(content)) content = content.toString('utf8') + content = content.replace(/^\uFEFF/, '') + return content +} + +var jsonfile = { + readFile: readFile, + readFileSync: readFileSync, + writeFile: writeFile, + writeFileSync: writeFileSync +} + +module.exports = jsonfile /***/ }), @@ -30122,6 +33074,39 @@ exports.getUserAgent = getUserAgent; //# sourceMappingURL=index.js.map +/***/ }), + +/***/ 6350: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +exports.E = function (fn) { + return Object.defineProperty(function () { + if (typeof arguments[arguments.length - 1] === 'function') fn.apply(this, arguments) + else { + return new Promise((resolve, reject) => { + arguments[arguments.length] = (err, res) => { + if (err) return reject(err) + resolve(res) + } + arguments.length++ + fn.apply(this, arguments) + }) + } + }, 'name', { value: fn.name }) +} + +exports.p = function (fn) { + return Object.defineProperty(function () { + const cb = arguments[arguments.length - 1] + if (typeof cb !== 'function') return fn.apply(this, arguments) + else fn.apply(this, arguments).then(r => cb(null, r), cb) + }, 'name', { value: fn.name }) +} + + /***/ }), /***/ 4187: @@ -30840,14 +33825,13 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); const core = __nccwpck_require__(6108); const exec_1 = __nccwpck_require__(9629); const github = __nccwpck_require__(1645); -const js_base64_1 = __nccwpck_require__(9471); const utils_1 = __nccwpck_require__(3927); const constants_1 = __nccwpck_require__(9562); const changeset_1 = __nccwpck_require__(6621); const file_1 = __nccwpck_require__(398); function main() { 
return __awaiter(this, void 0, void 0, function* () { - var _a, _b; + var _a, _b, _c; const context = github.context; const { pull_request } = context.payload; if (!pull_request) { @@ -30883,8 +33867,9 @@ function main() { /** * 변경된 파일 이름을 가져오기위한 api */ - const rawPackagesDir = core.getInput('packages_dir'); - if (typeof rawPackagesDir !== 'string') { + const packages_dir = core.getInput('packages_dir'); + const excludes = (_c = core.getInput('excludes')) !== null && _c !== void 0 ? _c : ''; + if (typeof packages_dir !== 'string') { throw new Error(`해당 action에 주입된 packages_dir parameter가 잘못되었습니다. (string, string1)의 형식으로 작성해주세요.`); } // 변경된 모든 파일을 가져온다. @@ -30912,12 +33897,14 @@ function main() { } // 변경된 파일들중에 packagesDir 내부에 파일만 골라낸다. (이때 .md 는 무시한다) // TODO(new-jeans): 확장자를 외부에서 받아서 필터링 할 수 있도록 - const changedPackageInfos = yield (0, file_1.getChangedPackages)({ - allChangedFiles, - packagesDir: rawPackagesDir.split(',').map((v) => v.trim()), + // 변경된 파일들중에 packagesDir 내부에 파일만 골라낸다. (이때 .md 는 무시한다) + const changedPackages = yield (0, file_1.getChangedPackages)({ + changedFiles: allChangedFiles, + packagesDir: packages_dir.split(',').map((v) => v.trim()), + excludes: excludes.split(','), }); // 변경된 패키지가 없다면 Empty 메시지를 남긴다. - if (changedPackageInfos.length === 0) { + if (changedPackages.length === 0) { if (prevComment !== undefined) { yield octokit.rest.issues.updateComment({ owner, @@ -30937,36 +33924,9 @@ function main() { } return; } - // 변경된 패키지들의 package.json 내부의 정의된 이름들을 가져온다. - const changedPackageNames = yield Promise.all(changedPackageInfos.map((_a) => __awaiter(this, [_a], void 0, function* ({ packageName, packageJsonPath }) { - var _b; - // 삭제된 패키지의 경우 package.json 의 name 을 가져올 수 없기 때문에 packageName(폴더명) 을 fallback 으로 사용한다. - if (packageJsonPath == null) { - return packageName; - } - try { - const { data } = yield octokit.rest.repos.getContent({ - owner, - repo, - path: packageJsonPath, - ref: ((_b = pull_request === null || pull_request === void 0 ? void 0 : pull_request.head) === null || _b === void 0 ? void 0 : _b.ref) || 'master', - }); - if ('content' in data) { - const { name } = JSON.parse((0, js_base64_1.decode)(data.content)); - return name; - } - else { - return packageName; - } - } - catch (e) { - core.info(`[ERROR] ${packageName}의 package.json의 name 필드를 찾을 수 없습니다.`); - return packageName; - } - }))); // 변경된 패키지들의 정보를 바탕으로 메시지를 생성한다. const comment = (0, changeset_1.getChangedPackagesGithubComment)({ - changedPackageNames, + changedPackages, pullRequest: pull_request, skipLabel, }); @@ -31033,19 +33993,19 @@ function getAddChangesetUrl(changedPackageNames, pull_request, versionType) { const commitMessage = `[${versionType}] ${fileName}`; return `${pull_request.head.repo.html_url}/new/${pull_request.head.ref}?filename=.changeset/${fileName}.md&value=${getNewChangesetTemplate(changedPackageNames, pull_request.title, pull_request.html_url || '', versionType)}&message=${encodeURIComponent(commitMessage)}`; } -function getChangedPackagesGithubComment({ changedPackageNames, pullRequest, skipLabel, }) { +function getChangedPackagesGithubComment({ changedPackages, pullRequest, skipLabel, }) { return [ `> ${constants_1.CHANGESET_DETECT_ADD_ACTIONS_CHECKSUM}`, '', - `\`${changedPackageNames.join('`, `')}\` 패키지${changedPackageNames.length > 1 ? '들' : ''}에 변경사항이 감지되었습니다.`, + `\`${changedPackages.join('`, `')}\` 패키지${changedPackages.length > 1 ? '들' : ''}에 변경사항이 감지되었습니다.`, '', `${skipLabel != null ? 
`만약, 버전 변경이 필요 없다면 ${skipLabel}을 label에 추가해주세요.` : ''}`, '', '.changeset에 변경사항을 추가하고싶다면 아래에서 하나를 선택해주세요', '', - `X.0.0 [major bump](${getAddChangesetUrl(changedPackageNames, pullRequest, 'major')})`, - `0.X.0 [minor bump](${getAddChangesetUrl(changedPackageNames, pullRequest, 'minor')})`, - `0.0.X [patch bump](${getAddChangesetUrl(changedPackageNames, pullRequest, 'patch')})`, + `X.0.0 [major bump](${getAddChangesetUrl(changedPackages, pullRequest, 'major')})`, + `0.X.0 [minor bump](${getAddChangesetUrl(changedPackages, pullRequest, 'minor')})`, + `0.0.X [patch bump](${getAddChangesetUrl(changedPackages, pullRequest, 'patch')})`, ].join('\n'); } function getChangesetEmptyGithubComment() { @@ -31076,21 +34036,34 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getChangedPackages = getChangedPackages; +const fs_extra_1 = __nccwpck_require__(77); const utils_1 = __nccwpck_require__(3927); function getChangedPackages(_a) { - return __awaiter(this, arguments, void 0, function* ({ packagesDir, allChangedFiles, }) { - const changedPackages = allChangedFiles - .filter(({ filename }) => { - const isTargetDirectories = packagesDir.some((packageDir) => filename.includes(`${packageDir}/`)); - const isMarkdownFile = filename.endsWith('.md'); - return isTargetDirectories && !isMarkdownFile; - }) - .map(({ filename, status }) => { - const [packageRoot, packageName] = filename.split('/'); - const packageJsonPath = status !== 'removed' ? [packageRoot, packageName, 'package.json'].join('/') : undefined; - return { packageName, packageJsonPath }; - }); - return (0, utils_1.uniqBy)(changedPackages, ({ packageName }) => packageName); + return __awaiter(this, arguments, void 0, function* ({ packagesDir, changedFiles, excludes, }) { + const isIncludedRoot = packagesDir.includes('.') === true; + const targetDirectories = packagesDir.filter((packagename) => packagename !== '.'); + const changedPackages = changedFiles.reduce((acc, { filename, status }) => { + if (status === 'removed') { + return acc; + } + const 패키지대상인가 = isIncludedRoot || targetDirectories.some((packageDir) => filename.includes(`${packageDir}/`)); + // TODO: 제외 확장자도 받을 수 있도록 + const 마크다운파일인가 = filename.endsWith('.md'); + const 제외대상인가 = excludes.some((exclude) => { + return filename === exclude || filename.startsWith(`${exclude}`); + }); + if (패키지대상인가 && !마크다운파일인가 && !제외대상인가) { + const packageJsonPath = isIncludedRoot ? 
'package.json' : (0, utils_1.findNearestPackageJson)(filename); + if (packageJsonPath != null) { + const packageJsonData = fs_extra_1.default.readFileSync(packageJsonPath, 'utf-8'); + const packageJson = JSON.parse(packageJsonData); + acc.add(packageJson.name); + } + } + return acc; + }, new Set()); + console.log('필터링된 packages', Array.from(changedPackages)); // eslint-disable-line + return Array.from(changedPackages); }); } @@ -31120,8 +34093,11 @@ var __asyncValues = (this && this.__asyncValues) || function (o) { }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getChangedAllFiles = getChangedAllFiles; +exports.findNearestPackageJson = findNearestPackageJson; +const path_1 = __nccwpck_require__(1017); const core = __nccwpck_require__(6108); const github = __nccwpck_require__(1645); +const fs_extra_1 = __nccwpck_require__(77); const utils_1 = __nccwpck_require__(3927); function getChangedAllFiles(_a) { return __awaiter(this, arguments, void 0, function* ({ pullNumber }) { @@ -31153,6 +34129,17 @@ function getChangedAllFiles(_a) { return changedFiles; }); } +function findNearestPackageJson(filePath) { + let currentDir = path_1.default.dirname(filePath); + while (currentDir !== path_1.default.parse(currentDir).root) { + const packageJsonPath = path_1.default.join(currentDir, 'package.json'); + if (fs_extra_1.default.existsSync(packageJsonPath)) { + return packageJsonPath; + } + currentDir = path_1.default.dirname(currentDir); + } + return undefined; +} /***/ }), @@ -31316,6 +34303,14 @@ module.exports = require("console"); /***/ }), +/***/ 2057: +/***/ ((module) => { + +"use strict"; +module.exports = require("constants"); + +/***/ }), + /***/ 6113: /***/ ((module) => { diff --git a/detect-add/package.json b/detect-add/package.json index 8a2440e..4cf621b 100644 --- a/detect-add/package.json +++ b/detect-add/package.json @@ -13,7 +13,6 @@ }, "dependencies": { "@vercel/ncc": "^0.38.1", - "js-base64": "^3.7.5", "human-id": "^4.0.0" }, "devDependencies": { diff --git a/detect-add/src/index.ts b/detect-add/src/index.ts index 6eb2af6..07ca5ec 100644 --- a/detect-add/src/index.ts +++ b/detect-add/src/index.ts @@ -1,7 +1,6 @@ import * as core from '@actions/core' import {exec} from '@actions/exec' import * as github from '@actions/github' -import {decode} from 'js-base64' import {commitAll, getChangedAllFiles, getOctokitRestCommonParams, push} from '$actions/utils' @@ -64,9 +63,10 @@ async function main() { * 변경된 파일 이름을 가져오기위한 api */ - const rawPackagesDir = core.getInput('packages_dir') + const packages_dir = core.getInput('packages_dir') + const excludes = core.getInput('excludes') ?? '' - if (typeof rawPackagesDir !== 'string') { + if (typeof packages_dir !== 'string') { throw new Error( `해당 action에 주입된 packages_dir parameter가 잘못되었습니다. (string, string1)의 형식으로 작성해주세요.`, ) @@ -102,13 +102,15 @@ async function main() { // 변경된 파일들중에 packagesDir 내부에 파일만 골라낸다. (이때 .md 는 무시한다) // TODO(new-jeans): 확장자를 외부에서 받아서 필터링 할 수 있도록 - const changedPackageInfos = await getChangedPackages({ - allChangedFiles, - packagesDir: rawPackagesDir.split(',').map((v) => v.trim()) as string[], + // 변경된 파일들중에 packagesDir 내부에 파일만 골라낸다. (이때 .md 는 무시한다) + const changedPackages = await getChangedPackages({ + changedFiles: allChangedFiles, + packagesDir: packages_dir.split(',').map((v) => v.trim()) as string[], + excludes: excludes.split(',') as string[], }) // 변경된 패키지가 없다면 Empty 메시지를 남긴다. 
- if (changedPackageInfos.length === 0) { + if (changedPackages.length === 0) { if (prevComment !== undefined) { await octokit.rest.issues.updateComment({ owner, @@ -129,37 +131,9 @@ async function main() { return } - // 변경된 패키지들의 package.json 내부의 정의된 이름들을 가져온다. - const changedPackageNames = await Promise.all( - changedPackageInfos.map(async ({packageName, packageJsonPath}) => { - // 삭제된 패키지의 경우 package.json 의 name 을 가져올 수 없기 때문에 packageName(폴더명) 을 fallback 으로 사용한다. - if (packageJsonPath == null) { - return packageName - } - - try { - const {data} = await octokit.rest.repos.getContent({ - owner, - repo, - path: packageJsonPath, - ref: pull_request?.head?.ref || 'master', - }) - if ('content' in data) { - const {name} = JSON.parse(decode(data.content)) as {name: string} - return name - } else { - return packageName - } - } catch (e) { - core.info(`[ERROR] ${packageName}의 package.json의 name 필드를 찾을 수 없습니다.`) - return packageName - } - }), - ) - // 변경된 패키지들의 정보를 바탕으로 메시지를 생성한다. const comment = getChangedPackagesGithubComment({ - changedPackageNames, + changedPackages, pullRequest: pull_request, skipLabel, }) diff --git a/detect-add/src/utils/changeset.ts b/detect-add/src/utils/changeset.ts index 296ffb3..78155c4 100644 --- a/detect-add/src/utils/changeset.ts +++ b/detect-add/src/utils/changeset.ts @@ -52,11 +52,11 @@ export function getAddChangesetUrl( } export function getChangedPackagesGithubComment({ - changedPackageNames, + changedPackages, pullRequest, skipLabel, }: { - changedPackageNames: string[] + changedPackages: string[] pullRequest: { [key: string]: any // eslint-disable-line @typescript-eslint/no-explicit-any number: number @@ -68,17 +68,17 @@ export function getChangedPackagesGithubComment({ return [ `> ${CHANGESET_DETECT_ADD_ACTIONS_CHECKSUM}`, '', - `\`${changedPackageNames.join('`, `')}\` 패키지${ - changedPackageNames.length > 1 ? '들' : '' + `\`${changedPackages.join('`, `')}\` 패키지${ + changedPackages.length > 1 ? '들' : '' }에 변경사항이 감지되었습니다.`, '', `${skipLabel != null ? 
`만약, 버전 변경이 필요 없다면 ${skipLabel}을 label에 추가해주세요.` : ''}`, '', '.changeset에 변경사항을 추가하고싶다면 아래에서 하나를 선택해주세요', '', - `X.0.0 [major bump](${getAddChangesetUrl(changedPackageNames, pullRequest, 'major')})`, - `0.X.0 [minor bump](${getAddChangesetUrl(changedPackageNames, pullRequest, 'minor')})`, - `0.0.X [patch bump](${getAddChangesetUrl(changedPackageNames, pullRequest, 'patch')})`, + `X.0.0 [major bump](${getAddChangesetUrl(changedPackages, pullRequest, 'major')})`, + `0.X.0 [minor bump](${getAddChangesetUrl(changedPackages, pullRequest, 'minor')})`, + `0.0.X [patch bump](${getAddChangesetUrl(changedPackages, pullRequest, 'patch')})`, ].join('\n') } diff --git a/detect-add/src/utils/file.ts b/detect-add/src/utils/file.ts index c7e1c12..767cc13 100644 --- a/detect-add/src/utils/file.ts +++ b/detect-add/src/utils/file.ts @@ -1,26 +1,47 @@ -import {getChangedAllFiles, uniqBy} from '$actions/utils' +import fs from 'fs-extra' + +import {findNearestPackageJson, getChangedAllFiles} from '$actions/utils' export async function getChangedPackages({ packagesDir, - allChangedFiles, + changedFiles, + excludes, }: { - allChangedFiles: Awaited> + changedFiles: Awaited> packagesDir: string[] + excludes: string[] }) { - const changedPackages = allChangedFiles - .filter(({filename}) => { - const isTargetDirectories = packagesDir.some((packageDir) => filename.includes(`${packageDir}/`)) - const isMarkdownFile = filename.endsWith('.md') - return isTargetDirectories && !isMarkdownFile - }) - .map(({filename, status}) => { - const [packageRoot, packageName] = filename.split('/') + const isIncludedRoot = packagesDir.includes('.') === true + const targetDirectories = packagesDir.filter((packagename) => packagename !== '.') - const packageJsonPath = - status !== 'removed' ? [packageRoot, packageName, 'package.json'].join('/') : undefined + const changedPackages = changedFiles.reduce((acc, {filename, status}) => { + if (status === 'removed') { + return acc + } - return {packageName, packageJsonPath} + const 패키지대상인가 = + isIncludedRoot || targetDirectories.some((packageDir) => filename.includes(`${packageDir}/`)) + // TODO: 제외 확장자도 받을 수 있도록 + const 마크다운파일인가 = filename.endsWith('.md') + const 제외대상인가 = excludes.some((exclude) => { + return filename === exclude || filename.startsWith(`${exclude}`) }) - return uniqBy(changedPackages, ({packageName}) => packageName) + if (패키지대상인가 && !마크다운파일인가 && !제외대상인가) { + const packageJsonPath = isIncludedRoot ? 
'package.json' : findNearestPackageJson(filename) + + if (packageJsonPath != null) { + const packageJsonData = fs.readFileSync(packageJsonPath, 'utf-8') + const packageJson = JSON.parse(packageJsonData) + + acc.add(packageJson.name) + } + } + + return acc + }, new Set()) + + console.log('필터링된 packages', Array.from(changedPackages)) // eslint-disable-line + + return Array.from(changedPackages) } diff --git a/libs/utils/files.ts b/libs/utils/files.ts index e18aea4..db0cf3d 100644 --- a/libs/utils/files.ts +++ b/libs/utils/files.ts @@ -1,5 +1,8 @@ +import path from 'path' + import * as core from '@actions/core' import * as github from '@actions/github' +import fs from 'fs-extra' import {getOctokitRestCommonParams} from '$actions/utils' @@ -21,3 +24,19 @@ export async function getChangedAllFiles({pullNumber}: {pullNumber: number}) { return changedFiles } + +export function findNearestPackageJson(filePath: string) { + let currentDir = path.dirname(filePath) + + while (currentDir !== path.parse(currentDir).root) { + const packageJsonPath = path.join(currentDir, 'package.json') + + if (fs.existsSync(packageJsonPath)) { + return packageJsonPath + } + + currentDir = path.dirname(currentDir) + } + + return undefined +} diff --git a/package.json b/package.json index 619e24f..408094c 100644 --- a/package.json +++ b/package.json @@ -34,7 +34,8 @@ "lint-staged": "^15.0.1", "turbo": "^1.10.16", "typescript": "^5.2.2", - "@types/node": "^20.14.9" + "@types/node": "^20.14.9", + "@types/fs-extra": "^8.0.0" }, "packageManager": "pnpm@9.4.0", "dependencies": { @@ -43,6 +44,7 @@ "@naverpay/commit-helper": "^1.0.0", "@vercel/ncc": "^0.38.1", "lefthook": "^1.6.10", + "fs-extra": "^8.1.0", "@actions/exec": "^1.1.1" } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a2c4858..d96152d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -23,6 +23,9 @@ importers: '@vercel/ncc': specifier: ^0.38.1 version: 0.38.1 + fs-extra: + specifier: ^8.1.0 + version: 8.1.0 lefthook: specifier: ^1.6.10 version: 1.7.15 @@ -39,6 +42,9 @@ importers: '@naverpay/prettier-config': specifier: ^0.0.2 version: 0.0.2 + '@types/fs-extra': + specifier: ^8.0.0 + version: 8.1.5 '@types/node': specifier: ^20.14.9 version: 20.16.5 @@ -66,16 +72,10 @@ importers: fast-glob: specifier: ^3.3.2 version: 3.3.2 - fs-extra: - specifier: ^8.1.0 - version: 8.1.0 resolve-from: specifier: ^5.0.0 version: 5.0.0 devDependencies: - '@types/fs-extra': - specifier: ^8.0.0 - version: 8.1.5 '@types/node': specifier: ^20.14.9 version: 20.16.5 @@ -104,9 +104,6 @@ importers: '@manypkg/get-packages': specifier: ^1.1.3 version: 1.1.3 - fs-extra: - specifier: ^11.2.0 - version: 11.2.0 mdast-util-to-string: specifier: ^4.0.0 version: 4.0.0 @@ -123,9 +120,6 @@ importers: specifier: ^8.4.2 version: 8.4.2 devDependencies: - '@types/fs-extra': - specifier: ^11.0.4 - version: 11.0.4 '@types/node': specifier: ^20.14.9 version: 20.16.5 @@ -521,9 +515,6 @@ packages: resolution: {integrity: sha512-rUV5WyJrJLoloD4NDN1V1+LDMDWOa4OTsT4yYJwQNpTU6FWxkxHpL7eu4w+DmiH8x/EAM1otkPE1+LaspIbplw==} engines: {node: '>=18'} - '@types/fs-extra@11.0.4': - resolution: {integrity: sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==} - '@types/fs-extra@8.1.5': resolution: {integrity: sha512-0dzKcwO+S8s2kuF5Z9oUWatQJj5Uq/iqphEtE3GQJVRRYm/tD1LglU2UnXi2A8jLq5umkGouOXOR9y0n613ZwQ==} @@ -533,9 +524,6 @@ packages: '@types/json5@0.0.29': resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} - 
'@types/jsonfile@6.1.4': - resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==} - '@types/mdast@4.0.4': resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==} @@ -1290,10 +1278,6 @@ packages: resolution: {integrity: sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==} engines: {node: '>=14'} - fs-extra@11.2.0: - resolution: {integrity: sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==} - engines: {node: '>=14.14'} - fs-extra@7.0.1: resolution: {integrity: sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==} engines: {node: '>=6 <7 || >=8'} @@ -1687,9 +1671,6 @@ packages: jsonfile@4.0.0: resolution: {integrity: sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==} - jsonfile@6.1.0: - resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} - jsx-ast-utils@3.3.5: resolution: {integrity: sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==} engines: {node: '>=4.0'} @@ -2489,10 +2470,6 @@ packages: resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} engines: {node: '>= 4.0.0'} - universalify@2.0.1: - resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} - engines: {node: '>= 10.0.0'} - update-browserslist-db@1.1.0: resolution: {integrity: sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==} hasBin: true @@ -3155,11 +3132,6 @@ snapshots: '@sindresorhus/merge-streams@1.0.0': {} - '@types/fs-extra@11.0.4': - dependencies: - '@types/jsonfile': 6.1.4 - '@types/node': 20.16.5 - '@types/fs-extra@8.1.5': dependencies: '@types/node': 20.16.5 @@ -3168,10 +3140,6 @@ snapshots: '@types/json5@0.0.29': {} - '@types/jsonfile@6.1.4': - dependencies: - '@types/node': 20.16.5 - '@types/mdast@4.0.4': dependencies: '@types/unist': 3.0.3 @@ -4110,12 +4078,6 @@ snapshots: cross-spawn: 7.0.3 signal-exit: 4.1.0 - fs-extra@11.2.0: - dependencies: - graceful-fs: 4.2.11 - jsonfile: 6.1.0 - universalify: 2.0.1 - fs-extra@7.0.1: dependencies: graceful-fs: 4.2.11 @@ -4478,12 +4440,6 @@ snapshots: optionalDependencies: graceful-fs: 4.2.11 - jsonfile@6.1.0: - dependencies: - universalify: 2.0.1 - optionalDependencies: - graceful-fs: 4.2.11 - jsx-ast-utils@3.3.5: dependencies: array-includes: 3.1.8 @@ -5324,8 +5280,6 @@ snapshots: universalify@0.1.2: {} - universalify@2.0.1: {} - update-browserslist-db@1.1.0(browserslist@4.23.3): dependencies: browserslist: 4.23.3 diff --git a/publish/dist/index.js b/publish/dist/index.js index 3852589..3cd01c4 100644 --- a/publish/dist/index.js +++ b/publish/dist/index.js @@ -13780,1605 +13780,6 @@ module.exports.sync.exists = pathExists.sync; module.exports.stop = stop; -/***/ }), - -/***/ 27907: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(30494) -const path = __nccwpck_require__(71017) -const mkdirsSync = (__nccwpck_require__(21946).mkdirsSync) -const utimesMillisSync = (__nccwpck_require__(15475).utimesMillisSync) -const stat = __nccwpck_require__(23600) - -function copySync (src, dest, opts) { - if (typeof opts === 
'function') { - opts = { filter: opts } - } - - opts = opts || {} - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - process.emitWarning( - 'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' + - '\tsee https://github.com/jprichardson/node-fs-extra/issues/269', - 'Warning', 'fs-extra-WARN0002' - ) - } - - const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts) - stat.checkParentPathsSync(src, srcStat, dest, 'copy') - if (opts.filter && !opts.filter(src, dest)) return - const destParent = path.dirname(dest) - if (!fs.existsSync(destParent)) mkdirsSync(destParent) - return getStats(destStat, src, dest, opts) -} - -function getStats (destStat, src, dest, opts) { - const statSync = opts.dereference ? fs.statSync : fs.lstatSync - const srcStat = statSync(src) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts) - else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts) - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts) - else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`) - else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`) - throw new Error(`Unknown file: ${src}`) -} - -function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) return copyFile(srcStat, src, dest, opts) - return mayCopyFile(srcStat, src, dest, opts) -} - -function mayCopyFile (srcStat, src, dest, opts) { - if (opts.overwrite) { - fs.unlinkSync(dest) - return copyFile(srcStat, src, dest, opts) - } else if (opts.errorOnExist) { - throw new Error(`'${dest}' already exists`) - } -} - -function copyFile (srcStat, src, dest, opts) { - fs.copyFileSync(src, dest) - if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest) - return setDestMode(dest, srcStat.mode) -} - -function handleTimestamps (srcMode, src, dest) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode) - return setDestTimestamps(src, dest) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return setDestMode(dest, srcMode | 0o200) -} - -function setDestMode (dest, srcMode) { - return fs.chmodSync(dest, srcMode) -} - -function setDestTimestamps (src, dest) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = fs.statSync(src) - return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime) -} - -function onDir (srcStat, destStat, src, dest, opts) { - if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts) - return copyDir(src, dest, opts) -} - -function mkDirAndCopy (srcMode, src, dest, opts) { - fs.mkdirSync(dest) - copyDir(src, dest, opts) - return setDestMode(dest, srcMode) -} - -function copyDir (src, dest, opts) { - fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts)) -} - -function copyDirItem (item, src, dest, opts) { - const srcItem 
= path.join(src, item) - const destItem = path.join(dest, item) - if (opts.filter && !opts.filter(srcItem, destItem)) return - const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy', opts) - return getStats(destStat, srcItem, destItem, opts) -} - -function onLink (destStat, src, dest, opts) { - let resolvedSrc = fs.readlinkSync(src) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - - if (!destStat) { - return fs.symlinkSync(resolvedSrc, dest) - } else { - let resolvedDest - try { - resolvedDest = fs.readlinkSync(dest) - } catch (err) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest) - throw err - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) - } - - // prevent copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - if (stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) - } - return copyLink(resolvedSrc, dest) - } -} - -function copyLink (resolvedSrc, dest) { - fs.unlinkSync(dest) - return fs.symlinkSync(resolvedSrc, dest) -} - -module.exports = copySync - - -/***/ }), - -/***/ 64197: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(30201) -const path = __nccwpck_require__(71017) -const { mkdirs } = __nccwpck_require__(21946) -const { pathExists } = __nccwpck_require__(90271) -const { utimesMillis } = __nccwpck_require__(15475) -const stat = __nccwpck_require__(23600) - -async function copy (src, dest, opts = {}) { - if (typeof opts === 'function') { - opts = { filter: opts } - } - - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - process.emitWarning( - 'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' + - '\tsee https://github.com/jprichardson/node-fs-extra/issues/269', - 'Warning', 'fs-extra-WARN0001' - ) - } - - const { srcStat, destStat } = await stat.checkPaths(src, dest, 'copy', opts) - - await stat.checkParentPaths(src, srcStat, dest, 'copy') - - const include = await runFilter(src, dest, opts) - - if (!include) return - - // check if the parent of dest exists, and create it if it doesn't exist - const destParent = path.dirname(dest) - const dirExists = await pathExists(destParent) - if (!dirExists) { - await mkdirs(destParent) - } - - await getStatsAndPerformCopy(destStat, src, dest, opts) -} - -async function runFilter (src, dest, opts) { - if (!opts.filter) return true - return opts.filter(src, dest) -} - -async function getStatsAndPerformCopy (destStat, src, dest, opts) { - const statFn = opts.dereference ? 
fs.stat : fs.lstat - const srcStat = await statFn(src) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts) - - if ( - srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice() - ) return onFile(srcStat, destStat, src, dest, opts) - - if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts) - if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`) - if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`) - throw new Error(`Unknown file: ${src}`) -} - -async function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) return copyFile(srcStat, src, dest, opts) - - if (opts.overwrite) { - await fs.unlink(dest) - return copyFile(srcStat, src, dest, opts) - } - if (opts.errorOnExist) { - throw new Error(`'${dest}' already exists`) - } -} - -async function copyFile (srcStat, src, dest, opts) { - await fs.copyFile(src, dest) - if (opts.preserveTimestamps) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcStat.mode)) { - await makeFileWritable(dest, srcStat.mode) - } - - // Set timestamps and mode correspondingly - - // Note that The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = await fs.stat(src) - await utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime) - } - - return fs.chmod(dest, srcStat.mode) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return fs.chmod(dest, srcMode | 0o200) -} - -async function onDir (srcStat, destStat, src, dest, opts) { - // the dest directory might not exist, create it - if (!destStat) { - await fs.mkdir(dest) - } - - const items = await fs.readdir(src) - - // loop through the files in the current directory to copy everything - await Promise.all(items.map(async item => { - const srcItem = path.join(src, item) - const destItem = path.join(dest, item) - - // skip the item if it is matches by the filter function - const include = await runFilter(srcItem, destItem, opts) - if (!include) return - - const { destStat } = await stat.checkPaths(srcItem, destItem, 'copy', opts) - - // If the item is a copyable file, `getStatsAndPerformCopy` will copy it - // If the item is a directory, `getStatsAndPerformCopy` will call `onDir` recursively - return getStatsAndPerformCopy(destStat, srcItem, destItem, opts) - })) - - if (!destStat) { - await fs.chmod(dest, srcStat.mode) - } -} - -async function onLink (destStat, src, dest, opts) { - let resolvedSrc = await fs.readlink(src) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - if (!destStat) { - return fs.symlink(resolvedSrc, dest) - } - - let resolvedDest = null - try { - resolvedDest = await fs.readlink(dest) - } catch (e) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. 
- if (e.code === 'EINVAL' || e.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest) - throw e - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) - } - - // do not copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - if (stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) - } - - // copy the link - await fs.unlink(dest) - return fs.symlink(resolvedSrc, dest) -} - -module.exports = copy - - -/***/ }), - -/***/ 27171: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = (__nccwpck_require__(61305).fromPromise) -module.exports = { - copy: u(__nccwpck_require__(64197)), - copySync: __nccwpck_require__(27907) -} - - -/***/ }), - -/***/ 49945: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = (__nccwpck_require__(61305).fromPromise) -const fs = __nccwpck_require__(30201) -const path = __nccwpck_require__(71017) -const mkdir = __nccwpck_require__(21946) -const remove = __nccwpck_require__(46179) - -const emptyDir = u(async function emptyDir (dir) { - let items - try { - items = await fs.readdir(dir) - } catch { - return mkdir.mkdirs(dir) - } - - return Promise.all(items.map(item => remove.remove(path.join(dir, item)))) -}) - -function emptyDirSync (dir) { - let items - try { - items = fs.readdirSync(dir) - } catch { - return mkdir.mkdirsSync(dir) - } - - items.forEach(item => { - item = path.join(dir, item) - remove.removeSync(item) - }) -} - -module.exports = { - emptyDirSync, - emptydirSync: emptyDirSync, - emptyDir, - emptydir: emptyDir -} - - -/***/ }), - -/***/ 32749: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = (__nccwpck_require__(61305).fromPromise) -const path = __nccwpck_require__(71017) -const fs = __nccwpck_require__(30201) -const mkdir = __nccwpck_require__(21946) - -async function createFile (file) { - let stats - try { - stats = await fs.stat(file) - } catch { } - if (stats && stats.isFile()) return - - const dir = path.dirname(file) - - let dirStats = null - try { - dirStats = await fs.stat(dir) - } catch (err) { - // if the directory doesn't exist, make it - if (err.code === 'ENOENT') { - await mkdir.mkdirs(dir) - await fs.writeFile(file, '') - return - } else { - throw err - } - } - - if (dirStats.isDirectory()) { - await fs.writeFile(file, '') - } else { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - await fs.readdir(dir) - } -} - -function createFileSync (file) { - let stats - try { - stats = fs.statSync(file) - } catch { } - if (stats && stats.isFile()) return - - const dir = path.dirname(file) - try { - if (!fs.statSync(dir).isDirectory()) { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - fs.readdirSync(dir) - } - } catch (err) { - // If the stat call above failed because the directory doesn't exist, create it - if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir) - else throw err - } - - fs.writeFileSync(file, '') -} - -module.exports = { - createFile: u(createFile), - createFileSync -} - - -/***/ }), - -/***/ 13157: -/***/ ((module, 
__unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { createFile, createFileSync } = __nccwpck_require__(32749) -const { createLink, createLinkSync } = __nccwpck_require__(87413) -const { createSymlink, createSymlinkSync } = __nccwpck_require__(14720) - -module.exports = { - // file - createFile, - createFileSync, - ensureFile: createFile, - ensureFileSync: createFileSync, - // link - createLink, - createLinkSync, - ensureLink: createLink, - ensureLinkSync: createLinkSync, - // symlink - createSymlink, - createSymlinkSync, - ensureSymlink: createSymlink, - ensureSymlinkSync: createSymlinkSync -} - - -/***/ }), - -/***/ 87413: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = (__nccwpck_require__(61305).fromPromise) -const path = __nccwpck_require__(71017) -const fs = __nccwpck_require__(30201) -const mkdir = __nccwpck_require__(21946) -const { pathExists } = __nccwpck_require__(90271) -const { areIdentical } = __nccwpck_require__(23600) - -async function createLink (srcpath, dstpath) { - let dstStat - try { - dstStat = await fs.lstat(dstpath) - } catch { - // ignore error - } - - let srcStat - try { - srcStat = await fs.lstat(srcpath) - } catch (err) { - err.message = err.message.replace('lstat', 'ensureLink') - throw err - } - - if (dstStat && areIdentical(srcStat, dstStat)) return - - const dir = path.dirname(dstpath) - - const dirExists = await pathExists(dir) - - if (!dirExists) { - await mkdir.mkdirs(dir) - } - - await fs.link(srcpath, dstpath) -} - -function createLinkSync (srcpath, dstpath) { - let dstStat - try { - dstStat = fs.lstatSync(dstpath) - } catch {} - - try { - const srcStat = fs.lstatSync(srcpath) - if (dstStat && areIdentical(srcStat, dstStat)) return - } catch (err) { - err.message = err.message.replace('lstat', 'ensureLink') - throw err - } - - const dir = path.dirname(dstpath) - const dirExists = fs.existsSync(dir) - if (dirExists) return fs.linkSync(srcpath, dstpath) - mkdir.mkdirsSync(dir) - - return fs.linkSync(srcpath, dstpath) -} - -module.exports = { - createLink: u(createLink), - createLinkSync -} - - -/***/ }), - -/***/ 25393: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const path = __nccwpck_require__(71017) -const fs = __nccwpck_require__(30201) -const { pathExists } = __nccwpck_require__(90271) - -const u = (__nccwpck_require__(61305).fromPromise) - -/** - * Function that returns two types of paths, one relative to symlink, and one - * relative to the current working directory. Checks if path is absolute or - * relative. If the path is relative, this function checks if the path is - * relative to symlink or relative to current working directory. This is an - * initiative to find a smarter `srcpath` to supply when building symlinks. - * This allows you to determine which path to use out of one of three possible - * types of source paths. The first is an absolute path. This is detected by - * `path.isAbsolute()`. When an absolute path is provided, it is checked to - * see if it exists. If it does it's used, if not an error is returned - * (callback)/ thrown (sync). The other two options for `srcpath` are a - * relative url. By default Node's `fs.symlink` works by creating a symlink - * using `dstpath` and expects the `srcpath` to be relative to the newly - * created symlink. If you provide a `srcpath` that does not exist on the file - * system it results in a broken symlink. 
To minimize this, the function - * checks to see if the 'relative to symlink' source file exists, and if it - * does it will use it. If it does not, it checks if there's a file that - * exists that is relative to the current working directory, if does its used. - * This preserves the expectations of the original fs.symlink spec and adds - * the ability to pass in `relative to current working direcotry` paths. - */ - -async function symlinkPaths (srcpath, dstpath) { - if (path.isAbsolute(srcpath)) { - try { - await fs.lstat(srcpath) - } catch (err) { - err.message = err.message.replace('lstat', 'ensureSymlink') - throw err - } - - return { - toCwd: srcpath, - toDst: srcpath - } - } - - const dstdir = path.dirname(dstpath) - const relativeToDst = path.join(dstdir, srcpath) - - const exists = await pathExists(relativeToDst) - if (exists) { - return { - toCwd: relativeToDst, - toDst: srcpath - } - } - - try { - await fs.lstat(srcpath) - } catch (err) { - err.message = err.message.replace('lstat', 'ensureSymlink') - throw err - } - - return { - toCwd: srcpath, - toDst: path.relative(dstdir, srcpath) - } -} - -function symlinkPathsSync (srcpath, dstpath) { - if (path.isAbsolute(srcpath)) { - const exists = fs.existsSync(srcpath) - if (!exists) throw new Error('absolute srcpath does not exist') - return { - toCwd: srcpath, - toDst: srcpath - } - } - - const dstdir = path.dirname(dstpath) - const relativeToDst = path.join(dstdir, srcpath) - const exists = fs.existsSync(relativeToDst) - if (exists) { - return { - toCwd: relativeToDst, - toDst: srcpath - } - } - - const srcExists = fs.existsSync(srcpath) - if (!srcExists) throw new Error('relative srcpath does not exist') - return { - toCwd: srcpath, - toDst: path.relative(dstdir, srcpath) - } -} - -module.exports = { - symlinkPaths: u(symlinkPaths), - symlinkPathsSync -} - - -/***/ }), - -/***/ 80999: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(30201) -const u = (__nccwpck_require__(61305).fromPromise) - -async function symlinkType (srcpath, type) { - if (type) return type - - let stats - try { - stats = await fs.lstat(srcpath) - } catch { - return 'file' - } - - return (stats && stats.isDirectory()) ? 'dir' : 'file' -} - -function symlinkTypeSync (srcpath, type) { - if (type) return type - - let stats - try { - stats = fs.lstatSync(srcpath) - } catch { - return 'file' - } - return (stats && stats.isDirectory()) ? 
'dir' : 'file' -} - -module.exports = { - symlinkType: u(symlinkType), - symlinkTypeSync -} - - -/***/ }), - -/***/ 14720: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = (__nccwpck_require__(61305).fromPromise) -const path = __nccwpck_require__(71017) -const fs = __nccwpck_require__(30201) - -const { mkdirs, mkdirsSync } = __nccwpck_require__(21946) - -const { symlinkPaths, symlinkPathsSync } = __nccwpck_require__(25393) -const { symlinkType, symlinkTypeSync } = __nccwpck_require__(80999) - -const { pathExists } = __nccwpck_require__(90271) - -const { areIdentical } = __nccwpck_require__(23600) - -async function createSymlink (srcpath, dstpath, type) { - let stats - try { - stats = await fs.lstat(dstpath) - } catch { } - - if (stats && stats.isSymbolicLink()) { - const [srcStat, dstStat] = await Promise.all([ - fs.stat(srcpath), - fs.stat(dstpath) - ]) - - if (areIdentical(srcStat, dstStat)) return - } - - const relative = await symlinkPaths(srcpath, dstpath) - srcpath = relative.toDst - const toType = await symlinkType(relative.toCwd, type) - const dir = path.dirname(dstpath) - - if (!(await pathExists(dir))) { - await mkdirs(dir) - } - - return fs.symlink(srcpath, dstpath, toType) -} - -function createSymlinkSync (srcpath, dstpath, type) { - let stats - try { - stats = fs.lstatSync(dstpath) - } catch { } - if (stats && stats.isSymbolicLink()) { - const srcStat = fs.statSync(srcpath) - const dstStat = fs.statSync(dstpath) - if (areIdentical(srcStat, dstStat)) return - } - - const relative = symlinkPathsSync(srcpath, dstpath) - srcpath = relative.toDst - type = symlinkTypeSync(relative.toCwd, type) - const dir = path.dirname(dstpath) - const exists = fs.existsSync(dir) - if (exists) return fs.symlinkSync(srcpath, dstpath, type) - mkdirsSync(dir) - return fs.symlinkSync(srcpath, dstpath, type) -} - -module.exports = { - createSymlink: u(createSymlink), - createSymlinkSync -} - - -/***/ }), - -/***/ 30201: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// This is adapted from https://github.com/normalize/mz -// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors -const u = (__nccwpck_require__(61305).fromCallback) -const fs = __nccwpck_require__(30494) - -const api = [ - 'access', - 'appendFile', - 'chmod', - 'chown', - 'close', - 'copyFile', - 'fchmod', - 'fchown', - 'fdatasync', - 'fstat', - 'fsync', - 'ftruncate', - 'futimes', - 'lchmod', - 'lchown', - 'link', - 'lstat', - 'mkdir', - 'mkdtemp', - 'open', - 'opendir', - 'readdir', - 'readFile', - 'readlink', - 'realpath', - 'rename', - 'rm', - 'rmdir', - 'stat', - 'symlink', - 'truncate', - 'unlink', - 'utimes', - 'writeFile' -].filter(key => { - // Some commands are not available on some systems. 
Ex: - // fs.cp was added in Node.js v16.7.0 - // fs.lchown is not available on at least some Linux - return typeof fs[key] === 'function' -}) - -// Export cloned fs: -Object.assign(exports, fs) - -// Universalify async methods: -api.forEach(method => { - exports[method] = u(fs[method]) -}) - -// We differ from mz/fs in that we still ship the old, broken, fs.exists() -// since we are a drop-in replacement for the native module -exports.exists = function (filename, callback) { - if (typeof callback === 'function') { - return fs.exists(filename, callback) - } - return new Promise(resolve => { - return fs.exists(filename, resolve) - }) -} - -// fs.read(), fs.write(), fs.readv(), & fs.writev() need special treatment due to multiple callback args - -exports.read = function (fd, buffer, offset, length, position, callback) { - if (typeof callback === 'function') { - return fs.read(fd, buffer, offset, length, position, callback) - } - return new Promise((resolve, reject) => { - fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => { - if (err) return reject(err) - resolve({ bytesRead, buffer }) - }) - }) -} - -// Function signature can be -// fs.write(fd, buffer[, offset[, length[, position]]], callback) -// OR -// fs.write(fd, string[, position[, encoding]], callback) -// We need to handle both cases, so we use ...args -exports.write = function (fd, buffer, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.write(fd, buffer, ...args) - } - - return new Promise((resolve, reject) => { - fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => { - if (err) return reject(err) - resolve({ bytesWritten, buffer }) - }) - }) -} - -// Function signature is -// s.readv(fd, buffers[, position], callback) -// We need to handle the optional arg, so we use ...args -exports.readv = function (fd, buffers, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.readv(fd, buffers, ...args) - } - - return new Promise((resolve, reject) => { - fs.readv(fd, buffers, ...args, (err, bytesRead, buffers) => { - if (err) return reject(err) - resolve({ bytesRead, buffers }) - }) - }) -} - -// Function signature is -// s.writev(fd, buffers[, position], callback) -// We need to handle the optional arg, so we use ...args -exports.writev = function (fd, buffers, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.writev(fd, buffers, ...args) - } - - return new Promise((resolve, reject) => { - fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => { - if (err) return reject(err) - resolve({ bytesWritten, buffers }) - }) - }) -} - -// fs.realpath.native sometimes not available if fs is monkey-patched -if (typeof fs.realpath.native === 'function') { - exports.realpath.native = u(fs.realpath.native) -} else { - process.emitWarning( - 'fs.realpath.native is not a function. 
Is fs being monkey-patched?', - 'Warning', 'fs-extra-WARN0003' - ) -} - - -/***/ }), - -/***/ 46692: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -module.exports = { - // Export promiseified graceful-fs: - ...__nccwpck_require__(30201), - // Export extra methods: - ...__nccwpck_require__(27171), - ...__nccwpck_require__(49945), - ...__nccwpck_require__(13157), - ...__nccwpck_require__(12610), - ...__nccwpck_require__(21946), - ...__nccwpck_require__(21553), - ...__nccwpck_require__(42775), - ...__nccwpck_require__(90271), - ...__nccwpck_require__(46179) -} - - -/***/ }), - -/***/ 12610: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = (__nccwpck_require__(61305).fromPromise) -const jsonFile = __nccwpck_require__(34844) - -jsonFile.outputJson = u(__nccwpck_require__(1598)) -jsonFile.outputJsonSync = __nccwpck_require__(82861) -// aliases -jsonFile.outputJSON = jsonFile.outputJson -jsonFile.outputJSONSync = jsonFile.outputJsonSync -jsonFile.writeJSON = jsonFile.writeJson -jsonFile.writeJSONSync = jsonFile.writeJsonSync -jsonFile.readJSON = jsonFile.readJson -jsonFile.readJSONSync = jsonFile.readJsonSync - -module.exports = jsonFile - - -/***/ }), - -/***/ 34844: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const jsonFile = __nccwpck_require__(54262) - -module.exports = { - // jsonfile exports - readJson: jsonFile.readFile, - readJsonSync: jsonFile.readFileSync, - writeJson: jsonFile.writeFile, - writeJsonSync: jsonFile.writeFileSync -} - - -/***/ }), - -/***/ 82861: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { stringify } = __nccwpck_require__(92923) -const { outputFileSync } = __nccwpck_require__(42775) - -function outputJsonSync (file, data, options) { - const str = stringify(data, options) - - outputFileSync(file, str, options) -} - -module.exports = outputJsonSync - - -/***/ }), - -/***/ 1598: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { stringify } = __nccwpck_require__(92923) -const { outputFile } = __nccwpck_require__(42775) - -async function outputJson (file, data, options = {}) { - const str = stringify(data, options) - - await outputFile(file, str, options) -} - -module.exports = outputJson - - -/***/ }), - -/***/ 21946: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const u = (__nccwpck_require__(61305).fromPromise) -const { makeDir: _makeDir, makeDirSync } = __nccwpck_require__(16007) -const makeDir = u(_makeDir) - -module.exports = { - mkdirs: makeDir, - mkdirsSync: makeDirSync, - // alias - mkdirp: makeDir, - mkdirpSync: makeDirSync, - ensureDir: makeDir, - ensureDirSync: makeDirSync -} - - -/***/ }), - -/***/ 16007: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const fs = __nccwpck_require__(30201) -const { checkPath } = __nccwpck_require__(27105) - -const getMode = options => { - const defaults = { mode: 0o777 } - if (typeof options === 'number') return options - return ({ ...defaults, ...options }).mode -} - -module.exports.makeDir = async (dir, options) => { - checkPath(dir) - - return fs.mkdir(dir, { - mode: getMode(options), - recursive: true - }) -} - -module.exports.makeDirSync = (dir, options) => { - checkPath(dir) - - return fs.mkdirSync(dir, { - mode: getMode(options), - recursive: true - }) -} - - -/***/ }), - -/***/ 27105: -/***/ 
((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; -// Adapted from https://github.com/sindresorhus/make-dir -// Copyright (c) Sindre Sorhus (sindresorhus.com) -// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -const path = __nccwpck_require__(71017) - -// https://github.com/nodejs/node/issues/8987 -// https://github.com/libuv/libuv/pull/1088 -module.exports.checkPath = function checkPath (pth) { - if (process.platform === 'win32') { - const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, '')) - - if (pathHasInvalidWinCharacters) { - const error = new Error(`Path contains invalid characters: ${pth}`) - error.code = 'EINVAL' - throw error - } - } -} - - -/***/ }), - -/***/ 21553: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = (__nccwpck_require__(61305).fromPromise) -module.exports = { - move: u(__nccwpck_require__(18170)), - moveSync: __nccwpck_require__(95366) -} - - -/***/ }), - -/***/ 95366: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(30494) -const path = __nccwpck_require__(71017) -const copySync = (__nccwpck_require__(27171).copySync) -const removeSync = (__nccwpck_require__(46179).removeSync) -const mkdirpSync = (__nccwpck_require__(21946).mkdirpSync) -const stat = __nccwpck_require__(23600) - -function moveSync (src, dest, opts) { - opts = opts || {} - const overwrite = opts.overwrite || opts.clobber || false - - const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts) - stat.checkParentPathsSync(src, srcStat, dest, 'move') - if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest)) - return doRename(src, dest, overwrite, isChangingCase) -} - -function isParentRoot (dest) { - const parent = path.dirname(dest) - const parsedPath = path.parse(parent) - return parsedPath.root === parent -} - -function doRename (src, dest, overwrite, isChangingCase) { - if (isChangingCase) return rename(src, dest, overwrite) - if (overwrite) { - removeSync(dest) - return rename(src, dest, overwrite) - } - if (fs.existsSync(dest)) throw new Error('dest already exists.') - return rename(src, dest, overwrite) -} - -function rename (src, dest, overwrite) { - try { - fs.renameSync(src, dest) - } catch (err) { - if (err.code !== 'EXDEV') throw err - return moveAcrossDevice(src, dest, overwrite) - } -} - -function moveAcrossDevice (src, dest, overwrite) { - const opts = { - overwrite, - errorOnExist: true, - 
preserveTimestamps: true - } - copySync(src, dest, opts) - return removeSync(src) -} - -module.exports = moveSync - - -/***/ }), - -/***/ 18170: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(30201) -const path = __nccwpck_require__(71017) -const { copy } = __nccwpck_require__(27171) -const { remove } = __nccwpck_require__(46179) -const { mkdirp } = __nccwpck_require__(21946) -const { pathExists } = __nccwpck_require__(90271) -const stat = __nccwpck_require__(23600) - -async function move (src, dest, opts = {}) { - const overwrite = opts.overwrite || opts.clobber || false - - const { srcStat, isChangingCase = false } = await stat.checkPaths(src, dest, 'move', opts) - - await stat.checkParentPaths(src, srcStat, dest, 'move') - - // If the parent of dest is not root, make sure it exists before proceeding - const destParent = path.dirname(dest) - const parsedParentPath = path.parse(destParent) - if (parsedParentPath.root !== destParent) { - await mkdirp(destParent) - } - - return doRename(src, dest, overwrite, isChangingCase) -} - -async function doRename (src, dest, overwrite, isChangingCase) { - if (!isChangingCase) { - if (overwrite) { - await remove(dest) - } else if (await pathExists(dest)) { - throw new Error('dest already exists.') - } - } - - try { - // Try w/ rename first, and try copy + remove if EXDEV - await fs.rename(src, dest) - } catch (err) { - if (err.code !== 'EXDEV') { - throw err - } - await moveAcrossDevice(src, dest, overwrite) - } -} - -async function moveAcrossDevice (src, dest, overwrite) { - const opts = { - overwrite, - errorOnExist: true, - preserveTimestamps: true - } - - await copy(src, dest, opts) - return remove(src) -} - -module.exports = move - - -/***/ }), - -/***/ 42775: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const u = (__nccwpck_require__(61305).fromPromise) -const fs = __nccwpck_require__(30201) -const path = __nccwpck_require__(71017) -const mkdir = __nccwpck_require__(21946) -const pathExists = (__nccwpck_require__(90271).pathExists) - -async function outputFile (file, data, encoding = 'utf-8') { - const dir = path.dirname(file) - - if (!(await pathExists(dir))) { - await mkdir.mkdirs(dir) - } - - return fs.writeFile(file, data, encoding) -} - -function outputFileSync (file, ...args) { - const dir = path.dirname(file) - if (!fs.existsSync(dir)) { - mkdir.mkdirsSync(dir) - } - - fs.writeFileSync(file, ...args) -} - -module.exports = { - outputFile: u(outputFile), - outputFileSync -} - - -/***/ }), - -/***/ 90271: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const u = (__nccwpck_require__(61305).fromPromise) -const fs = __nccwpck_require__(30201) - -function pathExists (path) { - return fs.access(path).then(() => true).catch(() => false) -} - -module.exports = { - pathExists: u(pathExists), - pathExistsSync: fs.existsSync -} - - -/***/ }), - -/***/ 46179: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(30494) -const u = (__nccwpck_require__(61305).fromCallback) - -function remove (path, callback) { - fs.rm(path, { recursive: true, force: true }, callback) -} - -function removeSync (path) { - fs.rmSync(path, { recursive: true, force: true }) -} - -module.exports = { - remove: u(remove), - removeSync -} - - -/***/ }), - -/***/ 23600: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use 
strict"; - - -const fs = __nccwpck_require__(30201) -const path = __nccwpck_require__(71017) -const u = (__nccwpck_require__(61305).fromPromise) - -function getStats (src, dest, opts) { - const statFunc = opts.dereference - ? (file) => fs.stat(file, { bigint: true }) - : (file) => fs.lstat(file, { bigint: true }) - return Promise.all([ - statFunc(src), - statFunc(dest).catch(err => { - if (err.code === 'ENOENT') return null - throw err - }) - ]).then(([srcStat, destStat]) => ({ srcStat, destStat })) -} - -function getStatsSync (src, dest, opts) { - let destStat - const statFunc = opts.dereference - ? (file) => fs.statSync(file, { bigint: true }) - : (file) => fs.lstatSync(file, { bigint: true }) - const srcStat = statFunc(src) - try { - destStat = statFunc(dest) - } catch (err) { - if (err.code === 'ENOENT') return { srcStat, destStat: null } - throw err - } - return { srcStat, destStat } -} - -async function checkPaths (src, dest, funcName, opts) { - const { srcStat, destStat } = await getStats(src, dest, opts) - if (destStat) { - if (areIdentical(srcStat, destStat)) { - const srcBaseName = path.basename(src) - const destBaseName = path.basename(dest) - if (funcName === 'move' && - srcBaseName !== destBaseName && - srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { - return { srcStat, destStat, isChangingCase: true } - } - throw new Error('Source and destination must not be the same.') - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new Error(errMsg(src, dest, funcName)) - } - - return { srcStat, destStat } -} - -function checkPathsSync (src, dest, funcName, opts) { - const { srcStat, destStat } = getStatsSync(src, dest, opts) - - if (destStat) { - if (areIdentical(srcStat, destStat)) { - const srcBaseName = path.basename(src) - const destBaseName = path.basename(dest) - if (funcName === 'move' && - srcBaseName !== destBaseName && - srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { - return { srcStat, destStat, isChangingCase: true } - } - throw new Error('Source and destination must not be the same.') - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new Error(errMsg(src, dest, funcName)) - } - return { srcStat, destStat } -} - -// recursively check if dest parent is a subdirectory of src. -// It works for all file types including symlinks since it -// checks the src and dest inodes. It starts from the deepest -// parent and stops once it reaches the src parent or the root path. 
-async function checkParentPaths (src, srcStat, dest, funcName) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return - - let destStat - try { - destStat = await fs.stat(destParent, { bigint: true }) - } catch (err) { - if (err.code === 'ENOENT') return - throw err - } - - if (areIdentical(srcStat, destStat)) { - throw new Error(errMsg(src, dest, funcName)) - } - - return checkParentPaths(src, srcStat, destParent, funcName) -} - -function checkParentPathsSync (src, srcStat, dest, funcName) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return - let destStat - try { - destStat = fs.statSync(destParent, { bigint: true }) - } catch (err) { - if (err.code === 'ENOENT') return - throw err - } - if (areIdentical(srcStat, destStat)) { - throw new Error(errMsg(src, dest, funcName)) - } - return checkParentPathsSync(src, srcStat, destParent, funcName) -} - -function areIdentical (srcStat, destStat) { - return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev -} - -// return true if dest is a subdir of src, otherwise false. -// It only checks the path strings. -function isSrcSubdir (src, dest) { - const srcArr = path.resolve(src).split(path.sep).filter(i => i) - const destArr = path.resolve(dest).split(path.sep).filter(i => i) - return srcArr.every((cur, i) => destArr[i] === cur) -} - -function errMsg (src, dest, funcName) { - return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.` -} - -module.exports = { - // checkPaths - checkPaths: u(checkPaths), - checkPathsSync, - // checkParent - checkParentPaths: u(checkParentPaths), - checkParentPathsSync, - // Misc - isSrcSubdir, - areIdentical -} - - -/***/ }), - -/***/ 15475: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const fs = __nccwpck_require__(30201) -const u = (__nccwpck_require__(61305).fromPromise) - -async function utimesMillis (path, atime, mtime) { - // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback) - const fd = await fs.open(path, 'r+') - - let closeErr = null - - try { - await fs.futimes(fd, atime, mtime) - } finally { - try { - await fs.close(fd) - } catch (e) { - closeErr = e - } - } - - if (closeErr) { - throw closeErr - } -} - -function utimesMillisSync (path, atime, mtime) { - const fd = fs.openSync(path, 'r+') - fs.futimesSync(fd, atime, mtime) - return fs.closeSync(fd) -} - -module.exports = { - utimesMillis: u(utimesMillis), - utimesMillisSync -} - - /***/ }), /***/ 6887: @@ -26610,122 +25011,6 @@ var jsonfile = { module.exports = jsonfile -/***/ }), - -/***/ 54262: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -let _fs -try { - _fs = __nccwpck_require__(30494) -} catch (_) { - _fs = __nccwpck_require__(57147) -} -const universalify = __nccwpck_require__(61305) -const { stringify, stripBom } = __nccwpck_require__(92923) - -async function _readFile (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } - - const fs = options.fs || _fs - - const shouldThrow = 'throws' in options ? options.throws : true - - let data = await universalify.fromCallback(fs.readFile)(file, options) - - data = stripBom(data) - - let obj - try { - obj = JSON.parse(data, options ? 
options.reviver : null) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null - } - } - - return obj -} - -const readFile = universalify.fromPromise(_readFile) - -function readFileSync (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } - - const fs = options.fs || _fs - - const shouldThrow = 'throws' in options ? options.throws : true - - try { - let content = fs.readFileSync(file, options) - content = stripBom(content) - return JSON.parse(content, options.reviver) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null - } - } -} - -async function _writeFile (file, obj, options = {}) { - const fs = options.fs || _fs - - const str = stringify(obj, options) - - await universalify.fromCallback(fs.writeFile)(file, str, options) -} - -const writeFile = universalify.fromPromise(_writeFile) - -function writeFileSync (file, obj, options = {}) { - const fs = options.fs || _fs - - const str = stringify(obj, options) - // not sure if fs.writeFileSync returns anything, but just in case - return fs.writeFileSync(file, str, options) -} - -const jsonfile = { - readFile, - readFileSync, - writeFile, - writeFileSync -} - -module.exports = jsonfile - - -/***/ }), - -/***/ 92923: -/***/ ((module) => { - -function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) { - const EOF = finalEOL ? EOL : '' - const str = JSON.stringify(obj, replacer, spaces) - - return str.replace(/\n/g, EOL) + EOF -} - -function stripBom (content) { - // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified - if (Buffer.isBuffer(content)) content = content.toString('utf8') - return content.replace(/^\uFEFF/, '') -} - -module.exports = { stringify, stripBom } - - /***/ }), /***/ 73871: @@ -63522,38 +61807,6 @@ exports.p = function (fn) { } -/***/ }), - -/***/ 61305: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -exports.fromCallback = function (fn) { - return Object.defineProperty(function (...args) { - if (typeof args[args.length - 1] === 'function') fn.apply(this, args) - else { - return new Promise((resolve, reject) => { - args.push((err, res) => (err != null) ? reject(err) : resolve(res)) - fn.apply(this, args) - }) - } - }, 'name', { value: fn.name }) -} - -exports.fromPromise = function (fn) { - return Object.defineProperty(function (...args) { - const cb = args[args.length - 1] - if (typeof cb !== 'function') return fn.apply(this, args) - else { - args.pop() - fn.apply(this, args).then(r => cb(null, r), cb) - } - }, 'name', { value: fn.name }) -} - - /***/ }), /***/ 24187: @@ -65520,10 +63773,16 @@ var __asyncValues = (this && this.__asyncValues) || function (o) { function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } }; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getChangedAllFiles = getChangedAllFiles; +exports.findNearestPackageJson = findNearestPackageJson; +const path_1 = __importDefault(__nccwpck_require__(71017)); const core = __importStar(__nccwpck_require__(6108)); const github = __importStar(__nccwpck_require__(31645)); +const fs_extra_1 = __importDefault(__nccwpck_require__(20077)); const utils_1 = __nccwpck_require__(53927); function getChangedAllFiles(_a) { return __awaiter(this, arguments, void 0, function* ({ pullNumber }) { @@ -65555,6 +63814,17 @@ function getChangedAllFiles(_a) { return changedFiles; }); } +function findNearestPackageJson(filePath) { + let currentDir = path_1.default.dirname(filePath); + while (currentDir !== path_1.default.parse(currentDir).root) { + const packageJsonPath = path_1.default.join(currentDir, 'package.json'); + if (fs_extra_1.default.existsSync(packageJsonPath)) { + return packageJsonPath; + } + currentDir = path_1.default.dirname(currentDir); + } + return undefined; +} /***/ }), @@ -66039,7 +64309,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createReleaseTag = void 0; const path_1 = __importDefault(__nccwpck_require__(71017)); const github = __importStar(__nccwpck_require__(31645)); -const fs_extra_1 = __importDefault(__nccwpck_require__(46692)); +const fs_extra_1 = __importDefault(__nccwpck_require__(20077)); const apis_1 = __importDefault(__nccwpck_require__(6500)); const file_1 = __nccwpck_require__(80398); const createReleaseTag = (_a) => __awaiter(void 0, [_a], void 0, function* ({ pkg, tagName }) { @@ -66106,7 +64376,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", ({ value: true })); exports.setNpmRc = setNpmRc; const core = __importStar(__nccwpck_require__(6108)); -const fs_extra_1 = __importDefault(__nccwpck_require__(46692)); +const fs_extra_1 = __importDefault(__nccwpck_require__(20077)); function setNpmRc() { return __awaiter(this, void 0, void 0, function* () { core.info('No changesets found, attempting to publish any unpublished packages to npm'); @@ -66230,7 +64500,7 @@ const path_1 = __importDefault(__nccwpck_require__(71017)); const core = __importStar(__nccwpck_require__(6108)); const exec_1 = __nccwpck_require__(39629); const github = __importStar(__nccwpck_require__(31645)); -const fs_extra_1 = __importDefault(__nccwpck_require__(46692)); +const fs_extra_1 = __importDefault(__nccwpck_require__(20077)); const resolve_from_1 = __importDefault(__nccwpck_require__(41345)); const utils_1 = __nccwpck_require__(53927); const file_1 = __nccwpck_require__(80398); diff --git a/publish/package.json b/publish/package.json index dca852e..43b422d 100644 --- a/publish/package.json +++ b/publish/package.json @@ -14,7 +14,6 @@ "dependencies": { "@changesets/read": "^0.6.0", "@manypkg/get-packages": "^1.1.3", - "fs-extra": "^11.2.0", "mdast-util-to-string": "^4.0.0", "remark-parse": "^7.0.2", "remark-stringify": "^7.0.3", @@ -22,7 +21,6 @@ "unified": "^8.4.2" }, "devDependencies": { - "@types/fs-extra": "^11.0.4", "@types/node": "^20.14.9" } }