From a56f68840fcde9a887afb6d986091286a13c1830 Mon Sep 17 00:00:00 2001 From: Sora Morimoto Date: Tue, 21 Feb 2023 08:56:02 +0900 Subject: [PATCH 1/3] Speed up caching and get rid of bugs and hacks on Windows Signed-off-by: Sora Morimoto --- dist/index.js | 430 +++++++++++++++++++++-------------- dist/post/index.js | 380 ++++++++++++++++++++----------- package.json | 2 +- src/setup-ocaml/installer.ts | 55 +---- src/setup-ocaml/opam.ts | 8 +- src/setup-ocaml/post.ts | 16 +- yarn.lock | 31 ++- 7 files changed, 531 insertions(+), 391 deletions(-) diff --git a/dist/index.js b/dist/index.js index 381a29ff..78552544 100644 --- a/dist/index.js +++ b/dist/index.js @@ -74,9 +74,10 @@ exports.isFeatureAvailable = isFeatureAvailable; * @param primaryKey an explicit key for restoring the cache * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key * @param downloadOptions cache download options + * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined */ -function restoreCache(paths, primaryKey, restoreKeys, options) { +function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); restoreKeys = restoreKeys || []; @@ -94,7 +95,8 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { try { // path are needed to compute version const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod + compressionMethod, + enableCrossOsArchive }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { // Cache not found @@ -141,10 +143,11 @@ exports.restoreCache = restoreCache; * * @param paths a list of file paths to be cached * @param key an explicit key for restoring the cache + * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform * @param options cache upload options * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails */ -function saveCache(paths, key, options) { +function saveCache(paths, key, options, enableCrossOsArchive = false) { var _a, _b, _c, _d, _e; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); @@ -175,6 +178,7 @@ function saveCache(paths, key, options) { core.debug('Reserving Cache'); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, + enableCrossOsArchive, cacheSize: archiveFileSize }); if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? 
void 0 : _a.cacheId) { @@ -247,7 +251,6 @@ const crypto = __importStar(__nccwpck_require__(6113)); const fs = __importStar(__nccwpck_require__(57147)); const url_1 = __nccwpck_require__(57310); const utils = __importStar(__nccwpck_require__(91518)); -const constants_1 = __nccwpck_require__(88840); const downloadUtils_1 = __nccwpck_require__(55500); const options_1 = __nccwpck_require__(76215); const requestUtils_1 = __nccwpck_require__(13981); @@ -277,10 +280,17 @@ function createHttpClient() { const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); } -function getCacheVersion(paths, compressionMethod) { - const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip - ? [] - : [compressionMethod]); +function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { + const components = paths; + // Add compression method to cache version to restore + // compressed cache as per compression method + if (compressionMethod) { + components.push(compressionMethod); + } + // Only check for windows platforms if enableCrossOsArchive is false + if (process.platform === 'win32' && !enableCrossOsArchive) { + components.push('windows-only'); + } // Add salt to cache version to support breaking changes in cache entry components.push(versionSalt); return crypto @@ -292,10 +302,15 @@ exports.getCacheVersion = getCacheVersion; function getCacheEntry(keys, paths, options) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); - const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + // Cache not found if (response.statusCode === 204) { + // List cache for primary key only if cache miss occurs + if (core.isDebug()) { + yield printCachesListForDiagnostics(keys[0], httpClient, version); + } return null; } if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { @@ -304,6 +319,7 @@ function getCacheEntry(keys, paths, options) { const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. 
throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -313,6 +329,22 @@ function getCacheEntry(keys, paths, options) { }); } exports.getCacheEntry = getCacheEntry; +function printCachesListForDiagnostics(key, httpClient, version) { + return __awaiter(this, void 0, void 0, function* () { + const resource = `caches?key=${encodeURIComponent(key)}`; + const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 200) { + const cacheListResult = response.result; + const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; + if (totalCount && totalCount > 0) { + core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); + for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { + core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + } + } + } + }); +} function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); @@ -333,7 +365,7 @@ exports.downloadCache = downloadCache; function reserveCache(key, paths, options) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); - const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
void 0 : options.enableCrossOsArchive); const reserveCacheRequest = { key, version, @@ -551,12 +583,13 @@ function unlinkFile(filePath) { }); } exports.unlinkFile = unlinkFile; -function getVersion(app) { +function getVersion(app, additionalArgs = []) { return __awaiter(this, void 0, void 0, function* () { - core.debug(`Checking ${app} --version`); let versionOutput = ''; + additionalArgs.push('--version'); + core.debug(`Checking ${app} ${additionalArgs.join(' ')}`); try { - yield exec.exec(`${app} --version`, [], { + yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, silent: true, listeners: { @@ -576,23 +609,14 @@ function getVersion(app) { // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { - if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { - // Disable zstd due to bug https://github.com/actions/cache/issues/301 - return constants_1.CompressionMethod.Gzip; - } - const versionOutput = yield getVersion('zstd'); + const versionOutput = yield getVersion('zstd', ['--quiet']); const version = semver.clean(versionOutput); - if (!versionOutput.toLowerCase().includes('zstd command line interface')) { - // zstd is not installed + core.debug(`zstd version: ${version}`); + if (versionOutput === '') { return constants_1.CompressionMethod.Gzip; } - else if (!version || semver.lt(version, 'v1.3.2')) { - // zstd is installed but using a version earlier than v1.3.2 - // v1.3.2 is required to use the `--long` options in zstd - return constants_1.CompressionMethod.ZstdWithoutLong; - } else { - return constants_1.CompressionMethod.Zstd; + return constants_1.CompressionMethod.ZstdWithoutLong; } }); } @@ -603,13 +627,16 @@ function getCacheFileName(compressionMethod) { : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; -function isGnuTarInstalled() { +function getGnuTarPathOnWindows() { return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { + return constants_1.GnuTarPathOnWindows; + } const versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar'); + return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; }); } -exports.isGnuTarInstalled = isGnuTarInstalled; +exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); @@ -645,6 +672,11 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +var ArchiveToolType; +(function (ArchiveToolType) { + ArchiveToolType["GNU"] = "gnu"; + ArchiveToolType["BSD"] = "bsd"; +})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -653,6 +685,12 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. 
exports.SocketTimeout = 5000; +// The default path of GNUtar on hosted Windows runners +exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; +// The default path of BSDtar on hosted Windows runners +exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; +exports.TarFilename = 'cache.tar'; +exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), @@ -1071,21 +1109,19 @@ const path = __importStar(__nccwpck_require__(71017)); const utils = __importStar(__nccwpck_require__(91518)); const constants_1 = __nccwpck_require__(88840); const IS_WINDOWS = process.platform === 'win32'; -function getTarPath(args, compressionMethod) { +// Returns tar path and type: BSD or GNU +function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const systemTar = `${process.env['windir']}\\System32\\tar.exe`; - if (compressionMethod !== constants_1.CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local'); + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + // Use GNUtar as default on windows + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else if (fs_1.existsSync(systemTar)) { - return systemTar; - } - else if (yield utils.isGnuTarInstalled()) { - args.push('--force-local'); + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; } break; } @@ -1093,25 +1129,92 @@ function getTarPath(args, compressionMethod) { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - args.push('--delay-directory-restore'); - return gnuTar; + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } + else { + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.BSD + }; } - break; } default: break; } - return yield io.which('tar', true); + // Default assumption is GNU tar is present in path + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.GNU + }; }); } -function execTar(args, compressionMethod, cwd) { +// Return arguments for tar as per tarPath, compressionMethod, method type and os +function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = 'cache.tar'; + const workingDirectory = getWorkingDirectory(); + // Speficic args for BSD tar on windows for workaround + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + // Method specific args + switch (type) { + case 'create': + args.push('--posix', '-cf', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD + ? 
tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); + break; + case 'extract': + args.push('-xf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); + break; + case 'list': + args.push('-tf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); + break; } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + // Platform specific args + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case 'win32': + args.push('--force-local'); + break; + case 'darwin': + args.push('--delay-directory-restore'); + break; + } + } + return args; + }); +} +// Returns commands to run tar and compression program +function getCommands(compressionMethod, type, archivePath = '') { + return __awaiter(this, void 0, void 0, function* () { + let args; + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); + const compressionArgs = type !== 'create' + ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) + : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + if (BSD_TAR_ZSTD && type !== 'create') { + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; + } + else { + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -1119,91 +1222,119 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getCompressionProgram(compressionMethod) { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; - default: - return ['-z']; - } +function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? 
[ + 'zstd -d --long=30 --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; + default: + return ['-z']; + } + }); +} +// Used for creating the archive +// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. +// zstdmt is equivalent to 'zstd -T0' +// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. +// Using 30 here because we also support 32-bit self-hosted runners. +// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. +function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --long=30 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; + default: + return ['-z']; + } + }); } +// Executes all commands as separate processes +function execCommands(commands, cwd) { + return __awaiter(this, void 0, void 0, function* () { + for (const command of commands) { + try { + yield exec_1.exec(command, undefined, { + cwd, + env: Object.assign(Object.assign({}, process.env), { MSYS: 'winsymlinks:nativestrict' }) + }); + } + catch (error) { + throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); + } + } + }); +} +// List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const args = [ - ...getCompressionProgram(compressionMethod), - '-tf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P' - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'list', archivePath); + yield execCommands(commands); }); } exports.listTar = listTar; +// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = [ - ...getCompressionProgram(compressionMethod), - '-xf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'extract', archivePath); + yield execCommands(commands); }); } exports.extractTar = extractTar; +// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = 'manifest.txt'; - const cacheFileName = utils.getCacheFileName(compressionMethod); - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); - const workingDirectory = getWorkingDirectory(); - // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. - // zstdmt is equivalent to 'zstd -T0' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 
'zstd -T0' : 'zstdmt']; - default: - return ['-z']; - } - } - const args = [ - '--posix', - ...getCompressionProgram(), - '-cf', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--exclude', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--files-from', - manifestFilename - ]; - yield execTar(args, compressionMethod, archiveFolder); + fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); + const commands = yield getCommands(compressionMethod, 'create'); + yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; @@ -126552,10 +126683,8 @@ async function acquireOpamWindows() { const opamVersion = "0.0.0.2"; const cachedPath = tool_cache.find("opam", opamVersion); if (cachedPath === "") { - const downloadedPath = await tool_cache.downloadTool(`https://github.com/fdopen/opam-repository-mingw/releases/download/${opamVersion}/opam64.tar.xz`); - const extractedPath = await tool_cache.extractTar(downloadedPath, undefined, [ - "xv", - ]); + const downloadedPath = await tool_cache.downloadTool(`https://github.com/fdopen/opam-repository-mingw/releases/download/${opamVersion}/opam64.zip`); + const extractedPath = await tool_cache.extractZip(downloadedPath); const cachedPath = await tool_cache.cacheDir(extractedPath, "opam", opamVersion); const installSh = external_node_path_namespaceObject.join(cachedPath, "opam64", "install.sh"); await external_node_fs_namespaceObject.promises.chmod(installSh, 0o755); @@ -126595,11 +126724,11 @@ async function setupOpamWindows() { lib_core.addPath(CYGWIN_ROOT_WRAPPERBIN); await setupCygwin(); lib_core.endGroup(); + await saveCygwinCache(); // eslint-disable-next-line @typescript-eslint/no-non-null-assertion const originalPath = external_node_process_namespaceObject.env.PATH.split(external_node_path_namespaceObject.delimiter); const patchedPath = [CYGWIN_ROOT_BIN, ...originalPath]; external_node_process_namespaceObject.env.PATH = patchedPath.join(external_node_path_namespaceObject.delimiter); - await saveCygwinCache(); lib_core.startGroup("Install opam"); await acquireOpamWindows(); lib_core.endGroup(); @@ -126765,27 +126894,9 @@ async function installer() { lib_core.exportVariable("MSYS", "winsymlinks:native"); } if (platform === "win32" /* Platform.Win32 */) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const originalPath = external_node_process_namespaceObject.env.PATH.split(external_node_path_namespaceObject.delimiter); - const msys64Path = external_node_path_namespaceObject.join("C:", "msys64", "usr", "bin"); - const patchedPath = [msys64Path, ...originalPath]; - external_node_process_namespaceObject.env.PATH = patchedPath.join(external_node_path_namespaceObject.delimiter); await restoreCygwinCache(); - external_node_process_namespaceObject.env.PATH = originalPath.join(external_node_path_namespaceObject.delimiter); - } - let opamCacheHit; - if (platform === "win32" /* Platform.Win32 */) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const originalPath = external_node_process_namespaceObject.env.PATH.split(external_node_path_namespaceObject.delimiter); - const msys64Path = external_node_path_namespaceObject.join("C:", "msys64", "usr", "bin"); - const patchedPath = [msys64Path, ...originalPath]; - external_node_process_namespaceObject.env.PATH = patchedPath.join(external_node_path_namespaceObject.delimiter); - 
opamCacheHit = await restoreOpamCache(); - external_node_process_namespaceObject.env.PATH = originalPath.join(external_node_path_namespaceObject.delimiter); - } - else { - opamCacheHit = await restoreOpamCache(); } + const opamCacheHit = await restoreOpamCache(); await setupOpam(); await repositoryRemoveAll(); await repositoryAddAll(OPAM_REPOSITORIES); @@ -126796,47 +126907,14 @@ async function installer() { : `ocaml-base-compiler.${await resolveVersion(OCAML_COMPILER)}` : OCAML_COMPILER; await installOcaml(ocamlCompiler); - if (platform === "win32" /* Platform.Win32 */) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const originalPath = external_node_process_namespaceObject.env.PATH.split(external_node_path_namespaceObject.delimiter); - const msys64Path = external_node_path_namespaceObject.join("C:", "msys64", "usr", "bin"); - const patchedPath = [msys64Path, ...originalPath]; - external_node_process_namespaceObject.env.PATH = patchedPath.join(external_node_path_namespaceObject.delimiter); - await saveOpamCache(); - external_node_process_namespaceObject.env.PATH = originalPath.join(external_node_path_namespaceObject.delimiter); - } - else { - await saveOpamCache(); - } - } - if (platform === "win32" /* Platform.Win32 */) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const originalPath = external_node_process_namespaceObject.env.PATH.split(external_node_path_namespaceObject.delimiter); - const msys64Path = external_node_path_namespaceObject.join("C:", "msys64", "usr", "bin"); - const patchedPath = [msys64Path, ...originalPath]; - external_node_process_namespaceObject.env.PATH = patchedPath.join(external_node_path_namespaceObject.delimiter); - await restoreOpamDownloadCache(); - external_node_process_namespaceObject.env.PATH = originalPath.join(external_node_path_namespaceObject.delimiter); - } - else { - await restoreOpamDownloadCache(); + await saveOpamCache(); } + await restoreOpamDownloadCache(); if (OPAM_DEPEXT) { await installDepext(platform); } if (DUNE_CACHE) { - if (platform === "win32" /* Platform.Win32 */) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const originalPath = external_node_process_namespaceObject.env.PATH.split(external_node_path_namespaceObject.delimiter); - const msys64Path = external_node_path_namespaceObject.join("C:", "msys64", "usr", "bin"); - const patchedPath = [msys64Path, ...originalPath]; - external_node_process_namespaceObject.env.PATH = patchedPath.join(external_node_path_namespaceObject.delimiter); - await restoreDuneCache(); - external_node_process_namespaceObject.env.PATH = originalPath.join(external_node_path_namespaceObject.delimiter); - } - else { - await restoreDuneCache(); - } + await restoreDuneCache(); await installDune(); lib_core.exportVariable("DUNE_CACHE", "enabled"); lib_core.exportVariable("DUNE_CACHE_TRANSPORT", "direct"); diff --git a/dist/post/index.js b/dist/post/index.js index 5640eeb6..b7e5ca74 100644 --- a/dist/post/index.js +++ b/dist/post/index.js @@ -74,9 +74,10 @@ exports.isFeatureAvailable = isFeatureAvailable; * @param primaryKey an explicit key for restoring the cache * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key * @param downloadOptions cache download options + * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined */ -function 
restoreCache(paths, primaryKey, restoreKeys, options) { +function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); restoreKeys = restoreKeys || []; @@ -94,7 +95,8 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { try { // path are needed to compute version const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod + compressionMethod, + enableCrossOsArchive }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { // Cache not found @@ -141,10 +143,11 @@ exports.restoreCache = restoreCache; * * @param paths a list of file paths to be cached * @param key an explicit key for restoring the cache + * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform * @param options cache upload options * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails */ -function saveCache(paths, key, options) { +function saveCache(paths, key, options, enableCrossOsArchive = false) { var _a, _b, _c, _d, _e; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); @@ -175,6 +178,7 @@ function saveCache(paths, key, options) { core.debug('Reserving Cache'); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, + enableCrossOsArchive, cacheSize: archiveFileSize }); if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { @@ -247,7 +251,6 @@ const crypto = __importStar(__nccwpck_require__(6113)); const fs = __importStar(__nccwpck_require__(57147)); const url_1 = __nccwpck_require__(57310); const utils = __importStar(__nccwpck_require__(91518)); -const constants_1 = __nccwpck_require__(88840); const downloadUtils_1 = __nccwpck_require__(55500); const options_1 = __nccwpck_require__(76215); const requestUtils_1 = __nccwpck_require__(13981); @@ -277,10 +280,17 @@ function createHttpClient() { const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); } -function getCacheVersion(paths, compressionMethod) { - const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip - ? [] - : [compressionMethod]); +function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { + const components = paths; + // Add compression method to cache version to restore + // compressed cache as per compression method + if (compressionMethod) { + components.push(compressionMethod); + } + // Only check for windows platforms if enableCrossOsArchive is false + if (process.platform === 'win32' && !enableCrossOsArchive) { + components.push('windows-only'); + } // Add salt to cache version to support breaking changes in cache entry components.push(versionSalt); return crypto @@ -292,10 +302,15 @@ exports.getCacheVersion = getCacheVersion; function getCacheEntry(keys, paths, options) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); - const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const version = getCacheVersion(paths, options === null || options === void 0 ? 
void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + // Cache not found if (response.statusCode === 204) { + // List cache for primary key only if cache miss occurs + if (core.isDebug()) { + yield printCachesListForDiagnostics(keys[0], httpClient, version); + } return null; } if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { @@ -304,6 +319,7 @@ function getCacheEntry(keys, paths, options) { const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -313,6 +329,22 @@ function getCacheEntry(keys, paths, options) { }); } exports.getCacheEntry = getCacheEntry; +function printCachesListForDiagnostics(key, httpClient, version) { + return __awaiter(this, void 0, void 0, function* () { + const resource = `caches?key=${encodeURIComponent(key)}`; + const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 200) { + const cacheListResult = response.result; + const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; + if (totalCount && totalCount > 0) { + core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); + for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { + core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + } + } + } + }); +} function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); @@ -333,7 +365,7 @@ exports.downloadCache = downloadCache; function reserveCache(key, paths, options) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); - const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
void 0 : options.enableCrossOsArchive); const reserveCacheRequest = { key, version, @@ -551,12 +583,13 @@ function unlinkFile(filePath) { }); } exports.unlinkFile = unlinkFile; -function getVersion(app) { +function getVersion(app, additionalArgs = []) { return __awaiter(this, void 0, void 0, function* () { - core.debug(`Checking ${app} --version`); let versionOutput = ''; + additionalArgs.push('--version'); + core.debug(`Checking ${app} ${additionalArgs.join(' ')}`); try { - yield exec.exec(`${app} --version`, [], { + yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, silent: true, listeners: { @@ -576,23 +609,14 @@ function getVersion(app) { // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { - if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { - // Disable zstd due to bug https://github.com/actions/cache/issues/301 - return constants_1.CompressionMethod.Gzip; - } - const versionOutput = yield getVersion('zstd'); + const versionOutput = yield getVersion('zstd', ['--quiet']); const version = semver.clean(versionOutput); - if (!versionOutput.toLowerCase().includes('zstd command line interface')) { - // zstd is not installed + core.debug(`zstd version: ${version}`); + if (versionOutput === '') { return constants_1.CompressionMethod.Gzip; } - else if (!version || semver.lt(version, 'v1.3.2')) { - // zstd is installed but using a version earlier than v1.3.2 - // v1.3.2 is required to use the `--long` options in zstd - return constants_1.CompressionMethod.ZstdWithoutLong; - } else { - return constants_1.CompressionMethod.Zstd; + return constants_1.CompressionMethod.ZstdWithoutLong; } }); } @@ -603,13 +627,16 @@ function getCacheFileName(compressionMethod) { : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; -function isGnuTarInstalled() { +function getGnuTarPathOnWindows() { return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { + return constants_1.GnuTarPathOnWindows; + } const versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar'); + return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; }); } -exports.isGnuTarInstalled = isGnuTarInstalled; +exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); @@ -645,6 +672,11 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +var ArchiveToolType; +(function (ArchiveToolType) { + ArchiveToolType["GNU"] = "gnu"; + ArchiveToolType["BSD"] = "bsd"; +})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -653,6 +685,12 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. 
exports.SocketTimeout = 5000; +// The default path of GNUtar on hosted Windows runners +exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; +// The default path of BSDtar on hosted Windows runners +exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; +exports.TarFilename = 'cache.tar'; +exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), @@ -1071,21 +1109,19 @@ const path = __importStar(__nccwpck_require__(71017)); const utils = __importStar(__nccwpck_require__(91518)); const constants_1 = __nccwpck_require__(88840); const IS_WINDOWS = process.platform === 'win32'; -function getTarPath(args, compressionMethod) { +// Returns tar path and type: BSD or GNU +function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const systemTar = `${process.env['windir']}\\System32\\tar.exe`; - if (compressionMethod !== constants_1.CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local'); + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + // Use GNUtar as default on windows + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else if (fs_1.existsSync(systemTar)) { - return systemTar; - } - else if (yield utils.isGnuTarInstalled()) { - args.push('--force-local'); + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; } break; } @@ -1093,25 +1129,92 @@ function getTarPath(args, compressionMethod) { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - args.push('--delay-directory-restore'); - return gnuTar; + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } + else { + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.BSD + }; } - break; } default: break; } - return yield io.which('tar', true); + // Default assumption is GNU tar is present in path + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.GNU + }; }); } -function execTar(args, compressionMethod, cwd) { +// Return arguments for tar as per tarPath, compressionMethod, method type and os +function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = 'cache.tar'; + const workingDirectory = getWorkingDirectory(); + // Speficic args for BSD tar on windows for workaround + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + // Method specific args + switch (type) { + case 'create': + args.push('--posix', '-cf', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD + ? 
tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); + break; + case 'extract': + args.push('-xf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); + break; + case 'list': + args.push('-tf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); + break; } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + // Platform specific args + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case 'win32': + args.push('--force-local'); + break; + case 'darwin': + args.push('--delay-directory-restore'); + break; + } + } + return args; + }); +} +// Returns commands to run tar and compression program +function getCommands(compressionMethod, type, archivePath = '') { + return __awaiter(this, void 0, void 0, function* () { + let args; + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); + const compressionArgs = type !== 'create' + ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) + : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + if (BSD_TAR_ZSTD && type !== 'create') { + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; + } + else { + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; + } + if (BSD_TAR_ZSTD) { + return args; } + return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -1119,91 +1222,119 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getCompressionProgram(compressionMethod) { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; - default: - return ['-z']; - } +function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? 
[ + 'zstd -d --long=30 --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; + default: + return ['-z']; + } + }); +} +// Used for creating the archive +// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. +// zstdmt is equivalent to 'zstd -T0' +// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. +// Using 30 here because we also support 32-bit self-hosted runners. +// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. +function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --long=30 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; + default: + return ['-z']; + } + }); +} +// Executes all commands as separate processes +function execCommands(commands, cwd) { + return __awaiter(this, void 0, void 0, function* () { + for (const command of commands) { + try { + yield exec_1.exec(command, undefined, { + cwd, + env: Object.assign(Object.assign({}, process.env), { MSYS: 'winsymlinks:nativestrict' }) + }); + } + catch (error) { + throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); + } + } + }); } +// List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const args = [ - ...getCompressionProgram(compressionMethod), - '-tf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P' - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'list', archivePath); + yield execCommands(commands); }); } exports.listTar = listTar; +// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = [ - ...getCompressionProgram(compressionMethod), - '-xf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'extract', archivePath); + yield execCommands(commands); }); } exports.extractTar = extractTar; +// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = 'manifest.txt'; - const cacheFileName = utils.getCacheFileName(compressionMethod); - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); - const workingDirectory = getWorkingDirectory(); - // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. - // zstdmt is equivalent to 'zstd -T0' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 
'zstd -T0' : 'zstdmt']; - default: - return ['-z']; - } - } - const args = [ - '--posix', - ...getCompressionProgram(), - '-cf', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--exclude', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--files-from', - manifestFilename - ]; - yield execTar(args, compressionMethod, archiveFolder); + fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); + const commands = yield getCommands(compressionMethod, 'create'); + yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; @@ -105201,12 +105332,12 @@ __nccwpck_require__.d(forms_namespaceObject, { "serializeArray": () => (serializeArray) }); -;// CONCATENATED MODULE: external "node:path" -const external_node_path_namespaceObject = require("node:path"); // EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js var lib_core = __nccwpck_require__(42186); ;// CONCATENATED MODULE: external "node:os" const external_node_os_namespaceObject = require("node:os"); +;// CONCATENATED MODULE: external "node:path" +const external_node_path_namespaceObject = require("node:path"); ;// CONCATENATED MODULE: external "node:process" const external_node_process_namespaceObject = require("node:process"); // EXTERNAL MODULE: ./node_modules/@actions/cache/lib/cache.js @@ -122571,26 +122702,13 @@ async function trimDuneCache() { - - async function run() { try { - const platform = system_getPlatform(); - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const originalPath = process.env["PATH"].split(external_node_path_namespaceObject.delimiter); - if (platform === "win32" /* Platform.Win32 */) { - const msys64Path = external_node_path_namespaceObject.join("C:", "msys64", "usr", "bin"); - const patchedPath = [msys64Path, ...originalPath]; - process.env["PATH"] = patchedPath.join(external_node_path_namespaceObject.delimiter); - } if (DUNE_CACHE) { await trimDuneCache(); await saveDuneCache(); } await saveOpamDownloadCache(); - if (platform === "win32" /* Platform.Win32 */) { - process.env["PATH"] = originalPath.join(external_node_path_namespaceObject.delimiter); - } } catch (error) { if (error instanceof Error) { diff --git a/package.json b/package.json index 76630b4c..09f5fb68 100644 --- a/package.json +++ b/package.json @@ -14,7 +14,7 @@ "build": "npm-run-all -p 'build:**'" }, "dependencies": { - "@actions/cache": "3.0.6", + "@actions/cache": "3.1.4", "@actions/core": "1.10.0", "@actions/exec": "1.1.1", "@actions/github": "5.1.1", diff --git a/src/setup-ocaml/installer.ts b/src/setup-ocaml/installer.ts index ecbfd0fe..af41e1b6 100644 --- a/src/setup-ocaml/installer.ts +++ b/src/setup-ocaml/installer.ts @@ -70,26 +70,9 @@ export async function installer(): Promise { core.exportVariable("MSYS", "winsymlinks:native"); } if (platform === Platform.Win32) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const originalPath = process.env["PATH"]!.split(path.delimiter); - const msys64Path = path.join("C:", "msys64", "usr", "bin"); - const patchedPath = [msys64Path, ...originalPath]; - process.env["PATH"] = patchedPath.join(path.delimiter); await restoreCygwinCache(); - process.env["PATH"] = originalPath.join(path.delimiter); - } - let opamCacheHit; - if (platform === Platform.Win32) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const originalPath = 
process.env["PATH"]!.split(path.delimiter); - const msys64Path = path.join("C:", "msys64", "usr", "bin"); - const patchedPath = [msys64Path, ...originalPath]; - process.env["PATH"] = patchedPath.join(path.delimiter); - opamCacheHit = await restoreOpamCache(); - process.env["PATH"] = originalPath.join(path.delimiter); - } else { - opamCacheHit = await restoreOpamCache(); } + const opamCacheHit = await restoreOpamCache(); await setupOpam(); await repositoryRemoveAll(); await repositoryAddAll(OPAM_REPOSITORIES); @@ -100,44 +83,14 @@ export async function installer(): Promise { : `ocaml-base-compiler.${await resolveVersion(OCAML_COMPILER)}` : OCAML_COMPILER; await installOcaml(ocamlCompiler); - if (platform === Platform.Win32) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const originalPath = process.env["PATH"]!.split(path.delimiter); - const msys64Path = path.join("C:", "msys64", "usr", "bin"); - const patchedPath = [msys64Path, ...originalPath]; - process.env["PATH"] = patchedPath.join(path.delimiter); - await saveOpamCache(); - process.env["PATH"] = originalPath.join(path.delimiter); - } else { - await saveOpamCache(); - } - } - if (platform === Platform.Win32) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const originalPath = process.env["PATH"]!.split(path.delimiter); - const msys64Path = path.join("C:", "msys64", "usr", "bin"); - const patchedPath = [msys64Path, ...originalPath]; - process.env["PATH"] = patchedPath.join(path.delimiter); - await restoreOpamDownloadCache(); - process.env["PATH"] = originalPath.join(path.delimiter); - } else { - await restoreOpamDownloadCache(); + await saveOpamCache(); } + await restoreOpamDownloadCache(); if (OPAM_DEPEXT) { await installDepext(platform); } if (DUNE_CACHE) { - if (platform === Platform.Win32) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const originalPath = process.env["PATH"]!.split(path.delimiter); - const msys64Path = path.join("C:", "msys64", "usr", "bin"); - const patchedPath = [msys64Path, ...originalPath]; - process.env["PATH"] = patchedPath.join(path.delimiter); - await restoreDuneCache(); - process.env["PATH"] = originalPath.join(path.delimiter); - } else { - await restoreDuneCache(); - } + await restoreDuneCache(); await installDune(); core.exportVariable("DUNE_CACHE", "enabled"); core.exportVariable("DUNE_CACHE_TRANSPORT", "direct"); diff --git a/src/setup-ocaml/opam.ts b/src/setup-ocaml/opam.ts index a9eabdae..8efd7de7 100644 --- a/src/setup-ocaml/opam.ts +++ b/src/setup-ocaml/opam.ts @@ -212,11 +212,9 @@ async function acquireOpamWindows() { const cachedPath = tc.find("opam", opamVersion); if (cachedPath === "") { const downloadedPath = await tc.downloadTool( - `https://github.com/fdopen/opam-repository-mingw/releases/download/${opamVersion}/opam64.tar.xz` + `https://github.com/fdopen/opam-repository-mingw/releases/download/${opamVersion}/opam64.zip` ); - const extractedPath = await tc.extractTar(downloadedPath, undefined, [ - "xv", - ]); + const extractedPath = await tc.extractZip(downloadedPath); const cachedPath = await tc.cacheDir(extractedPath, "opam", opamVersion); const installSh = path.join(cachedPath, "opam64", "install.sh"); await fs.chmod(installSh, 0o755); @@ -257,11 +255,11 @@ async function setupOpamWindows() { core.addPath(CYGWIN_ROOT_WRAPPERBIN); await setupCygwin(); core.endGroup(); + await saveCygwinCache(); // eslint-disable-next-line @typescript-eslint/no-non-null-assertion const originalPath = 
process.env["PATH"]!.split(path.delimiter); const patchedPath = [CYGWIN_ROOT_BIN, ...originalPath]; process.env["PATH"] = patchedPath.join(path.delimiter); - await saveCygwinCache(); core.startGroup("Install opam"); await acquireOpamWindows(); core.endGroup(); diff --git a/src/setup-ocaml/post.ts b/src/setup-ocaml/post.ts index 7c6d34e2..4461d720 100644 --- a/src/setup-ocaml/post.ts +++ b/src/setup-ocaml/post.ts @@ -1,30 +1,16 @@ -import * as path from "node:path"; - import * as core from "@actions/core"; import { saveDuneCache, saveOpamDownloadCache } from "./cache"; -import { DUNE_CACHE, Platform } from "./constants"; +import { DUNE_CACHE } from "./constants"; import { trimDuneCache } from "./dune"; -import { getPlatform } from "./system"; async function run() { try { - const platform = getPlatform(); - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const originalPath = process.env["PATH"]!.split(path.delimiter); - if (platform === Platform.Win32) { - const msys64Path = path.join("C:", "msys64", "usr", "bin"); - const patchedPath = [msys64Path, ...originalPath]; - process.env["PATH"] = patchedPath.join(path.delimiter); - } if (DUNE_CACHE) { await trimDuneCache(); await saveDuneCache(); } await saveOpamDownloadCache(); - if (platform === Platform.Win32) { - process.env["PATH"] = originalPath.join(path.delimiter); - } } catch (error) { if (error instanceof Error) { core.error(error.message); diff --git a/yarn.lock b/yarn.lock index fe4daa8f..f4815e38 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5,9 +5,9 @@ __metadata: version: 6 cacheKey: 8 -"@actions/cache@npm:3.0.6": - version: 3.0.6 - resolution: "@actions/cache@npm:3.0.6" +"@actions/cache@npm:3.1.4": + version: 3.1.4 + resolution: "@actions/cache@npm:3.1.4" dependencies: "@actions/core": ^1.10.0 "@actions/exec": ^1.0.1 @@ -19,7 +19,7 @@ __metadata: "@azure/storage-blob": ^12.8.0 semver: ^6.1.0 uuid: ^3.3.3 - checksum: 1cc63d07ad514454d7fb4d2cde928157d31847c2b9eafb081fbd518c8619ed6a66c9bd570d60787610ad152bd69839358176f03f1ee5f9d1baa660236cdfcfdf + checksum: 6816cda7819a9dbd0cf69ac4f37ed287ce4c176ff5929b91046f626b1880e21557ed4fb45ae62b9812ae6e0a55205c9fbe598f28a2a69afdaa452151c244a44e languageName: node linkType: hard @@ -479,7 +479,14 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:*, @types/node@npm:18.13.0": +"@types/node@npm:*": + version: 18.14.0 + resolution: "@types/node@npm:18.14.0" + checksum: d83fcf5e4ed544755dd9028f5cbb6b9d46235043159111bb2ad62223729aee581c0144a9f6df8ba73d74011db9ed4ebd7af2fd5e0996714e3beb508a5da8ac5c + languageName: node + linkType: hard + +"@types/node@npm:18.13.0": version: 18.13.0 resolution: "@types/node@npm:18.13.0" checksum: 4ea10f8802848b01672bce938f678b6774ca2cee0c9774f12275ab064ae07818419c3e2e41d6257ce7ba846d1ea26c63214aa1dfa4166fa3746291752b8c6416 @@ -1547,11 +1554,11 @@ __metadata: linkType: hard "esquery@npm:^1.4.0": - version: 1.4.0 - resolution: "esquery@npm:1.4.0" + version: 1.4.2 + resolution: "esquery@npm:1.4.2" dependencies: estraverse: ^5.1.0 - checksum: a0807e17abd7fbe5fbd4fab673038d6d8a50675cdae6b04fbaa520c34581be0c5fa24582990e8acd8854f671dd291c78bb2efb9e0ed5b62f33bac4f9cf820210 + checksum: 2f4ad89c5aafaca61cc2c15e256190f0d6deb4791cae6552d3cb4b1eb8867958cdf27a56aaa3272ff17435e3eaa19ee0d4129fac336ca6373d7354d7b5da7966 languageName: node linkType: hard @@ -2422,9 +2429,9 @@ __metadata: linkType: hard "lru-cache@npm:^7.7.1": - version: 7.14.1 - resolution: "lru-cache@npm:7.14.1" - checksum: 
d72c6713c6a6d86836a7a6523b3f1ac6764768cca47ec99341c3e76db06aacd4764620e5e2cda719a36848785a52a70e531822dc2b33fb071fa709683746c104 + version: 7.16.1 + resolution: "lru-cache@npm:7.16.1" + checksum: 64618e3ed4fd1203afedd9bbf5247921b1419f8e3100f20e58e5f04e741f8287bd7d04fefaad332411bb53b3a73445714b235de750cf5d310cba1fa23bd82795 languageName: node linkType: hard @@ -3129,7 +3136,7 @@ __metadata: version: 0.0.0-use.local resolution: "root-workspace-0b6124@workspace:." dependencies: - "@actions/cache": 3.0.6 + "@actions/cache": 3.1.4 "@actions/core": 1.10.0 "@actions/exec": 1.1.1 "@actions/github": 5.1.1 From 6b9e48f325509faa38ebb4bdce072a2c81b3a04c Mon Sep 17 00:00:00 2001 From: Sora Morimoto Date: Tue, 21 Feb 2023 09:40:03 +0900 Subject: [PATCH 2/3] Update npm deps Signed-off-by: Sora Morimoto --- lint-fmt/index.js | 5 +- package.json | 8 +-- yarn.lock | 121 ++++++++++++++++++++++------------------------ 3 files changed, 64 insertions(+), 70 deletions(-) diff --git a/lint-fmt/index.js b/lint-fmt/index.js index 01b182e2..375e0715 100644 --- a/lint-fmt/index.js +++ b/lint-fmt/index.js @@ -3724,6 +3724,7 @@ var floor = Math.floor; var charAt = uncurryThis(''.charAt); var replace = uncurryThis(''.replace); var stringSlice = uncurryThis(''.slice); +// eslint-disable-next-line redos/no-vulnerable -- safe var SUBSTITUTION_SYMBOLS = /\$([$&'`]|\d{1,2}|<[^>]*>)/g; var SUBSTITUTION_SYMBOLS_NO_NAMED = /\$([$&'`]|\d{1,2})/g; @@ -4840,10 +4841,10 @@ var store = __nccwpck_require__(9557); (module.exports = function (key, value) { return store[key] || (store[key] = value !== undefined ? value : {}); })('versions', []).push({ - version: '3.27.2', + version: '3.28.0', mode: IS_PURE ? 'pure' : 'global', copyright: '© 2014-2023 Denis Pushkarev (zloirock.ru)', - license: 'https://github.com/zloirock/core-js/blob/v3.27.2/LICENSE', + license: 'https://github.com/zloirock/core-js/blob/v3.28.0/LICENSE', source: 'https://github.com/zloirock/core-js' }); diff --git a/package.json b/package.json index 09f5fb68..ae8e9695 100644 --- a/package.json +++ b/package.json @@ -23,17 +23,17 @@ "@actions/io": "1.1.2", "@actions/tool-cache": "2.0.1", "cheerio": "1.0.0-rc.12", - "core-js": "3.27.2", + "core-js": "3.28.0", "date-fns": "2.29.3", "semver": "7.3.8", "yaml": "2.2.1" }, "devDependencies": { "@tsconfig/strictest": "1.0.2", - "@types/node": "18.13.0", + "@types/node": "18.14.0", "@types/semver": "7.3.13", - "@typescript-eslint/eslint-plugin": "5.51.0", - "@typescript-eslint/parser": "5.51.0", + "@typescript-eslint/eslint-plugin": "5.53.0", + "@typescript-eslint/parser": "5.53.0", "@vercel/ncc": "0.36.1", "eslint": "8.34.0", "eslint-config-prettier": "8.6.0", diff --git a/yarn.lock b/yarn.lock index f4815e38..a56e18dd 100644 --- a/yarn.lock +++ b/yarn.lock @@ -479,20 +479,13 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:*": +"@types/node@npm:*, @types/node@npm:18.14.0": version: 18.14.0 resolution: "@types/node@npm:18.14.0" checksum: d83fcf5e4ed544755dd9028f5cbb6b9d46235043159111bb2ad62223729aee581c0144a9f6df8ba73d74011db9ed4ebd7af2fd5e0996714e3beb508a5da8ac5c languageName: node linkType: hard -"@types/node@npm:18.13.0": - version: 18.13.0 - resolution: "@types/node@npm:18.13.0" - checksum: 4ea10f8802848b01672bce938f678b6774ca2cee0c9774f12275ab064ae07818419c3e2e41d6257ce7ba846d1ea26c63214aa1dfa4166fa3746291752b8c6416 - languageName: node - linkType: hard - "@types/semver@npm:7.3.13, @types/semver@npm:^7.3.12": version: 7.3.13 resolution: "@types/semver@npm:7.3.13" @@ -509,13 +502,13 @@ __metadata: 
languageName: node linkType: hard -"@typescript-eslint/eslint-plugin@npm:5.51.0": - version: 5.51.0 - resolution: "@typescript-eslint/eslint-plugin@npm:5.51.0" +"@typescript-eslint/eslint-plugin@npm:5.53.0": + version: 5.53.0 + resolution: "@typescript-eslint/eslint-plugin@npm:5.53.0" dependencies: - "@typescript-eslint/scope-manager": 5.51.0 - "@typescript-eslint/type-utils": 5.51.0 - "@typescript-eslint/utils": 5.51.0 + "@typescript-eslint/scope-manager": 5.53.0 + "@typescript-eslint/type-utils": 5.53.0 + "@typescript-eslint/utils": 5.53.0 debug: ^4.3.4 grapheme-splitter: ^1.0.4 ignore: ^5.2.0 @@ -529,43 +522,43 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 5351d8cec13bd9867ce4aaf7052aa31c9ca867fc89c620fc0fe5718ac2cbc165903275db59974324d98e45df0d33a73a4367d236668772912731031a672cfdcd + checksum: 12dffe65969d8e5248c86a700fe46a737e55ecafb276933e747b4731eab6266fe55e2d43a34b8b340179fe248e127d861cd016a7614b1b9804cd0687c99616d1 languageName: node linkType: hard -"@typescript-eslint/parser@npm:5.51.0": - version: 5.51.0 - resolution: "@typescript-eslint/parser@npm:5.51.0" +"@typescript-eslint/parser@npm:5.53.0": + version: 5.53.0 + resolution: "@typescript-eslint/parser@npm:5.53.0" dependencies: - "@typescript-eslint/scope-manager": 5.51.0 - "@typescript-eslint/types": 5.51.0 - "@typescript-eslint/typescript-estree": 5.51.0 + "@typescript-eslint/scope-manager": 5.53.0 + "@typescript-eslint/types": 5.53.0 + "@typescript-eslint/typescript-estree": 5.53.0 debug: ^4.3.4 peerDependencies: eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 peerDependenciesMeta: typescript: optional: true - checksum: 096ec819132839febd4f390c4bbf31687e06191092c244dbd189a64cd7383fbaba728f2765e8809cd9834c0069163ab38b0e5f0f6360157d831647d4c295f8cd + checksum: 979e5d63793a9e64998b1f956ba0f00f8a2674db3a664fafce7b2433323f5248bd776af8305e2419d73a9d94c55176fee099abc5c153b4cc52e5765c725c1edd languageName: node linkType: hard -"@typescript-eslint/scope-manager@npm:5.51.0": - version: 5.51.0 - resolution: "@typescript-eslint/scope-manager@npm:5.51.0" +"@typescript-eslint/scope-manager@npm:5.53.0": + version: 5.53.0 + resolution: "@typescript-eslint/scope-manager@npm:5.53.0" dependencies: - "@typescript-eslint/types": 5.51.0 - "@typescript-eslint/visitor-keys": 5.51.0 - checksum: b3c9f48b6b7a7ae2ebcad4745ef91e4727776b2cf56d31be6456b1aa063aa649539e20f9fffa83cad9ccaaa9c492f2354a1c15526a2b789e235ec58b3a82d22c + "@typescript-eslint/types": 5.53.0 + "@typescript-eslint/visitor-keys": 5.53.0 + checksum: 51f31dc01e95908611f402441f58404da80a338c0237b2b82f4a7b0b2e8868c4bfe8f7cf44b2567dd56533de609156a5d4ac54bb1f9f09c7014b99428aef2543 languageName: node linkType: hard -"@typescript-eslint/type-utils@npm:5.51.0": - version: 5.51.0 - resolution: "@typescript-eslint/type-utils@npm:5.51.0" +"@typescript-eslint/type-utils@npm:5.53.0": + version: 5.53.0 + resolution: "@typescript-eslint/type-utils@npm:5.53.0" dependencies: - "@typescript-eslint/typescript-estree": 5.51.0 - "@typescript-eslint/utils": 5.51.0 + "@typescript-eslint/typescript-estree": 5.53.0 + "@typescript-eslint/utils": 5.53.0 debug: ^4.3.4 tsutils: ^3.21.0 peerDependencies: @@ -573,23 +566,23 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: ab9747b0c629cfaaab903eed8ce1e39d34d69a402ce5faf2f1fff2bbb461bdbe034044b1368ba67ba8e5c1c512172e07d83c8a563635d8de811bf148d95c7dec + checksum: 52c40967c5fabd58c2ae8bf519ef89e4feb511e4df630aeaeac8335661a79b6b3a32d30a61a5f1d8acc703f21c4d90751a5d41cda1b35d08867524da11bc2e1d languageName: node linkType: hard 
-"@typescript-eslint/types@npm:5.51.0": - version: 5.51.0 - resolution: "@typescript-eslint/types@npm:5.51.0" - checksum: b31021a0866f41ba5d71b6c4c7e20cc9b99d49c93bb7db63b55b2e51542fb75b4e27662ee86350da3c1318029e278a5a807facaf4cb5aeea724be8b0e021e836 +"@typescript-eslint/types@npm:5.53.0": + version: 5.53.0 + resolution: "@typescript-eslint/types@npm:5.53.0" + checksum: b0eaf23de4ab13697d4d2095838c959a3f410c30f0d19091e5ca08e62320c3cc3c72bcb631823fb6a4fbb31db0a059e386a0801244930d0a88a6a698e5f46548 languageName: node linkType: hard -"@typescript-eslint/typescript-estree@npm:5.51.0": - version: 5.51.0 - resolution: "@typescript-eslint/typescript-estree@npm:5.51.0" +"@typescript-eslint/typescript-estree@npm:5.53.0": + version: 5.53.0 + resolution: "@typescript-eslint/typescript-estree@npm:5.53.0" dependencies: - "@typescript-eslint/types": 5.51.0 - "@typescript-eslint/visitor-keys": 5.51.0 + "@typescript-eslint/types": 5.53.0 + "@typescript-eslint/visitor-keys": 5.53.0 debug: ^4.3.4 globby: ^11.1.0 is-glob: ^4.0.3 @@ -598,35 +591,35 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: aec23e5cab48ee72fefa6d1ac266639ebabf6cebec1e0207ad47011d3a48186ac9a632c8e34c3bac896155f54895a497230c11d789fd81263b08eb267d7113ce + checksum: 6e119c8e4167c8495d728c5556a834545a9c064918dd5e7b79b0d836726f4f8e2a0297b0ac82bf2b71f1e5427552217d0b59d8fb1406fd79bd3bf91b75dca873 languageName: node linkType: hard -"@typescript-eslint/utils@npm:5.51.0": - version: 5.51.0 - resolution: "@typescript-eslint/utils@npm:5.51.0" +"@typescript-eslint/utils@npm:5.53.0": + version: 5.53.0 + resolution: "@typescript-eslint/utils@npm:5.53.0" dependencies: "@types/json-schema": ^7.0.9 "@types/semver": ^7.3.12 - "@typescript-eslint/scope-manager": 5.51.0 - "@typescript-eslint/types": 5.51.0 - "@typescript-eslint/typescript-estree": 5.51.0 + "@typescript-eslint/scope-manager": 5.53.0 + "@typescript-eslint/types": 5.53.0 + "@typescript-eslint/typescript-estree": 5.53.0 eslint-scope: ^5.1.1 eslint-utils: ^3.0.0 semver: ^7.3.7 peerDependencies: eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 - checksum: c6e28c942fbac5500f0e8ed67ef304b484ba296486e55306f78fb090dc9d5bb1f25a0bedc065e14680041eadce5e95fa10aab618cb0c316599ec987e6ea72442 + checksum: 18e6bac14ae853385a74123759850bca367904723e170c37416fc014673eb714afb6bb090367bff61494a8387e941b6af65ee5f4f845f7177fabb4df85e01643 languageName: node linkType: hard -"@typescript-eslint/visitor-keys@npm:5.51.0": - version: 5.51.0 - resolution: "@typescript-eslint/visitor-keys@npm:5.51.0" +"@typescript-eslint/visitor-keys@npm:5.53.0": + version: 5.53.0 + resolution: "@typescript-eslint/visitor-keys@npm:5.53.0" dependencies: - "@typescript-eslint/types": 5.51.0 + "@typescript-eslint/types": 5.53.0 eslint-visitor-keys: ^3.3.0 - checksum: b49710f3c6b3b62a846a163afffd81be5eb2b1f44e25bec51ff3c9f4c3b579d74aa4cbd3753b4fc09ea3dbc64a7062f9c658c08d22bb2740a599cb703d876220 + checksum: 090695883c15364c6f401e97f56b13db0f31c1114f3bd22562bd41734864d27f6a3c80de33957e9dedab2d5f94b0f4480ba3fde1d4574e74dca4593917b7b54a languageName: node linkType: hard @@ -1044,10 +1037,10 @@ __metadata: languageName: node linkType: hard -"core-js@npm:3.27.2": - version: 3.27.2 - resolution: "core-js@npm:3.27.2" - checksum: 718debd426f55a6b97cf9b757c936be258afd6d4f7052f89d0f96c982d7013e9000b0b006df42831a0cf32adad298e34d6a19052dce9ae1c7ab87162c0c665e0 +"core-js@npm:3.28.0": + version: 3.28.0 + resolution: "core-js@npm:3.28.0" + checksum: 
3155fd0ec16d0089106b145e9595280a4ea4bde0d7ff26aa14364cd4f1c203baf6620c3025acd284f363d08b9f21104101692766ca9a36ffeee7307bdf3e1881 languageName: node linkType: hard @@ -3145,13 +3138,13 @@ __metadata: "@actions/io": 1.1.2 "@actions/tool-cache": 2.0.1 "@tsconfig/strictest": 1.0.2 - "@types/node": 18.13.0 + "@types/node": 18.14.0 "@types/semver": 7.3.13 - "@typescript-eslint/eslint-plugin": 5.51.0 - "@typescript-eslint/parser": 5.51.0 + "@typescript-eslint/eslint-plugin": 5.53.0 + "@typescript-eslint/parser": 5.53.0 "@vercel/ncc": 0.36.1 cheerio: 1.0.0-rc.12 - core-js: 3.27.2 + core-js: 3.28.0 date-fns: 2.29.3 eslint: 8.34.0 eslint-config-prettier: 8.6.0 From 44e5af182bd6c6945188bf5f9010272ef747a505 Mon Sep 17 00:00:00 2001 From: Sora Morimoto Date: Tue, 21 Feb 2023 09:41:14 +0900 Subject: [PATCH 3/3] Update CHANGELOG.md Signed-off-by: Sora Morimoto --- CHANGELOG.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ae4d172f..fec4d2c2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,11 +8,15 @@ and this project adheres to ## [unreleased] +### Fixed + +- Speed up caching and get rid of bugs and hacks on Windows. + ## [2.0.9] ### Fixed -- Take the sandbox option value into account when computing the cache key +- Take the sandbox option value into account when computing the cache key. ## [2.0.8]
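
Note on the caching changes: the installer.ts and post.ts hunks above drop the repeated msys64 PATH save/patch/restore blocks, so every cache helper is now called directly on every platform. The following is only an illustrative sketch of that pattern, not the exact call order in installer(); it assumes the restore helpers are exported from ./cache alongside the save helpers imported in post.ts, that restoreOpamCache resolves to the matched key or undefined on a miss, and the wrapper name restoreAllCaches is hypothetical.

    import { DUNE_CACHE } from "./constants";
    import {
      restoreDuneCache,
      restoreOpamCache,
      restoreOpamDownloadCache,
    } from "./cache"; // assumed export location, mirroring post.ts imports

    // Sketch only: with @actions/cache 3.1.4 the same calls work on Windows
    // without temporarily prepending C:\msys64\usr\bin to PATH.
    async function restoreAllCaches(): Promise<boolean> {
      const opamCacheHit = await restoreOpamCache(); // matched key, or undefined on a miss
      await restoreOpamDownloadCache();
      if (DUNE_CACHE) {
        await restoreDuneCache();
      }
      return Boolean(opamCacheHit);
    }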