From e6d7c6c0b77f8033a9678179f415735315489e9b Mon Sep 17 00:00:00 2001
From: Prajjwal
Date: Wed, 16 Jul 2025 18:08:17 +0530
Subject: [PATCH] updates cache version

---
 dist/cleanup/index.js | 160 ++++++++++++++++++++++++++++++++++++++----
 dist/setup/index.js   | 160 ++++++++++++++++++++++++++++++++++++++----
 package-lock.json     |  17 ++---
 package.json          |   2 +-
 4 files changed, 306 insertions(+), 33 deletions(-)

diff --git a/dist/cleanup/index.js b/dist/cleanup/index.js
index bf4f520c3..e35ddd9cb 100644
--- a/dist/cleanup/index.js
+++ b/dist/cleanup/index.js
@@ -420,9 +420,6 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.deleteCache = exports.saveCache = exports.reserveCache = exports.downloadCacheStreaming = exports.downloadCacheSingleThread = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0;
 const core = __importStar(__nccwpck_require__(42186));
@@ -431,7 +428,6 @@ const http_client_1 = __nccwpck_require__(96255);
 const auth_1 = __nccwpck_require__(35526);
 const crypto = __importStar(__nccwpck_require__(6113));
 const utils = __importStar(__nccwpck_require__(91518));
-const os_1 = __importDefault(__nccwpck_require__(22037));
 const downloadUtils_1 = __nccwpck_require__(55500);
 const requestUtils_1 = __nccwpck_require__(13981);
 const storage_1 = __nccwpck_require__(27577);
@@ -607,8 +603,15 @@ function downloadCache(provider, archiveLocation, archivePath, gcsToken) {
     return __awaiter(this, void 0, void 0, function* () {
         switch (provider) {
             case 's3': {
-                const numberOfConnections = 2 + os_1.default.cpus().length;
-                yield (0, downloadUtils_1.downloadCacheMultiConnection)(archiveLocation, archivePath, Math.min(numberOfConnections, 30));
+                // const numberOfConnections = 2 + os.cpus().length
+                // await downloadCacheMultiConnection(
+                //   archiveLocation,
+                //   archivePath,
+                //   Math.min(numberOfConnections, 30)
+                // )
+                yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, {
+                    timeoutInMs: 30000
+                });
             }
             break;
             case 'gcs': {
@@ -1125,7 +1128,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getDownloadCommandPipeForWget = exports.downloadCacheStreamingGCP = exports.downloadCacheGCP = exports.downloadCacheMultipartGCP = exports.downloadCacheMultiConnection = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
+exports.downloadCacheHttpClientConcurrent = exports.getDownloadCommandPipeForWget = exports.downloadCacheStreamingGCP = exports.downloadCacheGCP = exports.downloadCacheMultipartGCP = exports.downloadCacheMultiConnection = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
 const core = __importStar(__nccwpck_require__(42186));
 const http_client_1 = __nccwpck_require__(96255);
 const fs = __importStar(__nccwpck_require__(57147));
@@ -1266,7 +1269,7 @@ exports.DownloadProgress = DownloadProgress;
 function downloadCacheHttpClient(archiveLocation, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         const writeStream = fs.createWriteStream(archivePath);
-        const httpClient = new http_client_1.HttpClient('actions/cache');
+        const httpClient = new http_client_1.HttpClient('Warpbuilds/cache');
         const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
         // Abort download if no traffic received over the socket.
         downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
@@ -1302,7 +1305,7 @@ function downloadCacheMultiConnection(archiveLocation, archivePath, connections)
     let downloadProgress = null;
     try {
         fileHandle = yield fs.promises.open(archivePath, 'w+');
-        const httpClient = new http_client_1.HttpClient('actions/cache');
+        const httpClient = new http_client_1.HttpClient('Warpbuilds/cache');
         //Request 1 byte to get total content size
         const metadataResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () {
             return httpClient.get(archiveLocation, {
@@ -1432,6 +1435,126 @@ function getDownloadCommandPipeForWget(url) {
     return (0, child_process_1.spawn)('wget', ['-qO', '-', url]);
 }
 exports.getDownloadCommandPipeForWget = getDownloadCommandPipeForWget;
+// Newer download tech
+/**
+ * Download the cache using the Actions toolkit http-client concurrently
+ *
+ * @param archiveLocation the URL for the cache
+ * @param archivePath the local path where the cache is saved
+ */
+function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
+    var _a;
+    return __awaiter(this, void 0, void 0, function* () {
+        const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
+        const httpClient = new http_client_1.HttpClient('Warpbuilds/cache', undefined, {
+            socketTimeout: options.timeoutInMs,
+            keepAlive: true
+        });
+        try {
+            const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
+            const lengthHeader = res.message.headers['content-length'];
+            if (lengthHeader === undefined || lengthHeader === null) {
+                throw new Error('Content-Length not found on blob response');
+            }
+            const length = parseInt(lengthHeader);
+            if (Number.isNaN(length)) {
+                throw new Error(`Could not interpret Content-Length: ${length}`);
+            }
+            const downloads = [];
+            const blockSize = 4 * 1024 * 1024;
+            for (let offset = 0; offset < length; offset += blockSize) {
+                const count = Math.min(blockSize, length - offset);
+                downloads.push({
+                    offset,
+                    promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
+                        return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
+                    })
+                });
+            }
+            // reverse to use .pop instead of .shift
+            downloads.reverse();
+            let actives = 0;
+            let bytesDownloaded = 0;
+            const progress = new DownloadProgress(length);
+            progress.startDisplayTimer();
+            const progressFn = progress.onProgress();
+            const activeDownloads = [];
+            let nextDownload;
+            const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
+                const segment = yield Promise.race(Object.values(activeDownloads));
+                yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
+                actives--;
+                delete activeDownloads[segment.offset];
+                bytesDownloaded += segment.count;
+                progressFn({ loadedBytes: bytesDownloaded });
+            });
+            while ((nextDownload = downloads.pop())) {
+                activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
+                actives++;
+                if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
+                    yield waitAndWrite();
+                }
+            }
+            while (actives > 0) {
+                yield waitAndWrite();
+            }
+        }
+        finally {
+            httpClient.dispose();
+            yield archiveDescriptor.close();
+        }
+    });
+}
+exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
+function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const retries = 5;
+        let failures = 0;
+        while (true) {
+            try {
+                const timeout = 30000;
+                const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
+                if (typeof result === 'string') {
+                    throw new Error('downloadSegmentRetry failed due to timeout');
+                }
+                return result;
+            }
+            catch (err) {
+                if (failures >= retries) {
+                    throw err;
+                }
+                failures++;
+            }
+        }
+    });
+}
+function downloadSegment(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
+            return yield httpClient.get(archiveLocation, {
+                Range: `bytes=${offset}-${offset + count - 1}`
+            });
+        }));
+        if (!partRes.readBodyBuffer) {
+            throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
+        }
+        return {
+            offset,
+            count,
+            buffer: yield partRes.readBodyBuffer()
+        };
+    });
+}
+const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
+    let timeoutHandle;
+    const timeoutPromise = new Promise(resolve => {
+        timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
+    });
+    return Promise.race([promise, timeoutPromise]).then(result => {
+        clearTimeout(timeoutHandle);
+        return result;
+    });
+});
 //# sourceMappingURL=downloadUtils.js.map
 
 /***/ }),
@@ -9464,7 +9587,7 @@ class HttpClient {
         }
         const usingSsl = parsedUrl.protocol === 'https:';
         proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {
-            token: `${proxyUrl.username}:${proxyUrl.password}`
+            token: `Basic ${Buffer.from(`${proxyUrl.username}:${proxyUrl.password}`).toString('base64')}`
         })));
         this._proxyAgentDispatcher = proxyAgent;
         if (usingSsl && this._ignoreSslError) {
@@ -9578,11 +9701,11 @@ function getProxyUrl(reqUrl) {
     })();
     if (proxyVar) {
         try {
-            return new URL(proxyVar);
+            return new DecodedURL(proxyVar);
         }
         catch (_a) {
             if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
-                return new URL(`http://${proxyVar}`);
+                return new DecodedURL(`http://${proxyVar}`);
         }
     }
     else {
@@ -9641,6 +9764,19 @@ function isLoopbackAddress(host) {
         hostLower.startsWith('[::1]') ||
         hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
 }
+class DecodedURL extends URL {
+    constructor(url, base) {
+        super(url, base);
+        this._decodedUsername = decodeURIComponent(super.username);
+        this._decodedPassword = decodeURIComponent(super.password);
+    }
+    get username() {
+        return this._decodedUsername;
+    }
+    get password() {
+        return this._decodedPassword;
+    }
+}
 //# sourceMappingURL=proxy.js.map
 
 /***/ }),
diff --git a/dist/setup/index.js b/dist/setup/index.js
index c12ac96d3..d19e9482c 100644
--- a/dist/setup/index.js
+++ b/dist/setup/index.js
@@ -420,9 +420,6 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.deleteCache = exports.saveCache = exports.reserveCache = exports.downloadCacheStreaming = exports.downloadCacheSingleThread = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0;
 const core = __importStar(__nccwpck_require__(42186));
@@ -431,7 +428,6 @@ const http_client_1 = __nccwpck_require__(96255);
 const auth_1 = __nccwpck_require__(35526);
 const crypto = __importStar(__nccwpck_require__(6113));
 const utils = __importStar(__nccwpck_require__(91518));
-const os_1 = __importDefault(__nccwpck_require__(22037));
 const downloadUtils_1 = __nccwpck_require__(55500);
 const requestUtils_1 = __nccwpck_require__(13981);
 const storage_1 = __nccwpck_require__(27577);
@@ -607,8 +603,15 @@ function downloadCache(provider, archiveLocation, archivePath, gcsToken) {
     return __awaiter(this, void 0, void 0, function* () {
         switch (provider) {
             case 's3': {
-                const numberOfConnections = 2 + os_1.default.cpus().length;
-                yield (0, downloadUtils_1.downloadCacheMultiConnection)(archiveLocation, archivePath, Math.min(numberOfConnections, 30));
+                // const numberOfConnections = 2 + os.cpus().length
+                // await downloadCacheMultiConnection(
+                //   archiveLocation,
+                //   archivePath,
+                //   Math.min(numberOfConnections, 30)
+                // )
+                yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, {
+                    timeoutInMs: 30000
+                });
             }
             break;
             case 'gcs': {
@@ -1125,7 +1128,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getDownloadCommandPipeForWget = exports.downloadCacheStreamingGCP = exports.downloadCacheGCP = exports.downloadCacheMultipartGCP = exports.downloadCacheMultiConnection = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
+exports.downloadCacheHttpClientConcurrent = exports.getDownloadCommandPipeForWget = exports.downloadCacheStreamingGCP = exports.downloadCacheGCP = exports.downloadCacheMultipartGCP = exports.downloadCacheMultiConnection = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
 const core = __importStar(__nccwpck_require__(42186));
 const http_client_1 = __nccwpck_require__(96255);
 const fs = __importStar(__nccwpck_require__(57147));
@@ -1266,7 +1269,7 @@ exports.DownloadProgress = DownloadProgress;
 function downloadCacheHttpClient(archiveLocation, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         const writeStream = fs.createWriteStream(archivePath);
-        const httpClient = new http_client_1.HttpClient('actions/cache');
+        const httpClient = new http_client_1.HttpClient('Warpbuilds/cache');
         const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
         // Abort download if no traffic received over the socket.
         downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
@@ -1302,7 +1305,7 @@ function downloadCacheMultiConnection(archiveLocation, archivePath, connections)
     let downloadProgress = null;
     try {
         fileHandle = yield fs.promises.open(archivePath, 'w+');
-        const httpClient = new http_client_1.HttpClient('actions/cache');
+        const httpClient = new http_client_1.HttpClient('Warpbuilds/cache');
         //Request 1 byte to get total content size
         const metadataResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () {
             return httpClient.get(archiveLocation, {
@@ -1432,6 +1435,126 @@ function getDownloadCommandPipeForWget(url) {
     return (0, child_process_1.spawn)('wget', ['-qO', '-', url]);
 }
 exports.getDownloadCommandPipeForWget = getDownloadCommandPipeForWget;
+// Newer download tech
+/**
+ * Download the cache using the Actions toolkit http-client concurrently
+ *
+ * @param archiveLocation the URL for the cache
+ * @param archivePath the local path where the cache is saved
+ */
+function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
+    var _a;
+    return __awaiter(this, void 0, void 0, function* () {
+        const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
+        const httpClient = new http_client_1.HttpClient('Warpbuilds/cache', undefined, {
+            socketTimeout: options.timeoutInMs,
+            keepAlive: true
+        });
+        try {
+            const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
+            const lengthHeader = res.message.headers['content-length'];
+            if (lengthHeader === undefined || lengthHeader === null) {
+                throw new Error('Content-Length not found on blob response');
+            }
+            const length = parseInt(lengthHeader);
+            if (Number.isNaN(length)) {
+                throw new Error(`Could not interpret Content-Length: ${length}`);
+            }
+            const downloads = [];
+            const blockSize = 4 * 1024 * 1024;
+            for (let offset = 0; offset < length; offset += blockSize) {
+                const count = Math.min(blockSize, length - offset);
+                downloads.push({
+                    offset,
+                    promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
+                        return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
+                    })
+                });
+            }
+            // reverse to use .pop instead of .shift
+            downloads.reverse();
+            let actives = 0;
+            let bytesDownloaded = 0;
+            const progress = new DownloadProgress(length);
+            progress.startDisplayTimer();
+            const progressFn = progress.onProgress();
+            const activeDownloads = [];
+            let nextDownload;
+            const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
+                const segment = yield Promise.race(Object.values(activeDownloads));
+                yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
+                actives--;
+                delete activeDownloads[segment.offset];
+                bytesDownloaded += segment.count;
+                progressFn({ loadedBytes: bytesDownloaded });
+            });
+            while ((nextDownload = downloads.pop())) {
+                activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
+                actives++;
+                if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
+                    yield waitAndWrite();
+                }
+            }
+            while (actives > 0) {
+                yield waitAndWrite();
+            }
+        }
+        finally {
+            httpClient.dispose();
+            yield archiveDescriptor.close();
+        }
+    });
+}
+exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
+function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const retries = 5;
+        let failures = 0;
+        while (true) {
+            try {
+                const timeout = 30000;
+                const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
+                if (typeof result === 'string') {
+                    throw new Error('downloadSegmentRetry failed due to timeout');
+                }
+                return result;
+            }
+            catch (err) {
+                if (failures >= retries) {
+                    throw err;
+                }
+                failures++;
+            }
+        }
+    });
+}
+function downloadSegment(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
+            return yield httpClient.get(archiveLocation, {
+                Range: `bytes=${offset}-${offset + count - 1}`
+            });
+        }));
+        if (!partRes.readBodyBuffer) {
+            throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
+        }
+        return {
+            offset,
+            count,
+            buffer: yield partRes.readBodyBuffer()
+        };
+    });
+}
+const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
+    let timeoutHandle;
+    const timeoutPromise = new Promise(resolve => {
+        timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
+    });
+    return Promise.race([promise, timeoutPromise]).then(result => {
+        clearTimeout(timeoutHandle);
+        return result;
+    });
+});
 //# sourceMappingURL=downloadUtils.js.map
 
 /***/ }),
@@ -9464,7 +9587,7 @@ class HttpClient {
         }
         const usingSsl = parsedUrl.protocol === 'https:';
         proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {
-            token: `${proxyUrl.username}:${proxyUrl.password}`
+            token: `Basic ${Buffer.from(`${proxyUrl.username}:${proxyUrl.password}`).toString('base64')}`
         })));
         this._proxyAgentDispatcher = proxyAgent;
         if (usingSsl && this._ignoreSslError) {
@@ -9578,11 +9701,11 @@ function getProxyUrl(reqUrl) {
     })();
     if (proxyVar) {
         try {
-            return new URL(proxyVar);
+            return new DecodedURL(proxyVar);
         }
         catch (_a) {
             if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
-                return new URL(`http://${proxyVar}`);
+                return new DecodedURL(`http://${proxyVar}`);
         }
     }
     else {
@@ -9641,6 +9764,19 @@ function isLoopbackAddress(host) {
         hostLower.startsWith('[::1]') ||
         hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
 }
+class DecodedURL extends URL {
+    constructor(url, base) {
+        super(url, base);
+        this._decodedUsername = decodeURIComponent(super.username);
+        this._decodedPassword = decodeURIComponent(super.password);
+    }
+    get username() {
+        return this._decodedUsername;
+    }
+    get password() {
+        return this._decodedPassword;
+    }
+}
 //# sourceMappingURL=proxy.js.map
 
 /***/ }),
diff --git a/package-lock.json b/package-lock.json
index 31cc1d9c8..ff72f9482 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -9,7 +9,7 @@
       "version": "4.1.0",
       "license": "MIT",
       "dependencies": {
-        "@actions/cache": "npm:github-actions.warp-cache@1.4.5",
+        "@actions/cache": "npm:github-actions.warp-cache@1.4.6",
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.0.4",
         "@actions/glob": "^0.4.0",
@@ -48,16 +48,16 @@
     },
     "node_modules/@actions/cache": {
       "name": "github-actions.warp-cache",
-      "version": "1.4.5",
-      "resolved": "https://registry.npmjs.org/github-actions.warp-cache/-/github-actions.warp-cache-1.4.5.tgz",
-      "integrity": "sha512-soTIk/9slFpK+InPtBSFFN8JAXOCKTBWQkP+gdylLCmtbZyMsaUAxUxAnsI7G4CsfAqE1IcPlyqDw0ZAtIIFrQ==",
+      "version": "1.4.6",
+      "resolved": "https://registry.npmjs.org/github-actions.warp-cache/-/github-actions.warp-cache-1.4.6.tgz",
+      "integrity": "sha512-WAB/kEi4Knb0PxS0xSWGPKJ0UNU31LzK9vpt6IYpPdk32bXicq/ncxLwN/28zssxNF+f+T1Ch79/nZcjFGUi7g==",
       "license": "MIT",
       "dependencies": {
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.0.1",
         "@actions/github": "^6.0.0",
         "@actions/glob": "^0.1.0",
-        "@actions/http-client": "^2.1.1",
+        "@actions/http-client": "^2.2.3",
         "@actions/io": "^1.0.1",
         "@azure/abort-controller": "^1.1.0",
         "@azure/ms-rest-js": "^2.6.0",
@@ -134,9 +134,10 @@
       }
     },
     "node_modules/@actions/http-client": {
-      "version": "2.2.1",
-      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.1.tgz",
-      "integrity": "sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==",
+      "version": "2.2.3",
+      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz",
+      "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==",
+      "license": "MIT",
       "dependencies": {
         "tunnel": "^0.0.6",
         "undici": "^5.25.4"
diff --git a/package.json b/package.json
index 3451eb72c..de1b85928 100644
--- a/package.json
+++ b/package.json
@@ -26,7 +26,7 @@
   "author": "GitHub",
   "license": "MIT",
   "dependencies": {
-    "@actions/cache": "npm:github-actions.warp-cache@1.4.5",
+    "@actions/cache": "npm:github-actions.warp-cache@1.4.6",
     "@actions/core": "^1.10.0",
     "@actions/exec": "^1.0.4",
    "@actions/glob": "^0.4.0",
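
For reference, the functional change in this patch: S3 downloads now go through
the new downloadCacheHttpClientConcurrent instead of downloadCacheMultiConnection.
It issues a HEAD request to read Content-Length, splits the archive into 4 MiB
segments, keeps at most downloadConcurrency (default 10) ranged GETs in flight,
and writes each segment at its file offset as it completes. Below is a minimal
TypeScript sketch of that chunked Range-request technique, using Node 18's
built-in fetch rather than @actions/http-client; the function name and the
worker-pool loop are illustrative assumptions, not code from this patch, and it
presumes the server honors Range requests (presigned S3 URLs do).

import * as fs from 'node:fs';

const BLOCK_SIZE = 4 * 1024 * 1024; // 4 MiB per segment, as in the patch
const CONCURRENCY = 10; // the patch's default downloadConcurrency

// Hypothetical helper: download `url` to `dest` via bounded-concurrency Range requests.
async function downloadConcurrent(url: string, dest: string): Promise<void> {
    // Learn the total size up front, as the patch does with a HEAD request.
    const head = await fetch(url, { method: 'HEAD' });
    const length = parseInt(head.headers.get('content-length') ?? '', 10);
    if (Number.isNaN(length)) throw new Error('Content-Length not found');
    const file = await fs.promises.open(dest, 'w');
    try {
        // One { offset, count } entry per block of the archive.
        const segments: Array<{ offset: number; count: number }> = [];
        for (let offset = 0; offset < length; offset += BLOCK_SIZE) {
            segments.push({ offset, count: Math.min(BLOCK_SIZE, length - offset) });
        }
        // A fixed pool of async workers drains the segment list; pop() is
        // race-free because all workers share one event loop.
        const workers = Array.from({ length: CONCURRENCY }, async () => {
            let seg: { offset: number; count: number } | undefined;
            while ((seg = segments.pop())) {
                const res = await fetch(url, {
                    headers: { Range: `bytes=${seg.offset}-${seg.offset + seg.count - 1}` }
                });
                if (res.status !== 206) throw new Error(`Expected 206, got ${res.status}`);
                const buf = Buffer.from(await res.arrayBuffer());
                // Positional writes let segments land out of order without
                // buffering the whole archive in memory.
                await file.write(buf, 0, buf.length, seg.offset);
            }
        });
        await Promise.all(workers);
    } finally {
        await file.close();
    }
}

The patch's own implementation bounds concurrency differently -- it races an
offset-keyed map of in-flight promises via Promise.race and writes whichever
segment finishes first -- but the effect is the same: at most N ranged requests
at a time, with out-of-order positional writes.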