Permalink
Cannot retrieve contributors at this time
316 lines (281 sloc)
8.38 KB
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import * as core from '@actions/core' | |
| import {HttpClient} from '@actions/http-client' | |
| import {BearerCredentialHandler} from '@actions/http-client/auth' | |
| import {IRequestOptions, ITypedResponse} from '@actions/http-client/interfaces' | |
| import * as crypto from 'crypto' | |
| import * as fs from 'fs' | |
| import {URL} from 'url' | |
| import * as utils from './cacheUtils' | |
| import {CompressionMethod} from './constants' | |
| import { | |
| ArtifactCacheEntry, | |
| InternalCacheOptions, | |
| CommitCacheRequest, | |
| ReserveCacheRequest, | |
| ReserveCacheResponse | |
| } from './contracts' | |
| import {downloadCacheHttpClient, downloadCacheStorageSDK} from './downloadUtils' | |
| import { | |
| DownloadOptions, | |
| UploadOptions, | |
| getDownloadOptions, | |
| getUploadOptions | |
| } from '../options' | |
| import { | |
| isSuccessStatusCode, | |
| retryHttpClientResponse, | |
| retryTypedResponse | |
| } from './requestUtils' | |
// Salt folded into the version hash in getCacheVersion; bumping it
// invalidates every previously stored cache entry on a breaking change.
const versionSalt = '1.0'
| function getCacheApiUrl(resource: string): string { | |
| // Ideally we just use ACTIONS_CACHE_URL | |
| const baseUrl: string = ( | |
| process.env['ACTIONS_CACHE_URL'] || | |
| process.env['ACTIONS_RUNTIME_URL'] || | |
| '' | |
| ).replace('pipelines', 'artifactcache') | |
| if (!baseUrl) { | |
| throw new Error('Cache Service Url not found, unable to restore cache.') | |
| } | |
| const url = `${baseUrl}_apis/artifactcache/${resource}` | |
| core.debug(`Resource Url: ${url}`) | |
| return url | |
| } | |
| function createAcceptHeader(type: string, apiVersion: string): string { | |
| return `${type};api-version=${apiVersion}` | |
| } | |
| function getRequestOptions(): IRequestOptions { | |
| const requestOptions: IRequestOptions = { | |
| headers: { | |
| Accept: createAcceptHeader('application/json', '6.0-preview.1') | |
| } | |
| } | |
| return requestOptions | |
| } | |
| function createHttpClient(): HttpClient { | |
| const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '' | |
| const bearerCredentialHandler = new BearerCredentialHandler(token) | |
| return new HttpClient( | |
| 'actions/cache', | |
| [bearerCredentialHandler], | |
| getRequestOptions() | |
| ) | |
| } | |
| export function getCacheVersion( | |
| paths: string[], | |
| compressionMethod?: CompressionMethod | |
| ): string { | |
| const components = paths.concat( | |
| !compressionMethod || compressionMethod === CompressionMethod.Gzip | |
| ? [] | |
| : [compressionMethod] | |
| ) | |
| // Add salt to cache version to support breaking changes in cache entry | |
| components.push(versionSalt) | |
| return crypto | |
| .createHash('sha256') | |
| .update(components.join('|')) | |
| .digest('hex') | |
| } | |
| export async function getCacheEntry( | |
| keys: string[], | |
| paths: string[], | |
| options?: InternalCacheOptions | |
| ): Promise<ArtifactCacheEntry | null> { | |
| const httpClient = createHttpClient() | |
| const version = getCacheVersion(paths, options?.compressionMethod) | |
| const resource = `cache?keys=${encodeURIComponent( | |
| keys.join(',') | |
| )}&version=${version}` | |
| const response = await retryTypedResponse('getCacheEntry', async () => | |
| httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource)) | |
| ) | |
| if (response.statusCode === 204) { | |
| return null | |
| } | |
| if (!isSuccessStatusCode(response.statusCode)) { | |
| throw new Error(`Cache service responded with ${response.statusCode}`) | |
| } | |
| const cacheResult = response.result | |
| const cacheDownloadUrl = cacheResult?.archiveLocation | |
| if (!cacheDownloadUrl) { | |
| throw new Error('Cache not found.') | |
| } | |
| core.setSecret(cacheDownloadUrl) | |
| core.debug(`Cache Result:`) | |
| core.debug(JSON.stringify(cacheResult)) | |
| return cacheResult | |
| } | |
| export async function downloadCache( | |
| archiveLocation: string, | |
| archivePath: string, | |
| options?: DownloadOptions | |
| ): Promise<void> { | |
| const archiveUrl = new URL(archiveLocation) | |
| const downloadOptions = getDownloadOptions(options) | |
| if ( | |
| downloadOptions.useAzureSdk && | |
| archiveUrl.hostname.endsWith('.blob.core.windows.net') | |
| ) { | |
| // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. | |
| await downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions) | |
| } else { | |
| // Otherwise, download using the Actions http-client. | |
| await downloadCacheHttpClient(archiveLocation, archivePath) | |
| } | |
| } | |
| // Reserve Cache | |
| export async function reserveCache( | |
| key: string, | |
| paths: string[], | |
| options?: InternalCacheOptions | |
| ): Promise<number> { | |
| const httpClient = createHttpClient() | |
| const version = getCacheVersion(paths, options?.compressionMethod) | |
| const reserveCacheRequest: ReserveCacheRequest = { | |
| key, | |
| version | |
| } | |
| const response = await retryTypedResponse('reserveCache', async () => | |
| httpClient.postJson<ReserveCacheResponse>( | |
| getCacheApiUrl('caches'), | |
| reserveCacheRequest | |
| ) | |
| ) | |
| return response?.result?.cacheId ?? -1 | |
| } | |
| function getContentRange(start: number, end: number): string { | |
| // Format: `bytes start-end/filesize | |
| // start and end are inclusive | |
| // filesize can be * | |
| // For a 200 byte chunk starting at byte 0: | |
| // Content-Range: bytes 0-199/* | |
| return `bytes ${start}-${end}/*` | |
| } | |
| async function uploadChunk( | |
| httpClient: HttpClient, | |
| resourceUrl: string, | |
| openStream: () => NodeJS.ReadableStream, | |
| start: number, | |
| end: number | |
| ): Promise<void> { | |
| core.debug( | |
| `Uploading chunk of size ${end - | |
| start + | |
| 1} bytes at offset ${start} with content range: ${getContentRange( | |
| start, | |
| end | |
| )}` | |
| ) | |
| const additionalHeaders = { | |
| 'Content-Type': 'application/octet-stream', | |
| 'Content-Range': getContentRange(start, end) | |
| } | |
| const uploadChunkResponse = await retryHttpClientResponse( | |
| `uploadChunk (start: ${start}, end: ${end})`, | |
| async () => | |
| httpClient.sendStream( | |
| 'PATCH', | |
| resourceUrl, | |
| openStream(), | |
| additionalHeaders | |
| ) | |
| ) | |
| if (!isSuccessStatusCode(uploadChunkResponse.message.statusCode)) { | |
| throw new Error( | |
| `Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.` | |
| ) | |
| } | |
| } | |
/**
 * Uploads the archive at `archivePath` in chunks to the reserved cache entry.
 *
 * Runs `concurrency` upload workers over one shared file descriptor. Workers
 * claim chunks by reading and advancing the shared `offset`; because the
 * read-and-advance happens synchronously (before any `await`) on the
 * single-threaded event loop, no two workers claim the same chunk.
 *
 * @param cacheId id returned by reserveCache; selects the target resource URL
 * @param options optional tuning; concurrency and chunk size come from
 *   getUploadOptions and must be defined (assertDefined throws otherwise)
 */
async function uploadFile(
  httpClient: HttpClient,
  cacheId: number,
  archivePath: string,
  options?: UploadOptions
): Promise<void> {
  // Upload Chunks
  const fileSize = utils.getArchiveFileSizeInBytes(archivePath)
  const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`)
  // One fd shared by all chunk streams; closed once in the finally below.
  const fd = fs.openSync(archivePath, 'r')
  const uploadOptions = getUploadOptions(options)
  const concurrency = utils.assertDefined(
    'uploadConcurrency',
    uploadOptions.uploadConcurrency
  )
  const maxChunkSize = utils.assertDefined(
    'uploadChunkSize',
    uploadOptions.uploadChunkSize
  )
  // [0, 1, ..., concurrency-1] — one entry per upload worker.
  const parallelUploads = [...new Array(concurrency).keys()]
  core.debug('Awaiting all uploads')
  // Next unclaimed byte offset, shared across all workers.
  let offset = 0
  try {
    await Promise.all(
      parallelUploads.map(async () => {
        while (offset < fileSize) {
          // Last chunk may be shorter than maxChunkSize; start/end inclusive.
          const chunkSize = Math.min(fileSize - offset, maxChunkSize)
          const start = offset
          const end = offset + chunkSize - 1
          offset += maxChunkSize
          await uploadChunk(
            httpClient,
            resourceUrl,
            () =>
              // Fresh stream per attempt over the shared fd; autoClose:false
              // keeps the fd open for the other workers.
              fs
                .createReadStream(archivePath, {
                  fd,
                  start,
                  end,
                  autoClose: false
                })
                .on('error', error => {
                  // NOTE(review): throwing inside an 'error' event listener
                  // cannot reject the surrounding promise — it likely becomes
                  // an uncaught exception. Confirm intended failure path.
                  throw new Error(
                    `Cache upload failed because file read failed with ${error.message}`
                  )
                }),
            start,
            end
          )
        }
      })
    )
  } finally {
    fs.closeSync(fd)
  }
  return
}
| async function commitCache( | |
| httpClient: HttpClient, | |
| cacheId: number, | |
| filesize: number | |
| ): Promise<ITypedResponse<null>> { | |
| const commitCacheRequest: CommitCacheRequest = {size: filesize} | |
| return await retryTypedResponse('commitCache', async () => | |
| httpClient.postJson<null>( | |
| getCacheApiUrl(`caches/${cacheId.toString()}`), | |
| commitCacheRequest | |
| ) | |
| ) | |
| } | |
| export async function saveCache( | |
| cacheId: number, | |
| archivePath: string, | |
| options?: UploadOptions | |
| ): Promise<void> { | |
| const httpClient = createHttpClient() | |
| core.debug('Upload cache') | |
| await uploadFile(httpClient, cacheId, archivePath, options) | |
| // Commit Cache | |
| core.debug('Commiting cache') | |
| const cacheSize = utils.getArchiveFileSizeInBytes(archivePath) | |
| core.info( | |
| `Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)` | |
| ) | |
| const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize) | |
| if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { | |
| throw new Error( | |
| `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.` | |
| ) | |
| } | |
| core.info('Cache saved successfully') | |
| } |