
Commit 628f82f

Correctly reset chunk during artifact upload on retry (actions#458)

* Correctly reset chunk during artifact upload on retry
* Update workflow
* Implementation details around the passthrough stream

1 parent 6d83c79 · commit 628f82f

File tree

3 files changed: +34 -16 lines

.github/workflows/artifact-tests.yml
Lines changed: 10 additions & 2 deletions

@@ -1,13 +1,21 @@
 name: artifact-unit-tests
-on: push
+on:
+  push:
+    branches:
+      - master
+    paths-ignore:
+      - '**.md'
+  pull_request:
+    paths-ignore:
+      - '**.md'
 
 jobs:
   build:
     name: Build
 
     strategy:
       matrix:
-        runs-on: [ubuntu-latest, windows-latest, macOS-latest]
+        runs-on: [ubuntu-latest, windows-latest, macos-latest]
       fail-fast: false
 
     runs-on: ${{ matrix.runs-on }}

packages/artifact/docs/implementation-details.md
Lines changed: 4 additions & 0 deletions

@@ -6,6 +6,10 @@ Warning: Implementation details may change at any time without notice. This is m
 
 ![image](https://user-images.githubusercontent.com/16109154/79765587-19522b00-8327-11ea-9679-410bb10e1b13.png)
 
+During artifact upload, gzip is used to compress individual files before they are uploaded. This minimizes the amount of data sent over the wire, which reduces the total number of HTTP calls (upload happens in 4MB chunks) and yields considerably faster uploads, especially on self-hosted runners.
+
+If a file is less than 64KB in size, a passthrough stream (readable and writable) is used to convert an in-memory buffer into a readable stream without any extra streams or piping.
+
 ## Retry Logic when downloading an individual file
 
 ![image](https://user-images.githubusercontent.com/16109154/78555461-5be71400-780d-11ea-9abd-b05b77a95a3f.png)
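The passthrough technique described in the documentation above can be sketched in a few lines. This is a standalone illustration, not the toolkit's actual `createGZipFileInBuffer` implementation; the helper names here are made up for the example:

```ts
import * as fs from 'fs'
import * as stream from 'stream'
import * as zlib from 'zlib'
import {promisify} from 'util'

const gzip = promisify(zlib.gzip)

// Compress a small file (< 64KB) entirely in memory -- at this size
// no temporary file on disk is needed
async function gzipFileToBuffer(filePath: string): Promise<Buffer> {
  const contents = await fs.promises.readFile(filePath)
  return gzip(contents)
}

// A PassThrough stream is both writable and readable: writing the
// buffer into it and ending it yields a NodeJS.ReadableStream that
// stream-based upload APIs can consume, with no extra piping
function bufferToStream(buffer: Buffer): NodeJS.ReadableStream {
  const passThrough = new stream.PassThrough()
  passThrough.end(buffer)
  return passThrough
}
```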

packages/artifact/src/internal/upload-http-client.ts
Lines changed: 20 additions & 14 deletions

@@ -208,25 +208,30 @@ export class UploadHttpClient {
     // for creating a new GZip file, an in-memory buffer is used for compression
     if (totalFileSize < 65536) {
       const buffer = await createGZipFileInBuffer(parameters.file)
-      let uploadStream: NodeJS.ReadableStream
+
+      // An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
+      // it will not properly get reset to the start of the stream if a chunk upload needs to be retried
+      let openUploadStream: () => NodeJS.ReadableStream
 
       if (totalFileSize < buffer.byteLength) {
         // compression did not help with reducing the size, use a readable stream from the original file for upload
-        uploadStream = fs.createReadStream(parameters.file)
+        openUploadStream = () => fs.createReadStream(parameters.file)
         isGzip = false
         uploadFileSize = totalFileSize
       } else {
         // create a readable stream using a PassThrough stream that is both readable and writable
-        const passThrough = new stream.PassThrough()
-        passThrough.end(buffer)
-        uploadStream = passThrough
+        openUploadStream = () => {
+          const passThrough = new stream.PassThrough()
+          passThrough.end(buffer)
+          return passThrough
+        }
         uploadFileSize = buffer.byteLength
       }
 
       const result = await this.uploadChunk(
         httpClientIndex,
         parameters.resourceUrl,
-        uploadStream,
+        openUploadStream,
         0,
         uploadFileSize - 1,
         uploadFileSize,
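The change above is the heart of the fix: a Node.js readable stream can only be consumed once, so if a chunk upload fails partway through, the same stream object cannot be rewound for the retry. Passing a factory lets each attempt open a fresh stream. A minimal sketch of the pattern, with illustrative names (`sendWithRetry` is not part of the toolkit):

```ts
// Each call to the factory yields a fresh, unconsumed stream that
// starts at the beginning of the data
type OpenStream = () => NodeJS.ReadableStream

async function sendWithRetry(
  open: OpenStream,
  send: (data: NodeJS.ReadableStream) => Promise<boolean>,
  maxAttempts = 3
): Promise<void> {
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    // A stream partially drained by a failed attempt is simply
    // discarded; the next attempt opens a brand-new one
    if (await send(open())) {
      return
    }
  }
  throw new Error('all upload attempts failed')
}
```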
@@ -296,11 +301,12 @@
           const result = await this.uploadChunk(
             httpClientIndex,
             parameters.resourceUrl,
-            fs.createReadStream(uploadFilePath, {
-              start,
-              end,
-              autoClose: false
-            }),
+            () =>
+              fs.createReadStream(uploadFilePath, {
+                start,
+                end,
+                autoClose: false
+              }),
             start,
             end,
             uploadFileSize,
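The same factory pattern is applied to large files uploaded in chunks: each chunk's factory captures its own start/end byte offsets, so a retried chunk re-reads exactly its own range. A sketch of how 4MB chunk boundaries could be derived (the chunk size comes from the docs above; `chunkFactories` is an illustrative helper, not toolkit code):

```ts
import * as fs from 'fs'

const CHUNK_SIZE = 4 * 1024 * 1024 // 4MB, per the implementation notes above

// One factory per chunk; start and end are inclusive byte offsets,
// matching the semantics of fs.createReadStream's {start, end} options
function chunkFactories(
  filePath: string,
  fileSize: number
): Array<() => NodeJS.ReadableStream> {
  const factories: Array<() => NodeJS.ReadableStream> = []
  for (let start = 0; start < fileSize; start += CHUNK_SIZE) {
    const end = Math.min(start + CHUNK_SIZE, fileSize) - 1
    factories.push(() =>
      // autoClose: false mirrors the diff above: the descriptor is not
      // closed automatically, leaving cleanup to the caller
      fs.createReadStream(filePath, {start, end, autoClose: false})
    )
  }
  return factories
}
```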
@@ -335,7 +341,7 @@
    * indicates a retryable status, we try to upload the chunk as well
    * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls
    * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to
-   * @param {NodeJS.ReadableStream} data Stream of the file that will be uploaded
+   * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded
    * @param {number} start Starting byte index of file that the chunk belongs to
    * @param {number} end Ending byte index of file that the chunk belongs to
    * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded
@@ -346,7 +352,7 @@
   private async uploadChunk(
     httpClientIndex: number,
     resourceUrl: string,
-    data: NodeJS.ReadableStream,
+    openStream: () => NodeJS.ReadableStream,
     start: number,
     end: number,
     uploadFileSize: number,
@@ -365,7 +371,7 @@
 
     const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
       const client = this.uploadHttpManager.getClient(httpClientIndex)
-      return await client.sendStream('PUT', resourceUrl, data, headers)
+      return await client.sendStream('PUT', resourceUrl, openStream(), headers)
     }
 
     let retryCount = 0
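Because `openStream()` is now invoked inside `uploadChunkRequest`, every re-issue of the request reads the chunk from its start. A hedged sketch of how a surrounding retry loop might consume it (the real retry and backoff logic in `upload-http-client.ts` is more involved; this only demonstrates the reopening behavior):

```ts
// Minimal stand-in for the response shape; the toolkit uses
// IHttpClientResponse from @actions/http-client
interface ResponseLike {
  statusCode: number
}

async function retryChunk(
  uploadChunkRequest: () => Promise<ResponseLike>,
  maxRetries: number
): Promise<boolean> {
  let retryCount = 0
  while (retryCount <= maxRetries) {
    // Each call re-runs openStream() internally, so the request body
    // always starts from the first byte of the chunk
    const response = await uploadChunkRequest()
    if (response.statusCode >= 200 && response.statusCode < 300) {
      return true
    }
    retryCount++
  }
  return false
}
```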
