@@ -208,25 +208,30 @@ export class UploadHttpClient {
208
208
// for creating a new GZip file, an in-memory buffer is used for compression
209
209
if ( totalFileSize < 65536 ) {
210
210
const buffer = await createGZipFileInBuffer ( parameters . file )
211
- let uploadStream : NodeJS . ReadableStream
211
+
212
+ // An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
213
+ // it will not properly get reset to the start of the stream if a chunk upload needs to be retried
214
+ let openUploadStream : ( ) => NodeJS . ReadableStream
212
215
213
216
if ( totalFileSize < buffer . byteLength ) {
214
217
// compression did not help with reducing the size, use a readable stream from the original file for upload
215
- uploadStream = fs . createReadStream ( parameters . file )
218
+ openUploadStream = ( ) => fs . createReadStream ( parameters . file )
216
219
isGzip = false
217
220
uploadFileSize = totalFileSize
218
221
} else {
219
222
// create a readable stream using a PassThrough stream that is both readable and writable
220
- const passThrough = new stream . PassThrough ( )
221
- passThrough . end ( buffer )
222
- uploadStream = passThrough
223
+ openUploadStream = ( ) => {
224
+ const passThrough = new stream . PassThrough ( )
225
+ passThrough . end ( buffer )
226
+ return passThrough
227
+ }
223
228
uploadFileSize = buffer . byteLength
224
229
}
225
230
226
231
const result = await this . uploadChunk (
227
232
httpClientIndex ,
228
233
parameters . resourceUrl ,
229
- uploadStream ,
234
+ openUploadStream ,
230
235
0 ,
231
236
uploadFileSize - 1 ,
232
237
uploadFileSize ,
@@ -296,11 +301,12 @@ export class UploadHttpClient {
296
301
const result = await this . uploadChunk (
297
302
httpClientIndex ,
298
303
parameters . resourceUrl ,
299
- fs . createReadStream ( uploadFilePath , {
300
- start,
301
- end,
302
- autoClose : false
303
- } ) ,
304
+ ( ) =>
305
+ fs . createReadStream ( uploadFilePath , {
306
+ start,
307
+ end,
308
+ autoClose : false
309
+ } ) ,
304
310
start ,
305
311
end ,
306
312
uploadFileSize ,
@@ -335,7 +341,7 @@ export class UploadHttpClient {
335
341
* indicates a retryable status, we try to upload the chunk as well
336
342
* @param {number } httpClientIndex The index of the httpClient being used to make all the necessary calls
337
343
* @param {string } resourceUrl Url of the resource that the chunk will be uploaded to
338
- * @param {NodeJS.ReadableStream } data Stream of the file that will be uploaded
344
+ * @param {() => NodeJS.ReadableStream } openStream Function that returns a fresh stream of the file chunk to upload; invoked again to restart from the beginning if the chunk upload must be retried
339
345
* @param {number } start Starting byte index of file that the chunk belongs to
340
346
* @param {number } end Ending byte index of file that the chunk belongs to
341
347
* @param {number } uploadFileSize Total size of the file in bytes that is being uploaded
@@ -346,7 +352,7 @@ export class UploadHttpClient {
346
352
private async uploadChunk (
347
353
httpClientIndex : number ,
348
354
resourceUrl : string ,
349
- data : NodeJS . ReadableStream ,
355
+ openStream : ( ) => NodeJS . ReadableStream ,
350
356
start : number ,
351
357
end : number ,
352
358
uploadFileSize : number ,
@@ -365,7 +371,7 @@ export class UploadHttpClient {
365
371
366
372
const uploadChunkRequest = async ( ) : Promise < IHttpClientResponse > => {
367
373
const client = this . uploadHttpManager . getClient ( httpClientIndex )
368
- return await client . sendStream ( 'PUT' , resourceUrl , data , headers )
374
+ return await client . sendStream ( 'PUT' , resourceUrl , openStream ( ) , headers )
369
375
}
370
376
371
377
let retryCount = 0
0 commit comments