Skip to content

Commit

Permalink
feat: retry multipart Bucket.upload (#1509)
Browse files Browse the repository at this point in the history
* feat: retry multipart Bucket.upload

* removed log statement

* 馃 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/master/packages/owl-bot/README.md

* removed unused variables

* 馃 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/master/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
  • Loading branch information
shaffeeullah and gcf-owl-bot[bot] committed Jul 22, 2021
1 parent cf53a5e commit 730d0a0
Show file tree
Hide file tree
Showing 2 changed files with 196 additions and 14 deletions.
63 changes: 49 additions & 14 deletions src/bucket.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ import * as mime from 'mime-types';
import * as path from 'path';
import pLimit = require('p-limit');
import {promisify} from 'util';
import retry = require('async-retry');

// eslint-disable-next-line @typescript-eslint/no-var-requires
const snakeize = require('snakeize');
Expand Down Expand Up @@ -3723,6 +3724,54 @@ class Bucket extends ServiceObject {
optionsOrCallback?: UploadOptions | UploadCallback,
callback?: UploadCallback
): Promise<UploadResponse> | void {
const upload = () => {
const isMultipart = options.resumable === false;
const returnValue = retry(
async (bail: (err: Error) => void) => {
await new Promise<void>((resolve, reject) => {
const writable = newFile.createWriteStream(options);
if (options.onUploadProgress) {
writable.on('progress', options.onUploadProgress);
}
fs.createReadStream(pathString)
.pipe(writable)
.on('error', err => {
if (
isMultipart &&
this.storage.retryOptions.autoRetry &&
this.storage.retryOptions.retryableErrorFn!(err)
) {
return reject(err);
} else {
return bail(err);
}
})
.on('finish', () => {
return resolve();
});
});
},
{
retries: this.storage.retryOptions.maxRetries,
factor: this.storage.retryOptions.retryDelayMultiplier,
maxTimeout: this.storage.retryOptions.maxRetryDelay! * 1000, //convert to milliseconds
maxRetryTime: this.storage.retryOptions.totalTimeout! * 1000, //convert to milliseconds
}
);

if (!callback) {
return returnValue;
} else {
return returnValue
.then(() => {
if (callback) {
return callback!(null, newFile, newFile.metadata);
}
})
.catch(callback);
}
};

// eslint-disable-next-line @typescript-eslint/no-explicit-any
if ((global as any)['GCLOUD_SANDBOX_ENV']) {
return;
Expand Down Expand Up @@ -3779,20 +3828,6 @@ class Bucket extends ServiceObject {
upload();
});
}

    // Streams pathString into newFile via createWriteStream and reports the
    // result through `callback` (removed in this commit in favor of the
    // retrying closure above).
    function upload() {
      const writable = newFile.createWriteStream(options);
      if (options.onUploadProgress) {
        writable.on('progress', options.onUploadProgress);
      }
      // NOTE(review): `callback` is registered as the error handler on BOTH
      // the read stream and the write stream, so a failure on each side can
      // invoke `callback` more than once — callers expecting a single
      // invocation should guard against that.
      fs.createReadStream(pathString)
        .on('error', callback!)
        .pipe(writable)
        .on('error', callback!)
        .on('finish', () => {
          callback!(null, newFile, newFile.metadata);
        });
    }
}

makeAllFilesPublicPrivate_(
Expand Down
147 changes: 147 additions & 0 deletions test/bucket.ts
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ import {
import {AddAclOptions} from '../src/acl';
import {Policy} from '../src/iam';
import sinon = require('sinon');
import {Transform} from 'stream';

class FakeFile {
calledWith_: IArguments;
Expand Down Expand Up @@ -167,6 +168,14 @@ const fakeSigner = {
URLSigner: () => {},
};

/**
 * Error carrying an HTTP status code, so tests can drive
 * `retryableErrorFn` (which retries only on code === 500).
 */
class HTTPError extends Error {
  code: number;

  constructor(message: string, code: number) {
    super(message);
    this.code = code;
  }
}

describe('Bucket', () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let Bucket: any;
Expand All @@ -175,6 +184,16 @@ describe('Bucket', () => {

const STORAGE = {
createBucket: util.noop,
retryOptions: {
autoRetry: true,
maxRetries: 3,
retryDelayMultipier: 2,
totalTimeout: 600,
maxRetryDelay: 60,
retryableErrorFn: (err: HTTPError) => {
return err.code === 500;
},
},
};
const BUCKET_NAME = 'test-bucket';

Expand Down Expand Up @@ -2477,6 +2496,134 @@ describe('Bucket', () => {
});
});

describe('multipart uploads', () => {
      // Pass-through Transform that, 5ms after each chunk, errors with an
      // HTTP 500 when constructed with retryCount === 1 and succeeds for any
      // other value — simulates a retryable failure on a specific attempt.
      class DelayedStream500Error extends Transform {
        retryCount: number;
        constructor(retryCount: number) {
          super();
          this.retryCount = retryCount;
        }
        _transform(chunk: string | Buffer, _encoding: string, done: Function) {
          this.push(chunk);
          setTimeout(() => {
            if (this.retryCount === 1) {
              // 500 is retryable per STORAGE.retryOptions.retryableErrorFn.
              done(new HTTPError('first error', 500));
            } else {
              done();
            }
          }, 5);
        }
      }

      beforeEach(() => {
        // Report a tiny file size so Bucket#upload chooses the multipart
        // (simple) code path rather than a resumable upload.
        fsStatOverride = (path: string, callback: Function) => {
          callback(null, {size: 1}); // Small size to guarantee simple upload
        };
      });

      it('should save with no errors', done => {
        const fakeFile = new FakeFile(bucket, 'file-name');
        const options = {destination: fakeFile, resumable: false};
        fakeFile.createWriteStream = (options_: CreateWriteStreamOptions) => {
          // Pass-through Transform that completes each chunk after a short
          // delay and never errors: the happy-path multipart upload.
          class DelayedStreamNoError extends Transform {
            _transform(
              chunk: string | Buffer,
              _encoding: string,
              done: Function
            ) {
              this.push(chunk);
              setTimeout(() => {
                done();
              }, 5);
            }
          }
          // resumable:false must be forwarded to File#createWriteStream.
          assert.strictEqual(options_.resumable, false);
          return new DelayedStreamNoError();
        };
        bucket.upload(filepath, options, (err: Error) => {
          assert.ifError(err);
          done();
        });
      });

it('should retry on first failure', done => {
const fakeFile = new FakeFile(bucket, 'file-name');
const options = {destination: fakeFile, resumable: false};
let retryCount = 0;
fakeFile.createWriteStream = (options_: CreateWriteStreamOptions) => {
setImmediate(() => {
assert.strictEqual(options_.resumable, false);
retryCount++;
done();
});
return new DelayedStream500Error(retryCount);
};
bucket.upload(filepath, options, (err: Error, file: FakeFile) => {
assert.ifError(err);
assert(file.isSameFile());
assert.deepStrictEqual(file.metadata, metadata);
assert.ok(retryCount === 2);
done();
});
});

it('should not retry if nonretryable error code', done => {
const fakeFile = new FakeFile(bucket, 'file-name');
const options = {destination: fakeFile, resumable: false};
let retryCount = 0;
fakeFile.createWriteStream = (options_: CreateWriteStreamOptions) => {
class DelayedStream403Error extends Transform {
_transform(
chunk: string | Buffer,
_encoding: string,
done: Function
) {
this.push(chunk);
setTimeout(() => {
retryCount++;
if (retryCount === 1) {
done(new HTTPError('first error', 403));
} else {
done();
}
}, 5);
}
}
setImmediate(() => {
assert.strictEqual(options_.resumable, false);
retryCount++;
done();
});
return new DelayedStream403Error();
};

bucket.upload(filepath, options, (err: Error) => {
assert.strictEqual(err.message, 'first error');
assert.ok(retryCount === 2);
done();
});
});

it('non-multipart upload should not retry', done => {
const fakeFile = new FakeFile(bucket, 'file-name');
const options = {destination: fakeFile, resumable: true};
let retryCount = 0;
fakeFile.createWriteStream = (options_: CreateWriteStreamOptions) => {
setImmediate(() => {
assert.strictEqual(options_.resumable, true);
retryCount++;
done();
});
return new DelayedStream500Error(retryCount);
};
bucket.upload(filepath, options, (err: Error) => {
assert.strictEqual(err.message, 'first error');
assert.ok(retryCount === 1);
done();
});
});
});

it('should allow overriding content type', done => {
const fakeFile = new FakeFile(bucket, 'file-name');
const metadata = {contentType: 'made-up-content-type'};
Expand Down

0 comments on commit 730d0a0

Please sign in to comment.