Skip to content

Commit

Permalink
feat: MPU for transfer manager (#2192)
Browse files Browse the repository at this point in the history
* feat: MPU for transfer manager

* add tests for MPU upload

* naming fix in tests

* return full response from completeUpload

* add comment about queueing mechanism

* fix typo

* add md5 validation to uploadFileInChunks
  • Loading branch information
ddelgrosso1 committed Jul 12, 2023
1 parent 37ce0a6 commit ae83421
Show file tree
Hide file tree
Showing 4 changed files with 519 additions and 19 deletions.
62 changes: 45 additions & 17 deletions internal-tooling/performTransferManagerTest.ts
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ const argv = yargs(process.argv.slice(2))
* to the parent thread.
*/
async function main() {
let result: TestResult | undefined = undefined;
let results: TestResult[] = [];

({bucket, transferManager} = await performanceTestSetup(
argv.project!,
Expand All @@ -60,18 +60,18 @@ async function main() {

switch (argv.test_type) {
case PERFORMANCE_TEST_TYPES.TRANSFER_MANAGER_UPLOAD_MANY_FILES:
result = await performUploadManyFilesTest();
results = await performUploadManyFilesTest();
break;
case PERFORMANCE_TEST_TYPES.TRANSFER_MANAGER_DOWNLOAD_MANY_FILES:
result = await performDownloadManyFilesTest();
results = await performDownloadManyFilesTest();
break;
case PERFORMANCE_TEST_TYPES.TRANSFER_MANAGER_CHUNKED_FILE_DOWNLOAD:
result = await performDownloadFileInChunksTest();
results = await performChunkUploadDownloadTest();
break;
default:
break;
}
parentPort?.postMessage(result);
parentPort?.postMessage(results);
await performTestCleanup();
}

Expand All @@ -87,7 +87,7 @@ async function performTestCleanup() {
*
 * @returns {Promise<TestResult[]>} A promise that resolves containing information about the test results.
*/
async function performUploadManyFilesTest(): Promise<TestResult> {
async function performUploadManyFilesTest(): Promise<TestResult[]> {
const fileSizeRange = getLowHighFileSize(argv.object_size);
const creationInfo = generateRandomDirectoryStructure(
argv.num_objects,
Expand Down Expand Up @@ -126,15 +126,15 @@ async function performUploadManyFilesTest(): Promise<TestResult> {
bucketName: bucket.name,
};

return result;
return [result];
}

/**
* Performs a test where multiple objects are downloaded in parallel from a bucket.
*
* @returns {Promise<TestResult>} A promise that resolves containing information about the test results.
* @returns {Promise<TestResult[]>} A promise that resolves containing information about the test results.
*/
async function performDownloadManyFilesTest(): Promise<TestResult> {
async function performDownloadManyFilesTest(): Promise<TestResult[]> {
const fileSizeRange = getLowHighFileSize(argv.object_size);
const creationInfo = generateRandomDirectoryStructure(
argv.num_objects,
Expand Down Expand Up @@ -179,15 +179,17 @@ async function performDownloadManyFilesTest(): Promise<TestResult> {
transferOffset: 0,
bucketName: bucket.name,
};
return result;

return [result];
}

/**
* Performs a test where a large file is downloaded as chunks in parallel.
* Performs a test where a large file is uploaded and downloaded as chunks in parallel.
*
 * @returns {Promise<TestResult[]>} A promise that resolves containing information about the test results.
*/
async function performDownloadFileInChunksTest(): Promise<TestResult> {
async function performChunkUploadDownloadTest(): Promise<TestResult[]> {
const results: TestResult[] = [];
const fileSizeRange = getLowHighFileSize(argv.object_size);
const fileName = generateRandomFileName(TEST_NAME_STRING);
const sizeInBytes = generateRandomFile(
Expand All @@ -197,21 +199,46 @@ async function performDownloadFileInChunksTest(): Promise<TestResult> {
__dirname
);
const file = bucket.file(`${fileName}`);
let result: TestResult = {
op: 'WRITE',
objectSize: sizeInBytes,
appBufferSize: NODE_DEFAULT_HIGHWATER_MARK_BYTES,
crc32cEnabled: checkType === 'crc32c',
md5Enabled: false,
api: 'JSON',
elapsedTimeUs: -1,
cpuTimeUs: -1,
status: 'OK',
chunkSize: argv.range_read_size,
workers: argv.workers,
library: 'nodejs',
transferSize: sizeInBytes,
transferOffset: 0,
bucketName: bucket.name,
};

await bucket.upload(`${__dirname}/${fileName}`);
let start = performance.now();
await transferManager.uploadFileInChunks(`${__dirname}/${fileName}`, {
concurrencyLimit: argv.workers,
chunkSizeBytes: argv.range_read_size,
});
let end = performance.now();
result.elapsedTimeUs = Math.round((end - start) * 1000);
results.push(result);
cleanupFile(fileName);
const start = performance.now();

start = performance.now();
await transferManager.downloadFileInChunks(file, {
concurrencyLimit: argv.workers,
chunkSizeBytes: argv.range_read_size,
destination: path.join(__dirname, fileName),
validation: checkType === 'crc32c' ? checkType : false,
});
const end = performance.now();
end = performance.now();

cleanupFile(fileName);

const result: TestResult = {
result = {
op: 'READ[0]',
objectSize: sizeInBytes,
appBufferSize: NODE_DEFAULT_HIGHWATER_MARK_BYTES,
Expand All @@ -228,8 +255,9 @@ async function performDownloadFileInChunksTest(): Promise<TestResult> {
transferOffset: 0,
bucketName: bucket.name,
};
results.push(result);

return result;
return results;
}

main();
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
"duplexify": "^4.0.0",
"ent": "^2.2.0",
"extend": "^3.0.2",
"fast-xml-parser": "^4.2.2",
"gaxios": "^5.0.0",
"google-auth-library": "^8.0.1",
"mime": "^3.0.0",
Expand Down

0 comments on commit ae83421

Please sign in to comment.