Skip to content
This repository was archived by the owner on Mar 3, 2026. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
166 changes: 87 additions & 79 deletions internal-tooling/performApplicationPerformanceTest.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
*/

import yargs from 'yargs';
import {promises as fsp} from 'fs';
import {promises as fsp, rmSync} from 'fs';
import {
Bucket,
DownloadOptions,
Expand All @@ -27,22 +27,21 @@ import {performance} from 'perf_hooks';
// eslint-disable-next-line node/no-unsupported-features/node-builtins
import {parentPort} from 'worker_threads';
import {
BLOCK_SIZE_IN_BYTES,
DEFAULT_PROJECT_ID,
DEFAULT_NUMBER_OF_OBJECTS,
DEFAULT_SMALL_FILE_SIZE_BYTES,
DEFAULT_LARGE_FILE_SIZE_BYTES,
NODE_DEFAULT_HIGHWATER_MARK_BYTES,
generateRandomDirectoryStructure,
getValidationType,
performanceTestSetup,
TestResult,
} from './performanceUtils';
import {TRANSFER_MANAGER_TEST_TYPES} from './performanceTest';

const TEST_NAME_STRING = 'nodejs-perf-metrics';
const DEFAULT_NUMBER_OF_WRITES = 1;
const DEFAULT_NUMBER_OF_READS = 3;
const TEST_NAME_STRING = 'nodejs-perf-metrics-application';
const DEFAULT_BUCKET_NAME = 'nodejs-perf-metrics-shaffeeullah';
const DEFAULT_SMALL_FILE_SIZE_BYTES = 5120;
const DEFAULT_LARGE_FILE_SIZE_BYTES = 2.147e9;
const BLOCK_SIZE_IN_BYTES = 1024;
const NODE_DEFAULT_HIGHWATER_MARK_BYTES = 16384;

let stg: Storage;
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Looks like this can be removed.

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

fixed and pushed directly to your branch

let bucket: Bucket;
Expand All @@ -55,6 +54,7 @@ const argv = yargs(process.argv.slice(2))
small: {type: 'number', default: DEFAULT_SMALL_FILE_SIZE_BYTES},
large: {type: 'number', default: DEFAULT_LARGE_FILE_SIZE_BYTES},
projectid: {type: 'string', default: DEFAULT_PROJECT_ID},
numobjects: {type: 'number', default: DEFAULT_NUMBER_OF_OBJECTS},
})
.parseSync();

Expand All @@ -63,23 +63,35 @@ const argv = yargs(process.argv.slice(2))
* to the parent thread.
*/
async function main() {
let results: TestResult[] = [];
let result: TestResult = {
op: '',
objectSize: 0,
appBufferSize: 0,
libBufferSize: 0,
crc32Enabled: false,
md5Enabled: false,
apiName: 'JSON',
elapsedTimeUs: 0,
cpuTimeUs: 0,
status: '[OK]',
};

({bucket} = await performanceTestSetup(argv.projectid, argv.bucket));

switch (argv.testtype) {
case TRANSFER_MANAGER_TEST_TYPES.APPLICATION_UPLOAD_MULTIPLE_OBJECTS:
results = await performWriteTest();
result = await performWriteTest();
break;
case TRANSFER_MANAGER_TEST_TYPES.APPLICATION_DOWNLOAD_MULTIPLE_OBJECTS:
results = await performReadTest();
result = await performReadTest();
break;
// case TRANSFER_MANAGER_TEST_TYPES.APPLICATION_LARGE_FILE_DOWNLOAD:
// results = await performLargeReadTest();
// result = await performLargeReadTest();
// break;
default:
break;
}
parentPort?.postMessage(results);
parentPort?.postMessage(result);
}

async function uploadInParallel(
Expand Down Expand Up @@ -112,79 +124,75 @@ async function downloadInParallel(bucket: Bucket, options: DownloadOptions) {
/**
* Performs an iteration of the Write multiple objects test.
*
* @returns {Promise<TestResult[]>} Promise that resolves to an array of test results for the iteration.
* @returns {Promise<TestResult>} Promise that resolves to a test result of an iteration.
*/
async function performWriteTest(): Promise<TestResult[]> {
const results: TestResult[] = [];
const directory = TEST_NAME_STRING;
const directories = generateRandomDirectoryStructure(10, directory);

for (let j = 0; j < DEFAULT_NUMBER_OF_WRITES; j++) {
let start = 0;
let end = 0;

const iterationResult: TestResult = {
op: 'WRITE',
objectSize: BLOCK_SIZE_IN_BYTES, //note this is wrong
appBufferSize: BLOCK_SIZE_IN_BYTES,
libBufferSize: NODE_DEFAULT_HIGHWATER_MARK_BYTES,
crc32Enabled: false,
md5Enabled: false,
apiName: 'JSON',
elapsedTimeUs: 0,
cpuTimeUs: -1,
status: '[OK]',
};

bucket = stg.bucket(argv.bucket, {
preconditionOpts: {
ifGenerationMatch: 0,
},
});

await bucket.deleteFiles(); //cleanup anything old
start = performance.now();
await uploadInParallel(bucket, directories.paths, {validation: checkType});
end = performance.now();

iterationResult.elapsedTimeUs = Math.round((end - start) * 1000);
results.push(iterationResult);
}
return results;
async function performWriteTest(): Promise<TestResult> {
await bucket.deleteFiles(); //start clean

const creationInfo = generateRandomDirectoryStructure(
argv.numobjects,
TEST_NAME_STRING,
argv.small,
argv.large
);

const start = performance.now();
await uploadInParallel(bucket, creationInfo.paths, {validation: checkType});
const end = performance.now();

await bucket.deleteFiles(); //cleanup files
rmSync(TEST_NAME_STRING, {recursive: true, force: true});

const result: TestResult = {
op: 'WRITE',
objectSize: creationInfo.totalSizeInBytes,
appBufferSize: BLOCK_SIZE_IN_BYTES,
libBufferSize: NODE_DEFAULT_HIGHWATER_MARK_BYTES,
crc32Enabled: checkType === 'crc32c',
md5Enabled: checkType === 'md5',
apiName: 'JSON',
elapsedTimeUs: Math.round((end - start) * 1000),
cpuTimeUs: -1,
status: '[OK]',
};
return result;
}

/**
* Performs an iteration of the read multiple objects test.
*
* @returns {Promise<TestResult[]>} Promise that resolves to an array of test results for the iteration.
 * @returns {Promise<TestResult>} Promise that resolves to a test result of the iteration.
*/
async function performReadTest(): Promise<TestResult[]> {
const results: TestResult[] = [];
bucket = stg.bucket(argv.bucket);
for (let j = 0; j < DEFAULT_NUMBER_OF_READS; j++) {
let start = 0;
let end = 0;
const iterationResult: TestResult = {
op: `READ[${j}]`,
objectSize: 0, //this is wrong
appBufferSize: BLOCK_SIZE_IN_BYTES,
libBufferSize: NODE_DEFAULT_HIGHWATER_MARK_BYTES,
crc32Enabled: false,
md5Enabled: false,
apiName: 'JSON',
elapsedTimeUs: 0,
cpuTimeUs: -1,
status: '[OK]',
};

start = performance.now();
await downloadInParallel(bucket, {validation: checkType});
end = performance.now();

iterationResult.elapsedTimeUs = Math.round((end - start) * 1000);
results.push(iterationResult);
}
return results;
async function performReadTest(): Promise<TestResult> {
await bucket.deleteFiles(); // start clean
const creationInfo = generateRandomDirectoryStructure(
argv.numobjects,
TEST_NAME_STRING,
argv.small,
argv.large
);
await uploadInParallel(bucket, creationInfo.paths, {validation: checkType});

const start = performance.now();
await downloadInParallel(bucket, {validation: checkType});
const end = performance.now();

const result: TestResult = {
op: 'READ',
objectSize: creationInfo.totalSizeInBytes,
appBufferSize: BLOCK_SIZE_IN_BYTES,
libBufferSize: NODE_DEFAULT_HIGHWATER_MARK_BYTES,
crc32Enabled: checkType === 'crc32c',
md5Enabled: checkType === 'md5',
apiName: 'JSON',
elapsedTimeUs: Math.round((end - start) * 1000),
cpuTimeUs: -1,
status: '[OK]',
};

rmSync(TEST_NAME_STRING, {recursive: true, force: true});
await bucket.deleteFiles(); //cleanup
return result;
}

main();
82 changes: 40 additions & 42 deletions internal-tooling/performTransferManagerTest.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ import {
generateRandomFileName,
getValidationType,
NODE_DEFAULT_HIGHWATER_MARK_BYTES,
DEFAULT_NUMBER_OF_OBJECTS,
performanceTestSetup,
TestResult,
} from './performanceUtils';
Expand All @@ -39,7 +40,6 @@ import {rmSync} from 'fs';
const TEST_NAME_STRING = 'tm-perf-metrics';
const DEFAULT_BUCKET_NAME = 'nodejs-transfer-manager-perf-metrics';
const DEFAULT_NUMBER_OF_PROMISES = 2;
const DEFAULT_NUMBER_OF_OBJECTS = 1000;
const DEFAULT_CHUNK_SIZE_BYTES = 16 * 1024 * 1024;
const DIRECTORY_PROBABILITY = 0.1;

Expand Down Expand Up @@ -120,26 +120,14 @@ async function performTestCleanup() {
* @returns {Promise<TestResult>} A promise that resolves containing information about the test results.
*/
async function performUploadMultipleObjectsTest(): Promise<TestResult> {
const result: TestResult = {
op: 'WRITE',
objectSize: 0,
appBufferSize: BLOCK_SIZE_IN_BYTES,
libBufferSize: NODE_DEFAULT_HIGHWATER_MARK_BYTES,
crc32Enabled: checkType === 'crc32c',
md5Enabled: checkType === 'md5',
apiName: 'JSON',
elapsedTimeUs: 0,
cpuTimeUs: -1,
status: '[OK]',
};
const creationInfo = generateRandomDirectoryStructure(
argv.numobjects,
TEST_NAME_STRING,
argv.small,
argv.large,
DIRECTORY_PROBABILITY
);
result.objectSize = creationInfo.totalSizeInBytes;

const start = performance.now();
await transferManager.uploadMulti(creationInfo.paths, {
concurrencyLimit: argv.numpromises,
Expand All @@ -149,39 +137,38 @@ async function performUploadMultipleObjectsTest(): Promise<TestResult> {
});
const end = performance.now();

result.elapsedTimeUs = Math.round((end - start) * 1000);
rmSync(TEST_NAME_STRING, {recursive: true, force: true});

return result;
}

/**
* Performs a test where multiple objects are downloaded in parallel from a bucket.
*
* @returns {Promise<TestResult>} A promise that resolves containing information about the test results.
*/
async function performDownloadMultipleObjectsTest(): Promise<TestResult> {
const result: TestResult = {
op: 'READ',
objectSize: 0,
op: 'WRITE',
objectSize: creationInfo.totalSizeInBytes,
appBufferSize: BLOCK_SIZE_IN_BYTES,
libBufferSize: NODE_DEFAULT_HIGHWATER_MARK_BYTES,
crc32Enabled: checkType === 'crc32c',
md5Enabled: checkType === 'md5',
apiName: 'JSON',
elapsedTimeUs: 0,
elapsedTimeUs: Math.round((end - start) * 1000),
cpuTimeUs: -1,
status: '[OK]',
};

return result;
}

/**
* Performs a test where multiple objects are downloaded in parallel from a bucket.
*
* @returns {Promise<TestResult>} A promise that resolves containing information about the test results.
*/
async function performDownloadMultipleObjectsTest(): Promise<TestResult> {
const creationInfo = generateRandomDirectoryStructure(
argv.numobjects,
TEST_NAME_STRING,
argv.small,
argv.large,
DIRECTORY_PROBABILITY
);
result.objectSize = creationInfo.totalSizeInBytes;

await transferManager.uploadMulti(creationInfo.paths, {
concurrencyLimit: argv.numpromises,
passthroughOptions: {
Expand All @@ -198,9 +185,20 @@ async function performDownloadMultipleObjectsTest(): Promise<TestResult> {
});
const end = performance.now();

result.elapsedTimeUs = Math.round((end - start) * 1000);
rmSync(TEST_NAME_STRING, {recursive: true, force: true});

const result: TestResult = {
op: 'READ',
objectSize: creationInfo.totalSizeInBytes,
appBufferSize: BLOCK_SIZE_IN_BYTES,
libBufferSize: NODE_DEFAULT_HIGHWATER_MARK_BYTES,
crc32Enabled: checkType === 'crc32c',
md5Enabled: checkType === 'md5',
apiName: 'JSON',
elapsedTimeUs: Math.round((end - start) * 1000),
cpuTimeUs: -1,
status: '[OK]',
};
return result;
}

Expand All @@ -217,18 +215,6 @@ async function performDownloadLargeFileTest(): Promise<TestResult> {
argv.large,
__dirname
);
const result: TestResult = {
op: 'READ',
objectSize: sizeInBytes,
appBufferSize: BLOCK_SIZE_IN_BYTES,
libBufferSize: NODE_DEFAULT_HIGHWATER_MARK_BYTES,
crc32Enabled: false,
md5Enabled: false,
apiName: 'JSON',
elapsedTimeUs: 0,
cpuTimeUs: -1,
status: '[OK]',
};
const file = bucket.file(`${fileName}`);

await bucket.upload(`${__dirname}/${fileName}`);
Expand All @@ -241,9 +227,21 @@ async function performDownloadLargeFileTest(): Promise<TestResult> {
});
const end = performance.now();

result.elapsedTimeUs = Math.round((end - start) * 1000);
cleanupFile(fileName);

const result: TestResult = {
op: 'READ',
objectSize: sizeInBytes,
appBufferSize: BLOCK_SIZE_IN_BYTES,
libBufferSize: NODE_DEFAULT_HIGHWATER_MARK_BYTES,
crc32Enabled: false,
md5Enabled: false,
apiName: 'JSON',
elapsedTimeUs: Math.round((end - start) * 1000),
cpuTimeUs: -1,
status: '[OK]',
};

return result;
}

Expand Down
Loading