diff --git a/circle.yml b/circle.yml index 44661bd494..f46158360c 100644 --- a/circle.yml +++ b/circle.yml @@ -20,11 +20,16 @@ machine: REPORT_TOKEN: report-token-1 hosts: bucketwebsitetester.s3-website-us-east-1.amazonaws.com: 127.0.0.1 + post: + - curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add - + - echo "deb https://dl.yarnpkg.com/debian/ stable main" | + sudo tee /etc/apt/sources.list.d/yarn.list + - sudo apt-get update && sudo apt-get install yarn -y dependencies: override: - rm -rf node_modules - - npm install + - yarn install --pure-lockfile post: - sudo pip install flake8 yamllint - sudo pip install s3cmd==1.6.1 diff --git a/constants.js b/constants.js index 85492d3190..365517f2c3 100644 --- a/constants.js +++ b/constants.js @@ -123,8 +123,11 @@ const constants = { // for external backends, don't call unless at least 1 minute // (60,000 milliseconds) since last call externalBackendHealthCheckInterval: 60000, - versioningNotImplBackends: { azure: true }, - mpuMDStoredExternallyBackend: { aws_s3: true }, + versioningNotImplBackends: { azure: true, gcp: true }, + mpuMDStoredExternallyBackend: { aws_s3: true, gcp: true }, + skipBatchDeleteBackends: { azure: true, gcp: true }, + s3HandledBackends: { azure: true, gcp: true }, + hasCopyPartBackends: { aws_s3: true, gcp: true }, /* eslint-enable camelcase */ mpuMDStoredOnS3Backend: { azure: true }, azureAccountNameRegex: /^[a-z0-9]{3,24}$/, diff --git a/lib/Config.js b/lib/Config.js index 11ae500b56..36d73611e6 100644 --- a/lib/Config.js +++ b/lib/Config.js @@ -876,7 +876,8 @@ class Config extends EventEmitter { } getLocationConstraintType(locationConstraint) { - return this.locationConstraints[locationConstraint].type; + const dataStoreName = this.locationConstraints[locationConstraint]; + return dataStoreName && dataStoreName.type; } setRestEndpoints(restEndpoints) { diff --git a/lib/api/objectPutPart.js b/lib/api/objectPutPart.js index 8667477add..9b34ee4d88 100644 --- a/lib/api/objectPutPart.js +++ b/lib/api/objectPutPart.js @@ -201,7 +201,8 @@ function objectPutPart(authInfo, request, streamingV4Params, log, // if data backend handles MPU, skip to end of waterfall return next(skipError, destinationBucket, partInfo.dataStoreETag); - } else if (partInfo && partInfo.dataStoreType === 'azure') { + } else if (partInfo && + constants.s3HandledBackends[partInfo.dataStoreType]) { return next(null, destinationBucket, objectLocationConstraint, cipherBundle, splitter, partInfo); @@ -250,7 +251,8 @@ function objectPutPart(authInfo, request, streamingV4Params, log, (destinationBucket, objectLocationConstraint, cipherBundle, partKey, prevObjectSize, oldLocations, partInfo, next) => { // NOTE: set oldLocations to null so we do not batchDelete for now - if (partInfo && partInfo.dataStoreType === 'azure') { + if (partInfo && + constants.skipBatchDeleteBackends[partInfo.dataStoreType]) { // skip to storing metadata return next(null, destinationBucket, partInfo, partInfo.dataStoreETag, diff --git a/lib/data/external/GCP/GcpApis/listParts.js b/lib/data/external/GCP/GcpApis/listParts.js index 46ce079a1e..4c745b7db8 100644 --- a/lib/data/external/GCP/GcpApis/listParts.js +++ b/lib/data/external/GCP/GcpApis/listParts.js @@ -18,9 +18,15 @@ function listParts(params, callback) { logHelper(logger, 'error', 'error in listParts', error); return callback(error); } + if (params.PartNumberMarker && params.PartNumberMarker < 0) { + return callback(errors.InvalidArgument + .customizeDescription('The request specified an invalid marker')); + } 
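+ } + // note: PartNumberMarker is mapped onto a listObjects Marker below; part keys + // embed a zero-padded part number, so lexicographic object order matches + // numeric part order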
const mpuParams = { Bucket: params.Bucket, Prefix: createMpuKey(params.Key, params.UploadId, 'parts'), + Marker: createMpuKey(params.Key, params.UploadId, + params.PartNumberMarker, 'parts'), MaxKeys: params.MaxParts, }; return this.listObjects(mpuParams, (err, res) => { diff --git a/lib/data/external/GCP/GcpApis/uploadPart.js b/lib/data/external/GCP/GcpApis/uploadPart.js index 602b6ec419..6bb44bbbb4 100644 --- a/lib/data/external/GCP/GcpApis/uploadPart.js +++ b/lib/data/external/GCP/GcpApis/uploadPart.js @@ -1,5 +1,5 @@ const { errors } = require('arsenal'); -const { createMpuKey, logger } = require('../GcpUtils'); +const { getPartNumber, createMpuKey, logger } = require('../GcpUtils'); const { logHelper } = require('../../utils'); /** @@ -17,9 +17,16 @@ function uploadPart(params, callback) { logHelper(logger, 'error', 'error in uploadPart', error); return callback(error); } + const partNumber = getPartNumber(params.PartNumber); + if (!partNumber) { + const error = errors.InvalidArgument + .customizeDescription('PartNumber is invalid'); + logHelper(logger, 'debug', 'error in uploadPart', error); + return callback(error); + } const mpuParams = { Bucket: params.Bucket, - Key: createMpuKey(params.Key, params.UploadId, params.PartNumber), + Key: createMpuKey(params.Key, params.UploadId, partNumber), Body: params.Body, ContentLength: params.ContentLength, }; diff --git a/lib/data/external/GCP/GcpApis/uploadPartCopy.js b/lib/data/external/GCP/GcpApis/uploadPartCopy.js index 14ffed6bef..3fb8b6ccc4 100644 --- a/lib/data/external/GCP/GcpApis/uploadPartCopy.js +++ b/lib/data/external/GCP/GcpApis/uploadPartCopy.js @@ -1,5 +1,5 @@ const { errors } = require('arsenal'); -const { createMpuKey, logger } = require('../GcpUtils'); +const { getPartNumber, createMpuKey, logger } = require('../GcpUtils'); const { logHelper } = require('../../utils'); /** @@ -19,9 +19,16 @@ function uploadPartCopy(params, callback) { logHelper(logger, 'error', 'error in uploadPartCopy', error); return callback(error); } + const partNumber = getPartNumber(params.PartNumber); + if (!partNumber) { + const error = errors.InvalidArgument + .customizeDescription('PartNumber is not a number'); + logHelper(logger, 'debug', 'error in uploadPartCopy', error); + return callback(error); + } const mpuParams = { Bucket: params.Bucket, - Key: createMpuKey(params.Key, params.UploadId, params.PartNumber), + Key: createMpuKey(params.Key, params.UploadId, partNumber), CopySource: params.CopySource, }; return this.copyObject(mpuParams, callback); diff --git a/lib/data/external/GCP/GcpUtils.js b/lib/data/external/GCP/GcpUtils.js index 73c2cf1655..fbce594445 100644 --- a/lib/data/external/GCP/GcpUtils.js +++ b/lib/data/external/GCP/GcpUtils.js @@ -32,16 +32,20 @@ function getSourceInfo(CopySource) { return { sourceBucket, sourceObject }; } -function getRandomInt(min, max) { - const minVal = Math.ceil(min); - const maxVal = Math.floor(max); - return Math.floor(Math.random() * (maxVal - minVal)) + minVal; -} - function getPaddedPartNumber(number) { return `000000${number}`.substr(-5); } +function getPartNumber(number) { + if (isNaN(number)) { + return undefined; + } + if (typeof number === 'string') { + return parseInt(number, 10); + } + return number; +} + function createMpuKey(key, uploadId, partNumberArg, fileNameArg) { let partNumber = partNumberArg; let fileName = fileNameArg; @@ -69,10 +73,8 @@ function createMpuList(params, level, size) { // populate and return a parts list for compose const retList = []; for (let i = 1; i <= size; ++i) { - 
const paddedNumber = getPaddedPartNumber(i); retList.push({ - PartName: - `${params.Key}-${params.UploadId}/${level}/${paddedNumber}`, + PartName: createMpuKey(params.Key, params.UploadId, i, level), PartNumber: i, }); } @@ -154,7 +156,6 @@ function getPutTagsMetadata(metadata, tagging = '') { module.exports = { // functions eachSlice, - getRandomInt, createMpuKey, createMpuList, getSourceInfo, @@ -162,6 +163,7 @@ module.exports = { stripTags, retrieveTags, getPutTagsMetadata, + getPartNumber, // util objects logger, }; diff --git a/lib/data/external/GcpClient.js b/lib/data/external/GcpClient.js index e99357954b..c37fb9c929 100644 --- a/lib/data/external/GcpClient.js +++ b/lib/data/external/GcpClient.js @@ -1,8 +1,17 @@ const async = require('async'); -const { errors } = require('arsenal'); +const { errors, s3middleware } = require('arsenal'); +const MD5Sum = s3middleware.MD5Sum; -const { GCP } = require('./GCP'); +const { GCP, GcpUtils } = require('./GCP'); +const { createMpuKey } = GcpUtils; const AwsClient = require('./AwsClient'); +const { prepareStream } = require('../../api/apiUtils/object/prepareStream'); +const { logHelper, removeQuotes } = require('./utils'); +const { config } = require('../../Config'); + +const missingVerIdInternalError = errors.InternalError.customizeDescription( + 'Invalid state. Please ensure versioning is enabled ' + + 'in GCP for the location constraint and try again.'); /** * Class representing a Google Cloud Storage backend object @@ -28,17 +37,20 @@ class GcpClient extends AwsClient { this.type = 'GCP'; this._gcpBucketName = config.bucketName; this._mpuBucketName = config.mpuBucket; - this._overflowBucketName = config.overflowBucket; + this._createGcpKey = this._createAwsKey; this._gcpParams = Object.assign(this._s3Params, { mainBucket: this._gcpBucketName, mpuBucket: this._mpuBucketName, }); this._client = new GCP(this._gcpParams); + // reassign inherited list parts method from AWS to trigger + // listing using S3 metadata part list instead of request to GCP + this.listParts = undefined; } /** * healthcheck - the gcp health requires checking multiple buckets: - * main, mpu, and overflow buckets + * main and mpu buckets * @param {string} location - location name * @param {function} callback - callback function to call with the bucket * statuses @@ -77,6 +89,205 @@ class GcpClient extends AwsClient { return callback(null, gcpResp); }); } + + createMPU(key, metaHeaders, bucketName, websiteRedirectHeader, contentType, + cacheControl, contentDisposition, contentEncoding, log, callback) { + const metaHeadersTrimmed = {}; + Object.keys(metaHeaders).forEach(header => { + if (header.startsWith('x-amz-meta-')) { + const headerKey = header.substring(11); + metaHeadersTrimmed[headerKey] = metaHeaders[header]; + } + }); + const gcpKey = this._createGcpKey(bucketName, key, this._bucketMatch); + const params = { + Bucket: this._mpuBucketName, + Key: gcpKey, + Metadata: metaHeadersTrimmed, + ContentType: contentType, + CacheControl: cacheControl, + ContentDisposition: contentDisposition, + ContentEncoding: contentEncoding, + }; + return this._client.createMultipartUpload(params, (err, mpuResObj) => { + if (err) { + logHelper(log, 'error', 'err from data backend', + err, this._dataStoreName, this.clientType); + return callback(errors.ServiceUnavailable + .customizeDescription('Error returned from ' + + `GCP: ${err.message}`) + ); + } + return callback(null, mpuResObj); + }); + } + + completeMPU(jsonList, mdInfo, key, uploadId, bucketName, log, callback) { + const gcpKey 
= this._createGcpKey(bucketName, key, this._bucketMatch); + const partArray = []; + const partList = jsonList.Part; + for (let i = 0; i < partList.length; ++i) { + const partObj = partList[i]; + if (!partObj.PartNumber || !partObj.ETag) { + return callback(errors.MalformedXML); + } + const number = partObj.PartNumber[0]; + // check if the partNumber is an actual number throw an error + // otherwise + if (isNaN(number)) { + return callback(errors.MalformedXML); + } + const partNumber = parseInt(number, 10); + const partParams = { + PartName: createMpuKey(gcpKey, uploadId, partNumber), + PartNumber: partNumber, + ETag: partObj.ETag[0], + }; + partArray.push(partParams); + } + const mpuParams = { + Bucket: this._gcpBucketName, + MPU: this._mpuBucketName, + Key: gcpKey, + UploadId: uploadId, + MultipartUpload: { Parts: partArray }, + }; + const completeObjData = { key: gcpKey }; + return this._client.completeMultipartUpload(mpuParams, + (err, completeMpuRes) => { + if (err) { + logHelper(log, 'error', 'err from data backend on ' + + 'completeMPU', err, this._dataStoreName, this.clientType); + return callback(errors.ServiceUnavailable + .customizeDescription('Error returned from ' + + `GCP: ${err.message}`) + ); + } + if (!completeMpuRes.VersionId) { + logHelper(log, 'error', 'missing version id for data ' + + 'backend object', missingVerIdInternalError, + this._dataStoreName, this.clientType); + return callback(missingVerIdInternalError); + } + // remove quotes from eTag because they're added later + completeObjData.eTag = removeQuotes(completeMpuRes.ETag); + completeObjData.dataStoreVersionId = completeMpuRes.VersionId; + completeObjData.contentLength = completeMpuRes.ContentLength; + return callback(null, completeObjData); + }); + } + + uploadPart(request, streamingV4Params, stream, size, key, uploadId, + partNumber, bucketName, log, callback) { + let hashedStream = stream; + if (request) { + const partStream = prepareStream(request, streamingV4Params, + log, callback); + hashedStream = new MD5Sum(); + partStream.pipe(hashedStream); + } + + const gcpKey = this._createGcpKey(bucketName, key, this._bucketMatch); + const params = { + Bucket: this._mpuBucketName, + Key: gcpKey, + UploadId: uploadId, + Body: hashedStream, + ContentLength: size, + PartNumber: partNumber }; + return this._client.uploadPart(params, (err, partResObj) => { + if (err) { + logHelper(log, 'error', 'err from data backend ' + + 'on uploadPart', err, this._dataStoreName, this.clientType); + return callback(errors.ServiceUnavailable + .customizeDescription('Error returned from ' + + `GCP: ${err.message}`) + ); + } + // remove quotes from eTag because they're added later + const noQuotesETag = removeQuotes(partResObj.ETag); + const dataRetrievalInfo = { + key: gcpKey, + dataStoreType: 'gcp', + dataStoreName: this._dataStoreName, + dataStoreETag: noQuotesETag, + }; + return callback(null, dataRetrievalInfo); + }); + } + + uploadPartCopy(request, gcpSourceKey, sourceLocationConstraintName, log, + callback) { + const destBucketName = request.bucketName; + const destObjectKey = request.objectKey; + const destGcpKey = this._createGcpKey(destBucketName, destObjectKey, + this._bucketMatch); + + const sourceGcpBucketName = + config.getGcpBucketNames(sourceLocationConstraintName).bucketName; + + const uploadId = request.query.uploadId; + const partNumber = request.query.partNumber; + const copySourceRange = request.headers['x-amz-copy-source-range']; + + if (copySourceRange) { + return callback(errors.NotImplemented + 
.customizeDescription('Error returned from ' + + `${this.clientType}: copySourceRange not implemented`) + ); + } + + const params = { + Bucket: this._mpuBucketName, + CopySource: `${sourceGcpBucketName}/${gcpSourceKey}`, + Key: destGcpKey, + UploadId: uploadId, + PartNumber: partNumber, + }; + return this._client.uploadPartCopy(params, (err, res) => { + if (err) { + if (err.code === 'AccessDenied') { + logHelper(log, 'error', 'Unable to access ' + + `${sourceGcpBucketName} GCP bucket`, err, + this._dataStoreName, this.clientType); + return callback(errors.AccessDenied + .customizeDescription('Error: Unable to access ' + + `${sourceGcpBucketName} GCP bucket`) + ); + } + logHelper(log, 'error', 'error from data backend on ' + + 'uploadPartCopy', err, this._dataStoreName); + return callback(errors.ServiceUnavailable + .customizeDescription('Error returned from ' + + `GCP: ${err.message}`) + ); + } + // remove quotes from eTag because they're added later + const eTag = removeQuotes(res.CopyObjectResult.ETag); + return callback(null, eTag); + }); + } + + abortMPU(key, uploadId, bucketName, log, callback) { + const gcpKey = this._createGcpKey(bucketName, key, this._bucketMatch); + const getParams = { + Bucket: this._gcpBucketName, + MPU: this._mpuBucketName, + Key: gcpKey, + UploadId: uploadId, + }; + return this._client.abortMultipartUpload(getParams, err => { + if (err) { + logHelper(log, 'error', 'err from data backend ' + + 'on abortMPU', err, this._dataStoreName, this.clientType); + return callback(errors.ServiceUnavailable + .customizeDescription('Error returned from ' + + `GCP: ${err.message}`) + ); + } + return callback(); + }); + } } module.exports = GcpClient; diff --git a/lib/data/multipleBackendGateway.js b/lib/data/multipleBackendGateway.js index 93a05b6558..0208222f2c 100644 --- a/lib/data/multipleBackendGateway.js +++ b/lib/data/multipleBackendGateway.js @@ -173,7 +173,7 @@ const multipleBackendGateway = { location, contentType, cacheControl, contentDisposition, contentEncoding, log, cb) => { const client = clients[location]; - if (client.clientType === 'aws_s3') { + if (client.clientType === 'aws_s3' || client.clientType === 'gcp') { return client.createMPU(key, metaHeaders, bucketName, websiteRedirectHeader, contentType, cacheControl, contentDisposition, contentEncoding, log, cb); @@ -225,8 +225,8 @@ abortMPU: (key, uploadId, location, bucketName, log, cb) => { const client = clients[location]; + const skipDataDelete = true; if (client.clientType === 'azure') { - const skipDataDelete = true; return cb(null, skipDataDelete); } if (client.abortMPU) { @@ -234,6 +234,9 @@ if (err) { return cb(err); } + if (client.clientType === 'gcp') { + return cb(null, skipDataDelete); + } return cb(); }); } diff --git a/lib/data/wrapper.js b/lib/data/wrapper.js index 8aebc072eb..48096fada3 100644 --- a/lib/data/wrapper.js +++ b/lib/data/wrapper.js @@ -569,6 +569,18 @@ const data = { }; locations.push(partResult); return cb(); + } else if ( + partInfo && + partInfo.dataStoreType === 'gcp') { + const partResult = { + key: partInfo.key, + dataStoreName: partInfo.dataStoreName, + dataStoreETag: partInfo.dataStoreETag, + size: numberPartSize, + partNumber: partInfo.partNumber, + }; + locations.push(partResult); + return cb(); } return cb(skipError); }); @@ -672,26 +684,26 @@ const data = { lastModified, serverSideEncryption, []); }); } - - const locationTypeMatchAWS = - config.backends.data === 'multiple' && - 
config.getLocationConstraintType(sourceLocationConstraintName) === - config.getLocationConstraintType(destLocationConstraintName) && - config.getLocationConstraintType(sourceLocationConstraintName) === - 'aws_s3'; + const srcType = + config.getLocationConstraintType(sourceLocationConstraintName); + const dstType = + config.getLocationConstraintType(destLocationConstraintName); + const locationTypeMatch = + config.backends.data === 'multiple' && srcType === dstType && + constants.hasCopyPartBackends[srcType]; // NOTE: using multipleBackendGateway.uploadPartCopy only if copying - // from AWS to AWS - - if (locationTypeMatchAWS && dataLocator.length === 1) { - const awsSourceKey = dataLocator[0].key; + // from AWS to AWS or from GCP to GCP + if (locationTypeMatch && dataLocator.length === 1) { + const sourceKey = dataLocator[0].key; return multipleBackendGateway.uploadPartCopy(request, - destLocationConstraintName, awsSourceKey, + destLocationConstraintName, sourceKey, sourceLocationConstraintName, log, (error, eTag) => { if (error) { return callback(error); } - return callback(skipError, eTag, + const doSkip = srcType === 'aws_s3' ? skipError : null; + return callback(doSkip, eTag, lastModified, serverSideEncryption); }); } diff --git a/tests/functional/aws-node-sdk/test/multipleBackend/initMPU/initMPUGcp.js b/tests/functional/aws-node-sdk/test/multipleBackend/initMPU/initMPUGcp.js new file mode 100644 index 0000000000..f3c9df933c --- /dev/null +++ b/tests/functional/aws-node-sdk/test/multipleBackend/initMPU/initMPUGcp.js @@ -0,0 +1,93 @@ +const async = require('async'); +const assert = require('assert'); + +const withV4 = require('../../support/withV4'); +const BucketUtility = require('../../../lib/utility/bucket-util'); +const { describeSkipIfNotMultiple, gcpClient, gcpBucketMPU, gcpLocation } = + require('../utils'); +const { createMpuKey } = + require('../../../../../../lib/data/external/GCP').GcpUtils; + +const bucket = 'buckettestmultiplebackendinitmpu-gcp'; +const keyName = `somekey-${Date.now()}`; + +let s3; +let bucketUtil; + +describeSkipIfNotMultiple('Initiate MPU to GCP', () => { + withV4(sigCfg => { + beforeEach(() => { + bucketUtil = new BucketUtility('default', sigCfg); + s3 = bucketUtil.s3; + }); + + afterEach(() => { + process.stdout.write('Emptying bucket\n'); + return bucketUtil.empty(bucket) + .then(() => { + process.stdout.write('Deleting bucket\n'); + return bucketUtil.deleteOne(bucket); + }) + .catch(err => { + process.stdout.write(`Error in afterEach: ${err}\n`); + throw err; + }); + }); + describe('Basic test: ', () => { + beforeEach(done => + s3.createBucket({ Bucket: bucket, + CreateBucketConfiguration: { + LocationConstraint: gcpLocation, + }, + }, done)); + afterEach(function afterEachF(done) { + const params = { + Bucket: bucket, + Key: keyName, + UploadId: this.currentTest.uploadId, + }; + s3.abortMultipartUpload(params, done); + }); + it('should create MPU and list in-progress multipart uploads', + function ifF(done) { + const params = { + Bucket: bucket, + Key: keyName, + Metadata: { 'scal-location-constraint': gcpLocation }, + }; + async.waterfall([ + next => s3.createMultipartUpload(params, (err, res) => { + this.test.uploadId = res.UploadId; + assert(this.test.uploadId); + assert.strictEqual(res.Bucket, bucket); + assert.strictEqual(res.Key, keyName); + next(err); + }), + next => s3.listMultipartUploads( + { Bucket: bucket }, (err, res) => { + assert.strictEqual(res.NextKeyMarker, keyName); + assert.strictEqual(res.NextUploadIdMarker, + this.test.uploadId); + 
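// the bucket is fresh, so the listing should contain exactly this upload + 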
assert.strictEqual(res.Uploads[0].Key, keyName); + assert.strictEqual(res.Uploads[0].UploadId, + this.test.uploadId); + next(err); + }), + next => { + const mpuKey = + createMpuKey(keyName, this.test.uploadId, 'init'); + const params = { + Bucket: gcpBucketMPU, + Key: mpuKey, + }; + gcpClient.getObject(params, err => { + assert.ifError(err, + `Expected success, but got err ${err}`); + next(); + }); + }, + ], done); + }); + }); + }); +}); diff --git a/tests/functional/aws-node-sdk/test/multipleBackend/listParts/listPartsGcp.js b/tests/functional/aws-node-sdk/test/multipleBackend/listParts/listPartsGcp.js new file mode 100644 index 0000000000..7a521f3fe2 --- /dev/null +++ b/tests/functional/aws-node-sdk/test/multipleBackend/listParts/listPartsGcp.js @@ -0,0 +1,96 @@ +const assert = require('assert'); + +const withV4 = require('../../support/withV4'); +const BucketUtility = require('../../../lib/utility/bucket-util'); +const { describeSkipIfNotMultiple, gcpLocation } + = require('../utils'); + +const bucket = 'buckettestmultiplebackendlistparts-gcp'; +const firstPartSize = 10; +const bodyFirstPart = Buffer.alloc(firstPartSize); +const secondPartSize = 15; +const bodySecondPart = Buffer.alloc(secondPartSize); + +let bucketUtil; +let s3; + +describeSkipIfNotMultiple('List parts of MPU on GCP data backend', () => { + withV4(sigCfg => { + beforeEach(function beforeEachFn() { + this.currentTest.key = `somekey-${Date.now()}`; + bucketUtil = new BucketUtility('default', sigCfg); + s3 = bucketUtil.s3; + return s3.createBucketAsync({ Bucket: bucket }) + .then(() => s3.createMultipartUploadAsync({ + Bucket: bucket, Key: this.currentTest.key, + Metadata: { 'scal-location-constraint': gcpLocation } })) + .then(res => { + this.currentTest.uploadId = res.UploadId; + return s3.uploadPartAsync({ Bucket: bucket, + Key: this.currentTest.key, PartNumber: 1, + UploadId: this.currentTest.uploadId, Body: bodyFirstPart }); + }).then(res => { + this.currentTest.firstEtag = res.ETag; + }).then(() => s3.uploadPartAsync({ Bucket: bucket, + Key: this.currentTest.key, PartNumber: 2, + UploadId: this.currentTest.uploadId, Body: bodySecondPart }) + ).then(res => { + this.currentTest.secondEtag = res.ETag; + }) + .catch(err => { + process.stdout.write(`Error in beforeEach: ${err}\n`); + throw err; + }); + }); + + afterEach(function afterEachFn() { + process.stdout.write('Emptying bucket'); + return s3.abortMultipartUploadAsync({ + Bucket: bucket, Key: this.currentTest.key, + UploadId: this.currentTest.uploadId, + }) + .then(() => bucketUtil.empty(bucket)) + .then(() => { + process.stdout.write('Deleting bucket'); + return bucketUtil.deleteOne(bucket); + }) + .catch(err => { + process.stdout.write('Error in afterEach'); + throw err; + }); + }); + + it('should list both parts', function itFn(done) { + s3.listParts({ + Bucket: bucket, + Key: this.test.key, + UploadId: this.test.uploadId }, + (err, data) => { + assert.equal(err, null, `Err listing parts: ${err}`); + assert.strictEqual(data.Parts.length, 2); + assert.strictEqual(data.Parts[0].PartNumber, 1); + assert.strictEqual(data.Parts[0].Size, firstPartSize); + assert.strictEqual(data.Parts[0].ETag, this.test.firstEtag); + assert.strictEqual(data.Parts[1].PartNumber, 2); + assert.strictEqual(data.Parts[1].Size, secondPartSize); + assert.strictEqual(data.Parts[1].ETag, this.test.secondEtag); + done(); + }); + }); + + it('should only list the second part', function itFn(done) { + s3.listParts({ + Bucket: bucket, + Key: this.test.key, + PartNumberMarker: 1, + UploadId: 
this.test.uploadId }, + (err, data) => { + assert.equal(err, null, `Err listing parts: ${err}`); + assert.strictEqual(data.Parts[0].PartNumber, 2); + assert.strictEqual(data.Parts[0].Size, secondPartSize); + assert.strictEqual(data.Parts[0].ETag, this.test.secondEtag); + done(); + }); + }); + }); +}); diff --git a/tests/functional/aws-node-sdk/test/multipleBackend/mpuAbort/abortMPUGcp.js b/tests/functional/aws-node-sdk/test/multipleBackend/mpuAbort/abortMPUGcp.js new file mode 100644 index 0000000000..d5fe84269a --- /dev/null +++ b/tests/functional/aws-node-sdk/test/multipleBackend/mpuAbort/abortMPUGcp.js @@ -0,0 +1,193 @@ +const assert = require('assert'); +const async = require('async'); + +const withV4 = require('../../support/withV4'); +const BucketUtility = require('../../../lib/utility/bucket-util'); +const { describeSkipIfNotMultiple, gcpClient, gcpBucket, gcpBucketMPU, + gcpLocation, uniqName } = require('../utils'); + +const keyObject = 'abortgcp'; +const bucket = 'buckettestmultiplebackendabortmpu-gcp'; +const body = Buffer.from('I am a body', 'utf8'); +const correctMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a'; +const gcpTimeout = 5000; + +let bucketUtil; +let s3; + +function checkMPUList(bucket, key, uploadId, cb) { + const params = { + Bucket: bucket, + Key: key, + UploadId: uploadId, + }; + gcpClient.listParts(params, (err, res) => { + assert.ifError(err, + `Expected success, but got err ${err}`); + assert.deepStrictEqual(res.Contents, [], + 'Expected 0 parts, listed some'); + cb(); + }); +} + +describeSkipIfNotMultiple('Abort MPU on GCP data backend', function +describeFn() { + this.timeout(180000); + withV4(sigCfg => { + beforeEach(function beforeFn() { + this.currentTest.key = uniqName(keyObject); + bucketUtil = new BucketUtility('default', sigCfg); + s3 = bucketUtil.s3; + }); + + describe('with bucket location header', () => { + beforeEach(function beforeEachFn(done) { + async.waterfall([ + next => s3.createBucket({ Bucket: bucket }, + err => next(err)), + next => s3.createMultipartUpload({ + Bucket: bucket, + Key: this.currentTest.key, + Metadata: { 'scal-location-constraint': gcpLocation }, + }, (err, res) => { + if (err) { + return next(err); + } + this.currentTest.uploadId = res.UploadId; + return next(); + }), + ], done); + }); + + afterEach(done => s3.deleteBucket({ Bucket: bucket }, + done)); + + it('should abort an MPU with 0 parts', function itFn(done) { + const params = { + Bucket: bucket, + Key: this.test.key, + UploadId: this.test.uploadId, + }; + async.waterfall([ + next => s3.abortMultipartUpload(params, () => next()), + next => setTimeout(() => checkMPUList( + gcpBucketMPU, this.test.key, this.test.uploadId, next), + gcpTimeout), + ], done); + }); + + it('should abort an MPU with uploaded parts', function itFn(done) { + const params = { + Bucket: bucket, + Key: this.test.key, + UploadId: this.test.uploadId, + }; + async.waterfall([ + next => { + async.times(2, (n, cb) => { + const params = { + Bucket: bucket, + Key: this.test.key, + UploadId: this.test.uploadId, + Body: body, + PartNumber: n + 1, + }; + s3.uploadPart(params, (err, res) => { + assert.ifError(err, + `Expected success, but got err ${err}`); + assert.strictEqual( + res.ETag, `"${correctMD5}"`); + cb(); + }); + }, () => next()); + }, + next => s3.abortMultipartUpload(params, () => next()), + next => setTimeout(() => checkMPUList( + gcpBucketMPU, this.test.key, this.test.uploadId, next), + gcpTimeout), + ], done); + }); + }); + + describe('with previously existing object with same key', () => { + 
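// assumption under test: aborting the MPU must not delete a regular object + // that shares the same key; the object is read back from GCP after the abort + 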
beforeEach(function beforeEachFn(done) { + async.waterfall([ + next => s3.createBucket({ Bucket: bucket }, + err => next(err)), + next => { + s3.putObject({ + Bucket: bucket, + Key: this.currentTest.key, + Metadata: { + 'scal-location-constraint': gcpLocation }, + Body: body, + }, err => { + assert.ifError(err, + `Expected success, got error: ${err}`); + return next(); + }); + }, + next => s3.createMultipartUpload({ + Bucket: bucket, + Key: this.currentTest.key, + Metadata: { 'scal-location-constraint': gcpLocation }, + }, (err, res) => { + if (err) { + return next(err); + } + this.currentTest.uploadId = res.UploadId; + return next(); + }), + ], done); + }); + + afterEach(() => { + process.stdout.write('Emptying bucket\n'); + return bucketUtil.empty(bucket) + .then(() => { + process.stdout.write('Deleting bucket\n'); + return bucketUtil.deleteOne(bucket); + }) + .catch(err => { + process.stdout.write('Error emptying/deleting bucket: ' + + `${err}\n`); + throw err; + }); + }); + + it('should abort MPU without deleting existing object', + function itFn(done) { + const params = { + Bucket: bucket, + Key: this.test.key, + UploadId: this.test.uploadId, + }; + async.waterfall([ + next => { + const body = Buffer.alloc(10); + const partParams = Object.assign( + { PartNumber: 1, Body: body }, params); + s3.uploadPart(partParams, err => { + assert.ifError(err, + `Expected success, got error: ${err}`); + return next(); + }); + }, + next => s3.abortMultipartUpload(params, () => next()), + next => setTimeout(() => { + const params = { + Bucket: gcpBucket, + Key: this.test.key, + }; + gcpClient.getObject(params, (err, res) => { + assert.ifError(err, + `Expected success, got error: ${err}`); + assert.strictEqual(res.ETag, `"${correctMD5}"`); + next(); + }); + }, gcpTimeout), + ], done); + }); + }); + }); +}); diff --git a/tests/functional/aws-node-sdk/test/multipleBackend/mpuComplete/completeMPUGcp.js b/tests/functional/aws-node-sdk/test/multipleBackend/mpuComplete/completeMPUGcp.js new file mode 100644 index 0000000000..4b6b139d91 --- /dev/null +++ b/tests/functional/aws-node-sdk/test/multipleBackend/mpuComplete/completeMPUGcp.js @@ -0,0 +1,247 @@ +const assert = require('assert'); +const async = require('async'); + +const withV4 = require('../../support/withV4'); +const BucketUtility = require('../../../lib/utility/bucket-util'); +const { describeSkipIfNotMultiple, fileLocation, awsS3, awsLocation, awsBucket, + gcpClient, gcpBucket, gcpLocation, gcpLocationMismatch } = + require('../utils'); + +const bucket = 'buckettestmultiplebackendcompletempu-gcp'; +const smallBody = Buffer.from('I am a body', 'utf8'); +const bigBody = Buffer.alloc(10485760); +const s3MD5 = 'bfb875032e51cbe2a60c5b6b99a2153f-2'; +const expectedContentLength = '10485771'; +const gcpTimeout = 5000; + +let s3; +let bucketUtil; + +function getCheck(key, bucketMatch, cb) { + let gcpKey = key; + s3.getObject({ Bucket: bucket, Key: gcpKey }, + (err, s3Res) => { + assert.equal(err, null, `Err getting object from S3: ${err}`); + assert.strictEqual(s3Res.ETag, `"${s3MD5}"`); + + if (!bucketMatch) { + gcpKey = `${bucket}/${gcpKey}`; + } + const params = { Bucket: gcpBucket, Key: gcpKey }; + gcpClient.getObject(params, (err, gcpRes) => { + assert.equal(err, null, `Err getting object from GCP: ${err}`); + assert.strictEqual(expectedContentLength, gcpRes.ContentLength); + cb(); + }); + }); +} + +function mpuSetup(key, location, cb) { + const partArray = []; + async.waterfall([ + next => { + const params = { + Bucket: bucket, + Key: key, + 
Metadata: { 'scal-location-constraint': location }, + }; + s3.createMultipartUpload(params, (err, res) => { + const uploadId = res.UploadId; + assert(uploadId); + assert.strictEqual(res.Bucket, bucket); + assert.strictEqual(res.Key, key); + next(err, uploadId); + }); + }, + (uploadId, next) => { + const partParams = { + Bucket: bucket, + Key: key, + PartNumber: 1, + UploadId: uploadId, + Body: smallBody, + }; + s3.uploadPart(partParams, (err, res) => { + partArray.push({ ETag: res.ETag, PartNumber: 1 }); + next(err, uploadId); + }); + }, + (uploadId, next) => { + const partParams = { + Bucket: bucket, + Key: key, + PartNumber: 2, + UploadId: uploadId, + Body: bigBody, + }; + s3.uploadPart(partParams, (err, res) => { + partArray.push({ ETag: res.ETag, PartNumber: 2 }); + next(err, uploadId); + }); + }, + ], (err, uploadId) => { + process.stdout.write('Created MPU and put two parts\n'); + assert.equal(err, null, `Err setting up MPU: ${err}`); + cb(uploadId, partArray); + }); +} + +describeSkipIfNotMultiple('Complete MPU API for GCP data backend', +function testSuite() { + this.timeout(150000); + withV4(sigCfg => { + beforeEach(function beFn() { + this.currentTest.key = `somekey-${Date.now()}`; + bucketUtil = new BucketUtility('default', sigCfg); + s3 = bucketUtil.s3; + this.currentTest.awsClient = awsS3; + return s3.createBucketAsync({ Bucket: bucket }) + .catch(err => { + process.stdout.write(`Error creating bucket: ${err}\n`); + throw err; + }); + }); + + afterEach(() => { + process.stdout.write('Emptying bucket\n'); + return bucketUtil.empty(bucket) + .then(() => { + process.stdout.write('Deleting bucket\n'); + return bucketUtil.deleteOne(bucket); + }) + .catch(err => { + process.stdout.write(`Error in afterEach: ${err}\n`); + throw err; + }); + }); + + it('should complete an MPU on GCP', function itFn(done) { + mpuSetup(this.test.key, gcpLocation, (uploadId, partArray) => { + const params = { + Bucket: bucket, + Key: this.test.key, + UploadId: uploadId, + MultipartUpload: { Parts: partArray }, + }; + setTimeout(() => { + s3.completeMultipartUpload(params, err => { + assert.equal(err, null, + `Err completing MPU: ${err}`); + getCheck(this.test.key, true, done); + }); + }, gcpTimeout); + }); + }); + + it('should complete an MPU on GCP with bucketMatch=false', + function itFn(done) { + mpuSetup(this.test.key, gcpLocationMismatch, + (uploadId, partArray) => { + const params = { + Bucket: bucket, + Key: this.test.key, + UploadId: uploadId, + MultipartUpload: { Parts: partArray }, + }; + setTimeout(() => { + s3.completeMultipartUpload(params, err => { + assert.equal(err, null, + `Err completing MPU: ${err}`); + getCheck(this.test.key, false, done); + }); + }, gcpTimeout); + }); + }); + + it('should complete an MPU on GCP with same key as object put ' + + 'to file', function itFn(done) { + const body = Buffer.from('I am a body', 'utf8'); + s3.putObject({ + Bucket: bucket, + Key: this.test.key, + Body: body, + Metadata: { 'scal-location-constraint': fileLocation } }, + err => { + assert.equal(err, null, `Err putting object to file: ${err}`); + mpuSetup(this.test.key, gcpLocation, + (uploadId, partArray) => { + const params = { + Bucket: bucket, + Key: this.test.key, + UploadId: uploadId, + MultipartUpload: { Parts: partArray }, + }; + setTimeout(() => { + s3.completeMultipartUpload(params, err => { + assert.equal(err, null, + `Err completing MPU: ${err}`); + getCheck(this.test.key, true, done); + }); + }, gcpTimeout); + }); + }); + }); + + it('should complete an MPU on GCP with same key as 
object put ' + + 'to GCP', function itFn(done) { + const body = Buffer.from('I am a body', 'utf8'); + s3.putObject({ + Bucket: bucket, + Key: this.test.key, + Body: body, + Metadata: { 'scal-location-constraint': gcpLocation } }, + err => { + assert.equal(err, null, `Err putting object to GCP: ${err}`); + mpuSetup(this.test.key, gcpLocation, + (uploadId, partArray) => { + const params = { + Bucket: bucket, + Key: this.test.key, + UploadId: uploadId, + MultipartUpload: { Parts: partArray }, + }; + setTimeout(() => { + s3.completeMultipartUpload(params, err => { + assert.equal(err, null, + `Err completing MPU: ${err}`); + getCheck(this.test.key, true, done); + }); + }, gcpTimeout); + }); + }); + }); + + it('should complete an MPU on GCP with same key as object put ' + + 'to AWS', function itFn(done) { + const body = Buffer.from('I am a body', 'utf8'); + s3.putObject({ + Bucket: bucket, + Key: this.test.key, + Body: body, + Metadata: { 'scal-location-constraint': awsLocation } }, + err => { + assert.equal(err, null, `Err putting object to AWS: ${err}`); + mpuSetup(this.test.key, gcpLocation, + (uploadId, partArray) => { + const params = { + Bucket: bucket, + Key: this.test.key, + UploadId: uploadId, + MultipartUpload: { Parts: partArray }, + }; + s3.completeMultipartUpload(params, err => { + assert.equal(err, null, `Err completing MPU: ${err}`); + // make sure object is gone from AWS + setTimeout(() => { + this.test.awsClient.getObject({ Bucket: awsBucket, + Key: this.test.key }, err => { + assert.strictEqual(err.code, 'NoSuchKey'); + getCheck(this.test.key, true, done); + }); + }, gcpTimeout); + }); + }); + }); + }); + }); +}); diff --git a/tests/functional/aws-node-sdk/test/multipleBackend/mpuParts/putPartGcp.js b/tests/functional/aws-node-sdk/test/multipleBackend/mpuParts/putPartGcp.js new file mode 100644 index 0000000000..e3928f9f76 --- /dev/null +++ b/tests/functional/aws-node-sdk/test/multipleBackend/mpuParts/putPartGcp.js @@ -0,0 +1,338 @@ +const assert = require('assert'); +const async = require('async'); + +const withV4 = require('../../support/withV4'); +const BucketUtility = require('../../../lib/utility/bucket-util'); +const { describeSkipIfNotMultiple, gcpClient, gcpBucket, gcpBucketMPU, + gcpLocation, gcpLocationMismatch, uniqName } = require('../utils'); +const { createMpuKey } = + require('../../../../../../lib/data/external/GCP').GcpUtils; + +const keyObject = 'putgcp'; +const bucket = 'buckettestmultiplebackendputpart-gcp'; +const body = Buffer.from('I am a body', 'utf8'); +const correctMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a'; +const emptyMD5 = 'd41d8cd98f00b204e9800998ecf8427e'; + +let bucketUtil; +let s3; + +function checkMPUResult(bucket, key, uploadId, objCount, expected, cb) { + const params = { + Bucket: bucket, + Key: key, + UploadId: uploadId, + }; + gcpClient.listParts(params, (err, res) => { + assert.ifError(err, + `Expected success, but got err ${err}`); + assert((res && res.Contents && + res.Contents.length === objCount)); + res.Contents.forEach(part => { + assert.strictEqual( + part.ETag, `"${expected}"`); + }); + cb(); + }); +} + +describeSkipIfNotMultiple('MultipleBackend put part to GCP', function +describeFn() { + this.timeout(180000); + withV4(sigCfg => { + beforeEach(function beforeFn() { + this.currentTest.key = uniqName(keyObject); + bucketUtil = new BucketUtility('default', sigCfg); + s3 = bucketUtil.s3; + }); + + describe('with bucket location header', () => { + beforeEach(function beforeEachFn(done) { + async.waterfall([ + next => s3.createBucket({ 
Bucket: bucket, + }, err => next(err)), + next => s3.createMultipartUpload({ + Bucket: bucket, + Key: this.currentTest.key, + Metadata: { 'scal-location-constraint': gcpLocation }, + }, (err, res) => { + if (err) { + return next(err); + } + this.currentTest.uploadId = res.UploadId; + return next(); + }), + ], done); + }); + + afterEach(function afterEachFn(done) { + async.waterfall([ + next => s3.abortMultipartUpload({ + Bucket: bucket, + Key: this.currentTest.key, + UploadId: this.currentTest.uploadId, + }, err => next(err)), + next => s3.deleteBucket({ Bucket: bucket }, + err => next(err)), + ], err => { + assert.equal(err, null, `Error aborting MPU: ${err}`); + done(); + }); + }); + + it('should put 0-byte part to GCP', function itFn(done) { + const params = { + Bucket: bucket, + Key: this.test.key, + UploadId: this.test.uploadId, + PartNumber: 1, + }; + async.waterfall([ + next => s3.uploadPart(params, (err, res) => { + assert.ifError(err, + `Expected success, but got err ${err}`); + assert.strictEqual(res.ETag, `"${emptyMD5}"`); + next(); + }), + next => { + const mpuKey = + createMpuKey(this.test.key, this.test.uploadId, 1); + const getParams = { + Bucket: gcpBucketMPU, + Key: mpuKey, + }; + gcpClient.getObject(getParams, (err, res) => { + assert.ifError(err, + `Expected success, but got err ${err}`); + assert.strictEqual(res.ETag, `"${emptyMD5}"`); + next(); + }); + }, + ], done); + }); + + it('should put 2 parts to GCP', function ifFn(done) { + async.waterfall([ + next => { + async.times(2, (n, cb) => { + const params = { + Bucket: bucket, + Key: this.test.key, + UploadId: this.test.uploadId, + Body: body, + PartNumber: n + 1, + }; + s3.uploadPart(params, (err, res) => { + assert.ifError(err, + `Expected success, but got err ${err}`); + assert.strictEqual( + res.ETag, `"${correctMD5}"`); + cb(); + }); + }, () => next()); + }, + next => checkMPUResult( + gcpBucketMPU, this.test.key, this.test.uploadId, + 2, correctMD5, next), + ], done); + }); + + it('should put the same part twice', function ifFn(done) { + async.waterfall([ + next => { + const partBody = ['', body]; + const partMD5 = [emptyMD5, correctMD5]; + async.timesSeries(2, (n, cb) => { + const params = { + Bucket: bucket, + Key: this.test.key, + UploadId: this.test.uploadId, + Body: partBody[n], + PartNumber: 1, + }; + s3.uploadPart(params, (err, res) => { + assert.ifError(err, + `Expected success, but got err ${err}`); + assert.strictEqual( + res.ETag, `"${partMD5[n]}"`); + cb(); + }); + }, () => next()); + }, + next => checkMPUResult( + gcpBucketMPU, this.test.key, this.test.uploadId, + 1, correctMD5, next), + ], done); + }); + }); + + describe('with same key as preexisting part', () => { + beforeEach(function beforeEachFn(done) { + async.waterfall([ + next => s3.createBucket({ Bucket: bucket }, + err => next(err)), + next => { + s3.putObject({ + Bucket: bucket, + Key: this.currentTest.key, + Metadata: { + 'scal-location-constraint': gcpLocation }, + Body: body, + }, err => { + assert.equal(err, null, 'Err putting object to ' + + `GCP: ${err}`); + return next(); + }); + }, + next => s3.createMultipartUpload({ + Bucket: bucket, + Key: this.currentTest.key, + Metadata: { 'scal-location-constraint': gcpLocation }, + }, (err, res) => { + if (err) { + return next(err); + } + this.currentTest.uploadId = res.UploadId; + return next(); + }), + ], done); + }); + + afterEach(function afterEachFn(done) { + async.waterfall([ + next => { + process.stdout.write('Aborting multipart upload\n'); + s3.abortMultipartUpload({ + Bucket: 
bucket, + Key: this.currentTest.key, + UploadId: this.currentTest.uploadId }, + err => next(err)); + }, + next => { + process.stdout.write('Deleting object\n'); + s3.deleteObject({ + Bucket: bucket, + Key: this.currentTest.key }, + err => next(err)); + }, + next => { + process.stdout.write('Deleting bucket\n'); + s3.deleteBucket({ + Bucket: bucket }, + err => next(err)); + }, + ], err => { + assert.equal(err, null, `Err in afterEach: ${err}`); + done(); + }); + }); + + it('should put a part without overwriting existing object', + function itFn(done) { + const body = Buffer.alloc(20); + s3.uploadPart({ + Bucket: bucket, + Key: this.test.key, + UploadId: this.test.uploadId, + PartNumber: 1, + Body: body, + }, err => { + assert.strictEqual(err, null, 'Err putting part to ' + + `GCP: ${err}`); + gcpClient.getObject({ + Bucket: gcpBucket, + Key: this.test.key, + }, (err, res) => { + assert.ifError(err, + `Expected success, but got err ${err}`); + assert.strictEqual(res.ETag, `"${correctMD5}"`); + done(); + }); + }); + }); + }); + }); +}); + +describeSkipIfNotMultiple('MultipleBackend put part to GCP location with ' + +'bucketMatch set to false', function +describeF() { + this.timeout(80000); + withV4(sigCfg => { + beforeEach(function beforeFn() { + this.currentTest.key = uniqName(keyObject); + bucketUtil = new BucketUtility('default', sigCfg); + s3 = bucketUtil.s3; + }); + describe('with bucket location header', () => { + beforeEach(function beforeEachFn(done) { + async.waterfall([ + next => s3.createBucket({ Bucket: bucket, + }, err => next(err)), + next => s3.createMultipartUpload({ + Bucket: bucket, + Key: this.currentTest.key, + Metadata: { 'scal-location-constraint': + gcpLocationMismatch }, + }, (err, res) => { + if (err) { + return next(err); + } + this.currentTest.uploadId = res.UploadId; + return next(); + }), + ], done); + }); + + afterEach(function afterEachFn(done) { + async.waterfall([ + next => s3.abortMultipartUpload({ + Bucket: bucket, + Key: this.currentTest.key, + UploadId: this.currentTest.uploadId, + }, err => next(err)), + next => s3.deleteBucket({ Bucket: bucket }, + err => next(err)), + ], err => { + assert.equal(err, null, `Error aborting MPU: ${err}`); + done(); + }); + }); + + it('should put part to GCP location with bucketMatch' + + ' set to false', function itFn(done) { + const body20 = Buffer.alloc(20); + const params = { + Bucket: bucket, + Key: this.test.key, + UploadId: this.test.uploadId, + PartNumber: 1, + Body: body20, + }; + const eTagExpected = + '"441018525208457705bf09a8ee3c1093"'; + async.waterfall([ + next => s3.uploadPart(params, (err, res) => { + assert.strictEqual(res.ETag, eTagExpected); + next(err); + }), + next => { + const key = + createMpuKey(this.test.key, this.test.uploadId, 1); + const mpuKey = `${bucket}/${key}`; + const getParams = { + Bucket: gcpBucketMPU, + Key: mpuKey, + }; + gcpClient.getObject(getParams, (err, res) => { + assert.ifError(err, + `Expected success, but got err ${err}`); + assert.strictEqual(res.ETag, eTagExpected); + next(); + }); + }, + ], done); + }); + }); + }); +}); diff --git a/tests/functional/aws-node-sdk/test/multipleBackend/objectPutCopyPart/objectPutCopyPartGcp.js b/tests/functional/aws-node-sdk/test/multipleBackend/objectPutCopyPart/objectPutCopyPartGcp.js new file mode 100644 index 0000000000..8aa5567536 --- /dev/null +++ b/tests/functional/aws-node-sdk/test/multipleBackend/objectPutCopyPart/objectPutCopyPartGcp.js @@ -0,0 +1,692 @@ +const async = require('async'); +const assert = require('assert'); + +const 
{ config } = require('../../../../../../lib/Config'); +const withV4 = require('../../support/withV4'); +const BucketUtility = require('../../../lib/utility/bucket-util'); +const { describeSkipIfNotMultiple, uniqName, gcpBucketMPU, + gcpClient, gcpLocation, gcpLocationMismatch, memLocation, + awsLocation, awsS3, getOwnerInfo } = require('../utils'); + +const bucket = 'buckettestmultiplebackendpartcopy-gcp'; + +const memBucketName = 'membucketnameputcopypartgcp'; +const awsBucketName = 'awsbucketnameputcopypartgcp'; + +const normalBodySize = 11; +const normalBody = Buffer.from('I am a body', 'utf8'); +const normalMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a'; + +const sixBytesMD5 = 'c978a461602f0372b5f970157927f723'; + +const oneKb = 1024; +const oneKbBody = Buffer.alloc(oneKb); +const oneKbMD5 = '0f343b0931126a20f133d67c2b018a3b'; + +const fiveMB = 5 * 1024 * 1024; +const fiveMbBody = Buffer.alloc(fiveMB); +const fiveMbMD5 = '5f363e0e58a95f06cbe9bbc662c5dfb6'; + +const keyObjectGcp = 'objectputcopypartgcp'; +const keyObjectMemory = 'objectputcopypartMemory'; +const keyObjectAWS = 'objectputcopypartAWS'; + +const { ownerID, ownerDisplayName } = getOwnerInfo('account1'); + +const result = { + Bucket: '', + Key: '', + UploadId: '', + MaxParts: 1000, + IsTruncated: false, + Parts: [], + Initiator: + { ID: ownerID, + DisplayName: ownerDisplayName }, + Owner: + { DisplayName: ownerDisplayName, + ID: ownerID }, + StorageClass: 'STANDARD', +}; + +let s3; +let bucketUtil; + +function assertCopyPart(infos, cb) { + const { bucketName, keyName, uploadId, md5, totalSize } = infos; + const resultCopy = JSON.parse(JSON.stringify(result)); + resultCopy.Bucket = bucketName; + resultCopy.Key = keyName; + resultCopy.UploadId = uploadId; + async.waterfall([ + next => s3.listParts({ + Bucket: bucketName, + Key: keyName, + UploadId: uploadId, + }, (err, res) => { + assert.ifError(err, 'listParts: Expected success,' + + ` got error: ${err}`); + resultCopy.Parts = + [{ PartNumber: 1, + LastModified: res.Parts[0].LastModified, + ETag: `"${md5}"`, + Size: totalSize }]; + assert.deepStrictEqual(res, resultCopy); + next(); + }), + next => gcpClient.listParts({ + Bucket: gcpBucketMPU, + Key: keyName, + UploadId: uploadId, + }, (err, res) => { + assert.ifError(err, 'GCP listParts: Expected success,' + + `got error: ${err}`); + assert.strictEqual(res.Contents[0].ETag, `"${md5}"`); + next(); + }), + ], cb); +} + +describeSkipIfNotMultiple('Put Copy Part to GCP', function describeFn() { + this.timeout(800000); + withV4(sigCfg => { + beforeEach(done => { + bucketUtil = new BucketUtility('default', sigCfg); + s3 = bucketUtil.s3; + s3.createBucket({ Bucket: bucket, + CreateBucketConfiguration: { + LocationConstraint: gcpLocation, + }, + }, done); + }); + + afterEach(() => { + process.stdout.write('Emptying bucket\n'); + return bucketUtil.empty(bucket) + .then(() => bucketUtil.empty(memBucketName)) + .then(() => { + process.stdout.write(`Deleting bucket ${bucket}\n`); + return bucketUtil.deleteOne(bucket); + }) + .then(() => { + process.stdout.write(`Deleting bucket ${memBucketName}\n`); + return bucketUtil.deleteOne(memBucketName); + }) + .catch(err => { + process.stdout.write(`Error in afterEach: ${err}\n`); + throw err; + }); + }); + + describe('Basic test: ', () => { + beforeEach(function beforeFn(done) { + this.currentTest.keyNameNormalGcp = + `normalgcp${uniqName(keyObjectGcp)}`; + this.currentTest.keyNameNormalGcpMismatch = + `normalgcpmismatch${uniqName(keyObjectGcp)}`; + + this.currentTest.keyNameFiveMbGcp = + 
`fivembgcp${uniqName(keyObjectGcp)}`; + this.currentTest.keyNameFiveMbMem = + `fivembmem${uniqName(keyObjectMemory)}`; + + this.currentTest.mpuKeyNameGcp = + `mpukeyname${uniqName(keyObjectGcp)}`; + this.currentTest.mpuKeyNameMem = + `mpukeyname${uniqName(keyObjectMemory)}`; + this.currentTest.mpuKeyNameAWS = + `mpukeyname${uniqName(keyObjectAWS)}`; + const paramsGcp = { + Bucket: bucket, + Key: this.currentTest.mpuKeyNameGcp, + Metadata: { 'scal-location-constraint': gcpLocation }, + }; + const paramsMem = { + Bucket: memBucketName, + Key: this.currentTest.mpuKeyNameMem, + Metadata: { 'scal-location-constraint': memLocation }, + }; + const paramsAWS = { + Bucket: memBucketName, + Key: this.currentTest.mpuKeyNameAWS, + Metadata: { 'scal-location-constraint': awsLocation }, + }; + async.waterfall([ + next => s3.createBucket({ Bucket: bucket }, + err => next(err)), + next => s3.createBucket({ Bucket: memBucketName }, + err => next(err)), + next => s3.putObject({ + Bucket: bucket, + Key: this.currentTest.keyNameNormalGcp, + Body: normalBody, + Metadata: { 'scal-location-constraint': gcpLocation }, + }, err => next(err)), + next => s3.putObject({ + Bucket: bucket, + Key: this.currentTest.keyNameNormalGcpMismatch, + Body: normalBody, + Metadata: { 'scal-location-constraint': + gcpLocationMismatch }, + }, err => next(err)), + next => s3.putObject({ + Bucket: bucket, + Key: this.currentTest.keyNameFiveMbGcp, + Body: fiveMbBody, + Metadata: { 'scal-location-constraint': gcpLocation }, + }, err => next(err)), + next => s3.putObject({ + Bucket: bucket, + Key: this.currentTest.keyNameFiveMbMem, + Body: fiveMbBody, + Metadata: { 'scal-location-constraint': memLocation }, + }, err => next(err)), + next => s3.createMultipartUpload(paramsGcp, + (err, res) => { + assert.ifError(err, 'createMultipartUpload ' + + `on gcp: Expected success, got error: ${err}`); + this.currentTest.uploadId = res.UploadId; + next(); + }), + next => s3.createMultipartUpload(paramsMem, + (err, res) => { + assert.ifError(err, 'createMultipartUpload ' + + `in memory: Expected success, got error: ${err}`); + this.currentTest.uploadIdMem = res.UploadId; + next(); + }), + next => s3.createMultipartUpload(paramsAWS, + (err, res) => { + assert.ifError(err, 'createMultipartUpload ' + + `on AWS: Expected success, got error: ${err}`); + this.currentTest.uploadIdAWS = res.UploadId; + next(); + }), + ], done); + }); + + afterEach(function afterFn(done) { + const paramsGcp = { + Bucket: bucket, + Key: this.currentTest.mpuKeyNameGcp, + UploadId: this.currentTest.uploadId, + }; + const paramsMem = { + Bucket: memBucketName, + Key: this.currentTest.mpuKeyNameMem, + UploadId: this.currentTest.uploadIdMem, + }; + const paramsAWS = { + Bucket: memBucketName, + Key: this.currentTest.mpuKeyNameAWS, + UploadId: this.currentTest.uploadIdAWS, + }; + async.waterfall([ + next => s3.abortMultipartUpload(paramsGcp, + err => next(err)), + next => s3.abortMultipartUpload(paramsMem, + err => next(err)), + next => s3.abortMultipartUpload(paramsAWS, + err => next(err)), + ], done); + }); + + it('should copy small part from GCP to MPU with GCP location', + function itFn(done) { + const params = { + Bucket: bucket, + CopySource: + `${bucket}/${this.test.keyNameNormalGcp}`, + Key: this.test.mpuKeyNameGcp, + PartNumber: 1, + UploadId: this.test.uploadId, + }; + async.waterfall([ + next => s3.uploadPartCopy(params, (err, res) => { + assert.ifError(err, 'uploadPartCopy: Expected ' + + `success, got error: ${err}`); + assert.strictEqual(res.ETag, `"${normalMD5}"`); + 
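// copying a whole small object yields the source object's MD5 as the part ETag + 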
next(err); + }), + next => { + const infos = { + bucketName: bucket, + keyName: this.test.mpuKeyNameGcp, + uploadId: this.test.uploadId, + md5: normalMD5, + totalSize: normalBodySize, + }; + assertCopyPart(infos, next); + }, + ], done); + }); + + it('should copy small part from GCP with bucketMatch=false to ' + + 'MPU with GCP location', + function itFn(done) { + const params = { + Bucket: bucket, + CopySource: + `${bucket}/${this.test.keyNameNormalGcpMismatch}`, + Key: this.test.mpuKeyNameGcp, + PartNumber: 1, + UploadId: this.test.uploadId, + }; + async.waterfall([ + next => s3.uploadPartCopy(params, (err, res) => { + assert.ifError(err, 'uploadPartCopy: Expected ' + + `success, got error: ${err}`); + assert.strictEqual(res.ETag, `"${normalMD5}"`); + next(err); + }), + next => { + const infos = { + bucketName: bucket, + keyName: this.test.mpuKeyNameGcp, + uploadId: this.test.uploadId, + md5: normalMD5, + totalSize: normalBodySize, + }; + assertCopyPart(infos, next); + }, + ], done); + }); + + it('should copy 5 Mb part from GCP to MPU with GCP location', + function ifF(done) { + const params = { + Bucket: bucket, + CopySource: + `${bucket}/${this.test.keyNameFiveMbGcp}`, + Key: this.test.mpuKeyNameGcp, + PartNumber: 1, + UploadId: this.test.uploadId, + }; + async.waterfall([ + next => s3.uploadPartCopy(params, (err, res) => { + assert.ifError(err, 'uploadPartCopy: Expected ' + + `success, got error: ${err}`); + assert.strictEqual(res.ETag, `"${fiveMbMD5}"`); + next(err); + }), + next => { + const infos = { + bucketName: bucket, + keyName: this.test.mpuKeyNameGcp, + uploadId: this.test.uploadId, + md5: fiveMbMD5, + totalSize: fiveMB, + }; + assertCopyPart(infos, next); + }, + ], done); + }); + + it('should copy part from GCP to MPU with memory location', + function ifF(done) { + const params = { + Bucket: memBucketName, + CopySource: + `${bucket}/${this.test.keyNameNormalGcp}`, + Key: this.test.mpuKeyNameMem, + PartNumber: 1, + UploadId: this.test.uploadIdMem, + }; + async.waterfall([ + next => s3.uploadPartCopy(params, (err, res) => { + assert.ifError(err, 'uploadPartCopy: Expected ' + + `success, got error: ${err}`); + assert.strictEqual(res.ETag, `"${normalMD5}"`); + next(err); + }), + next => { + s3.listParts({ + Bucket: memBucketName, + Key: this.test.mpuKeyNameMem, + UploadId: this.test.uploadIdMem, + }, (err, res) => { + assert.ifError(err, + 'listParts: Expected success,' + + ` got error: ${err}`); + const resultCopy = + JSON.parse(JSON.stringify(result)); + resultCopy.Bucket = memBucketName; + resultCopy.Key = this.test.mpuKeyNameMem; + resultCopy.UploadId = this.test.uploadIdMem; + resultCopy.Parts = + [{ PartNumber: 1, + LastModified: res.Parts[0].LastModified, + ETag: `"${normalMD5}"`, + Size: normalBodySize }]; + assert.deepStrictEqual(res, resultCopy); + next(); + }); + }, + ], done); + }); + + it('should copy part from GCP to MPU with AWS location', + function ifF(done) { + const params = { + Bucket: memBucketName, + CopySource: + `${bucket}/${this.test.keyNameNormalGcp}`, + Key: this.test.mpuKeyNameAWS, + PartNumber: 1, + UploadId: this.test.uploadIdAWS, + }; + async.waterfall([ + next => s3.uploadPartCopy(params, (err, res) => { + assert.ifError(err, 'uploadPartCopy: Expected ' + + `success, got error: ${err}`); + assert.strictEqual(res.ETag, `"${normalMD5}"`); + next(err); + }), + next => { + const awsBucket = + config.locationConstraints[awsLocation] + .details.bucketName; + awsS3.listParts({ + Bucket: awsBucket, + Key: this.test.mpuKeyNameAWS, + UploadId: 
this.test.uploadIdAWS, + }, (err, res) => { + assert.ifError(err, + 'listParts: Expected success,' + + ` got error: ${err}`); + assert.strictEqual(res.Bucket, awsBucket); + assert.strictEqual(res.Key, + this.test.mpuKeyNameAWS); + assert.strictEqual(res.UploadId, + this.test.uploadIdAWS); + assert.strictEqual(res.Parts.length, 1); + assert.strictEqual(res.Parts[0].PartNumber, 1); + assert.strictEqual(res.Parts[0].ETag, + `"${normalMD5}"`); + assert.strictEqual(res.Parts[0].Size, + normalBodySize); + next(); + }); + }, + ], done); + }); + + it('should copy part from GCP object with range to MPU ' + + 'with AWS location', function ifF(done) { + const params = { + Bucket: memBucketName, + CopySource: + `${bucket}/${this.test.keyNameNormalGcp}`, + Key: this.test.mpuKeyNameAWS, + CopySourceRange: 'bytes=0-5', + PartNumber: 1, + UploadId: this.test.uploadIdAWS, + }; + async.waterfall([ + next => s3.uploadPartCopy(params, (err, res) => { + assert.ifError(err, 'uploadPartCopy: Expected ' + + `success, got error: ${err}`); + assert.strictEqual(res.ETag, `"${sixBytesMD5}"`); + next(err); + }), + next => { + const awsBucket = + config.locationConstraints[awsLocation] + .details.bucketName; + awsS3.listParts({ + Bucket: awsBucket, + Key: this.test.mpuKeyNameAWS, + UploadId: this.test.uploadIdAWS, + }, (err, res) => { + assert.ifError(err, + 'listParts: Expected success,' + + ` got error: ${err}`); + assert.strictEqual(res.Bucket, awsBucket); + assert.strictEqual(res.Key, + this.test.mpuKeyNameAWS); + assert.strictEqual(res.UploadId, + this.test.uploadIdAWS); + assert.strictEqual(res.Parts.length, 1); + assert.strictEqual(res.Parts[0].PartNumber, 1); + assert.strictEqual(res.Parts[0].ETag, + `"${sixBytesMD5}"`); + assert.strictEqual(res.Parts[0].Size, 6); + next(); + }); + }, + ], done); + }); + + it('should copy 5 Mb part from a memory location to MPU with ' + + 'GCP location', + function ifF(done) { + const params = { + Bucket: bucket, + CopySource: + `${bucket}/${this.test.keyNameFiveMbMem}`, + Key: this.test.mpuKeyNameGcp, + PartNumber: 1, + UploadId: this.test.uploadId, + }; + async.waterfall([ + next => s3.uploadPartCopy(params, (err, res) => { + assert.ifError(err, 'uploadPartCopy: Expected ' + + `success, got error: ${err}`); + assert.strictEqual(res.ETag, `"${fiveMbMD5}"`); + next(err); + }), + next => { + const infos = { + bucketName: bucket, + keyName: this.test.mpuKeyNameGcp, + uploadId: this.test.uploadId, + md5: fiveMbMD5, + totalSize: fiveMB, + }; + assertCopyPart(infos, next); + }, + ], done); + }); + + describe('with existing part', () => { + beforeEach(function beF(done) { + const params = { + Body: oneKbBody, + Bucket: bucket, + Key: this.currentTest.mpuKeyNameGcp, + PartNumber: 1, + UploadId: this.currentTest.uploadId, + }; + s3.uploadPart(params, done); + }); + it('should copy part from GCP to GCP with existing ' + + 'parts', function ifF(done) { + const resultCopy = JSON.parse(JSON.stringify(result)); + const params = { + Bucket: bucket, + CopySource: + `${bucket}/${this.test.keyNameNormalGcp}`, + Key: this.test.mpuKeyNameGcp, + PartNumber: 2, + UploadId: this.test.uploadId, + }; + async.waterfall([ + next => s3.uploadPartCopy(params, (err, res) => { + assert.ifError(err, + 'uploadPartCopy: Expected success, got ' + + `error: ${err}`); + assert.strictEqual(res.ETag, `"${normalMD5}"`); + next(err); + }), + next => s3.listParts({ + Bucket: bucket, + Key: this.test.mpuKeyNameGcp, + UploadId: this.test.uploadId, + }, (err, res) => { + assert.ifError(err, 'listParts: Expected ' + + 
+ describe('with existing part', () => { + beforeEach(function beF(done) { + const params = { + Body: oneKbBody, + Bucket: bucket, + Key: this.currentTest.mpuKeyNameGcp, + PartNumber: 1, + UploadId: this.currentTest.uploadId, + }; + s3.uploadPart(params, done); + }); + it('should copy part from GCP to GCP with existing ' + + 'parts', function itFn(done) { + const resultCopy = JSON.parse(JSON.stringify(result)); + const params = { + Bucket: bucket, + CopySource: + `${bucket}/${this.test.keyNameNormalGcp}`, + Key: this.test.mpuKeyNameGcp, + PartNumber: 2, + UploadId: this.test.uploadId, + }; + async.waterfall([ + next => s3.uploadPartCopy(params, (err, res) => { + assert.ifError(err, + 'uploadPartCopy: Expected success, got ' + + `error: ${err}`); + assert.strictEqual(res.ETag, `"${normalMD5}"`); + next(err); + }), + next => s3.listParts({ + Bucket: bucket, + Key: this.test.mpuKeyNameGcp, + UploadId: this.test.uploadId, + }, (err, res) => { + assert.ifError(err, 'listParts: Expected ' + + `success, got error: ${err}`); + resultCopy.Bucket = bucket; + resultCopy.Key = this.test.mpuKeyNameGcp; + resultCopy.UploadId = this.test.uploadId; + resultCopy.Parts = + [{ PartNumber: 1, + LastModified: res.Parts[0].LastModified, + ETag: `"${oneKbMD5}"`, + Size: oneKb }, + { PartNumber: 2, + LastModified: res.Parts[1].LastModified, + ETag: `"${normalMD5}"`, + Size: normalBodySize }, + ]; + assert.deepStrictEqual(res, resultCopy); + next(); + }), + next => gcpClient.listParts({ + Bucket: gcpBucketMPU, + Key: this.test.mpuKeyNameGcp, + UploadId: this.test.uploadId, + }, (err, res) => { + assert.ifError(err, 'GCP listParts: Expected ' + + `success, got error: ${err}`); + assert.strictEqual( + res.Contents[0].ETag, `"${oneKbMD5}"`); + assert.strictEqual( + res.Contents[1].ETag, `"${normalMD5}"`); + next(); + }), + ], done); + }); + }); + }); + }); +}); + +describeSkipIfNotMultiple('Put Copy Part to GCP with complete MPU', +function describeF() { + this.timeout(800000); + withV4(sigCfg => { + beforeEach(() => { + bucketUtil = new BucketUtility('default', sigCfg); + s3 = bucketUtil.s3; + }); + + afterEach(() => { + process.stdout.write('Emptying bucket\n'); + return bucketUtil.empty(bucket) + .then(() => { + process.stdout.write('Deleting bucket\n'); + return bucketUtil.deleteOne(bucket); + }) + .then(() => { + process.stdout.write('Emptying bucket awsBucketName\n'); + return bucketUtil.empty(awsBucketName); + }) + .then(() => { + process.stdout.write('Deleting bucket awsBucketName\n'); + return bucketUtil.deleteOne(awsBucketName); + }) + .catch(err => { + process.stdout.write(`Error in afterEach: ${err}\n`); + throw err; + }); + }); + describe('Basic test with complete MPU from AWS to GCP location: ', + () => { + beforeEach(function beF(done) { + this.currentTest.keyNameAws = + `fivembaws${uniqName(keyObjectAWS)}`; + this.currentTest.mpuKeyNameGcp = + `mpukeyname${uniqName(keyObjectGcp)}`; + + const createMpuParams = { + Bucket: bucket, + Key: this.currentTest.mpuKeyNameGcp, + Metadata: { 'scal-location-constraint': gcpLocation }, + }; + async.waterfall([ + next => s3.createBucket({ Bucket: awsBucketName }, + err => next(err)), + next => s3.createBucket({ Bucket: bucket }, + err => next(err)), + next => s3.putObject({ + Bucket: awsBucketName, + Key: this.currentTest.keyNameAws, + Body: fiveMbBody, + Metadata: { 'scal-location-constraint': awsLocation }, + }, err => next(err)), + next => s3.createMultipartUpload(createMpuParams, + (err, res) => { + assert.equal(err, null, 'createMultipartUpload: ' + + `Expected success, got error: ${err}`); + this.currentTest.uploadId = res.UploadId; + next(); + }), + ], done); + }); +
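+ // Copies the same 5 MB AWS-hosted source into parts 1 and 2, then + // completes the multipart upload on the GCP location.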
+ it('should copy two 5 MB parts from AWS to MPU with GCP ' + + 'location', function itFn(done) { + const uploadParams = { + Bucket: bucket, + CopySource: + `${awsBucketName}/` + + `${this.test.keyNameAws}`, + Key: this.test.mpuKeyNameGcp, + PartNumber: 1, + UploadId: this.test.uploadId, + }; + const uploadParams2 = { + Bucket: bucket, + CopySource: + `${awsBucketName}/` + + `${this.test.keyNameAws}`, + Key: this.test.mpuKeyNameGcp, + PartNumber: 2, + UploadId: this.test.uploadId, + }; + async.waterfall([ + next => s3.uploadPartCopy(uploadParams, (err, res) => { + assert.equal(err, null, 'uploadPartCopy: Expected ' + + `success, got error: ${err}`); + assert.strictEqual(res.ETag, `"${fiveMbMD5}"`); + next(err); + }), + next => s3.uploadPartCopy(uploadParams2, (err, res) => { + assert.equal(err, null, 'uploadPartCopy: Expected ' + + `success, got error: ${err}`); + assert.strictEqual(res.ETag, `"${fiveMbMD5}"`); + next(err); + }), + next => { + const completeMpuParams = { + Bucket: bucket, + Key: this.test.mpuKeyNameGcp, + MultipartUpload: { + Parts: [ + { + ETag: `"${fiveMbMD5}"`, + PartNumber: 1, + }, + { + ETag: `"${fiveMbMD5}"`, + PartNumber: 2, + }, + ], + }, + UploadId: this.test.uploadId, + }; + s3.completeMultipartUpload(completeMpuParams, + (err, res) => { + assert.equal(err, null, 'completeMultipartUpload:' + + ` Expected success, got error: ${err}`); + assert.strictEqual(res.Bucket, bucket); + assert.strictEqual(res.Key, + this.test.mpuKeyNameGcp); + next(); + }); + }, + ], done); + }); + }); + }); +}); diff --git a/tests/functional/aws-node-sdk/test/multipleBackend/utils.js b/tests/functional/aws-node-sdk/test/multipleBackend/utils.js index 8906f51925..d3d29038a8 100644 --- a/tests/functional/aws-node-sdk/test/multipleBackend/utils.js +++ b/tests/functional/aws-node-sdk/test/multipleBackend/utils.js @@ -35,6 +35,7 @@ let awsBucket; let gcpClient; let gcpBucket; +let gcpBucketMPU; if (config.backends.data === 'multiple') { describeSkipIfNotMultiple = describe; @@ -45,6 +46,8 @@ if (config.backends.data === 'multiple') { const gcpConfig = getRealAwsConfig(gcpLocation); gcpClient = new GCP(gcpConfig); gcpBucket = config.locationConstraints[gcpLocation].details.bucketName; + gcpBucketMPU = + config.locationConstraints[gcpLocation].details.mpuBucketName; } function _assertErrorResult(err, expectedError, desc) { @@ -63,6 +66,7 @@ const utils = { awsBucket, gcpClient, gcpBucket, + gcpBucketMPU, fileLocation, memLocation, awsLocation, diff --git a/tests/unit/utils/gcpMpuHelpers.js b/tests/unit/utils/gcpMpuHelpers.js new file mode 100644 index 0000000000..e54e54b9ab --- /dev/null +++ b/tests/unit/utils/gcpMpuHelpers.js @@ -0,0 +1,65 @@ +const assert = require('assert'); +const uuid = require('uuid/v4'); +const { createMpuKey, createMpuList } = + require('../../../lib/data/external/GCP').GcpUtils; + +const key = `somekey${Date.now()}`; +const uploadId = uuid().replace(/-/g, ''); +const phase = 'createMpulist'; +const size = 2; +const correctMpuList = [ + { PartName: `${key}-${uploadId}/${phase}/00001`, PartNumber: 1 }, + { PartName: `${key}-${uploadId}/${phase}/00002`, PartNumber: 2 }, +]; + +describe('GcpUtils MPU Helper Functions:', () => { + describe('createMpuKey', () => { + const tests = [ + { + it: 'if phase and part number are given', + input: { phase: 'test', partNumber: 1 }, + output: `${key}-${uploadId}/test/00001`, + }, + { + it: 'if only phase is given', + input: { phase: 'test' }, + output: `${key}-${uploadId}/test`, + }, + { + it: 'if only part number is given', + input: { partNumber: 1 }, + output: `${key}-${uploadId}/parts/00001`, + }, + { + it: 'if phase and part number are not given', + input: {}, + output: `${key}-${uploadId}/`, + }, + ]; + tests.forEach(test => { + it(test.it, () => { + const { partNumber, phase } = test.input; + assert.strictEqual(createMpuKey( + key, uploadId, partNumber, phase), test.output); + }); + }); + }); + + describe('createMpuList', () => { + const tests = [ + { + it: 'should create valid mpu list', + input: { phase, size }, + output: correctMpuList, + }, + ]; + tests.forEach(test => { + it(test.it, () => { + const { phase, size } = test.input; + assert.deepStrictEqual(createMpuList( + { Key: key, UploadId: uploadId }, phase, size), + test.output); + }); + }); + }); +}); diff --git a/tests/unit/utils/gcpTaggingHelpers.js b/tests/unit/utils/gcpTaggingHelpers.js new file mode 100644 index
0000000000..33414d2953 --- /dev/null +++ b/tests/unit/utils/gcpTaggingHelpers.js @@ -0,0 +1,156 @@ +const assert = require('assert'); +const { errors } = require('arsenal'); +const { gcpTaggingPrefix } = require('../../../constants'); +const { genPutTagObj } = + require('../../../tests/functional/raw-node/utils/gcpUtils'); +const { processTagSet, stripTags, retrieveTags, getPutTagsMetadata } = + require('../../../lib/data/external/GCP').GcpUtils; + +const maxTagSize = 10; +const validTagSet = genPutTagObj(2); +const validTagObj = {}; +validTagObj[`${gcpTaggingPrefix}key0`] = 'Value0'; +validTagObj[`${gcpTaggingPrefix}key1`] = 'Value1'; +const tagQuery = 'key0=Value0&key1=Value1'; +const invalidSizeTagSet = genPutTagObj(maxTagSize + 1); +const invalidDuplicateTagSet = genPutTagObj(maxTagSize, true); +const invalidKeyTagSet = [{ Key: Buffer.alloc(129, 'a'), Value: 'value' }]; +const invalidValueTagSet = [{ Key: 'key', Value: Buffer.alloc(257, 'a') }]; +const onlyMetadata = { + metadata1: 'metadatavalue1', + metadata2: 'metadatavalue2', +}; +const tagMetadata = Object.assign({}, validTagObj, onlyMetadata); +const oldTagMetadata = {}; +oldTagMetadata[`${gcpTaggingPrefix}Old`] = 'OldValue0'; +const withPriorTags = Object.assign({}, onlyMetadata, oldTagMetadata); + +describe('GcpUtils Tagging Helper Functions:', () => { + describe('processTagSet', () => { + const tests = [ + { + it: 'should return tag object as metadata for valid tag set', + input: validTagSet, + output: validTagObj, + }, + { + it: 'should return error for invalid tag set size', + input: invalidSizeTagSet, + output: errors.BadRequest.customizeDescription( + 'Object tags cannot be greater than 10'), + }, + { + it: 'should return error for duplicate tag keys', + input: invalidDuplicateTagSet, + output: errors.InvalidTag.customizeDescription( + 'Cannot provide multiple Tags with the same key'), + }, + { + it: 'should return error for invalid "key" value', + input: invalidKeyTagSet, + output: errors.InvalidTag.customizeDescription( + 'The TagKey provided is too long, 129'), + }, + { + it: 'should return error for invalid "value" value', + input: invalidValueTagSet, + output: errors.InvalidTag.customizeDescription( + 'The TagValue provided is too long, 257'), + }, + { + it: 'should return empty tag object when input is undefined', + input: undefined, + output: {}, + }, + ]; + tests.forEach(test => { + it(test.it, () => { + assert.deepStrictEqual(processTagSet(test.input), test.output); + }); + }); + }); + + describe('stripTags', () => { + const tests = [ + { + it: 'should return metadata without tag', + input: tagMetadata, + output: onlyMetadata, + }, + { + it: 'should return empty object if metadata only has tags', + input: validTagObj, + output: {}, + }, + { + it: 'should return empty object if input is undefined', + input: undefined, + output: {}, + }, + ]; + tests.forEach(test => { + it(test.it, () => { + assert.deepStrictEqual(stripTags(test.input), test.output); + }); + }); + }); + + describe('retrieveTags', () => { + const tests = [ + { + it: 'should return tagSet from given input metadata', + input: tagMetadata, + output: validTagSet, + }, + { + it: 'should return empty when metadata does not have tags', + input: onlyMetadata, + output: [], + }, + { + it: 'should return empty if input is undefined', + input: undefined, + output: [], + }, + ]; + tests.forEach(test => { + it(test.it, () => { + assert.deepStrictEqual(retrieveTags(test.input), test.output); + }); + }); + }); + + describe('getPutTagsMetadata', () => { + const 
tests = [ + { + it: 'should return correct object when' + + ' given a tag query string and a metadata obj', + input: { metadata: Object.assign({}, onlyMetadata), tagQuery }, + output: tagMetadata, + }, + { + it: 'should return correct object when given only query string', + input: { tagQuery }, + output: validTagObj, + }, + { + it: 'should return correct object when only metadata is given', + input: { metadata: onlyMetadata }, + output: onlyMetadata, + }, + { + it: 'should return metadata with correct tag properties ' + + 'if given metadata with prior tags and query string', + input: { metadata: Object.assign({}, withPriorTags), tagQuery }, + output: tagMetadata, + }, + ]; + tests.forEach(test => { + it(test.it, () => { + const { metadata, tagQuery } = test.input; + assert.deepStrictEqual( + getPutTagsMetadata(metadata, tagQuery), test.output); + }); + }); + }); +});
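For reference, a minimal sketch of how the helpers covered by these unit tests compose, assuming it is run from the repository root so the require paths resolve; the 'obj'/'abc123' identifiers, the 'color' tag, and the 'app' metadata key are made-up example values:

const assert = require('assert');
const { gcpTaggingPrefix } = require('./constants');
const { createMpuKey, processTagSet, retrieveTags, getPutTagsMetadata } =
    require('./lib/data/external/GCP').GcpUtils;

// MPU parts are keyed as `<key>-<uploadId>/<phase>/<zero-padded number>`.
assert.strictEqual(createMpuKey('obj', 'abc123', 1, 'parts'),
    'obj-abc123/parts/00001');

// processTagSet converts an S3-style tag set into prefixed GCP metadata.
const md = processTagSet([{ Key: 'color', Value: 'blue' }]);
assert.strictEqual(md[`${gcpTaggingPrefix}color`], 'blue');

// Tags passed as a query string (the x-amz-tagging format) are merged
// into object metadata on put, and retrieveTags recovers them as a tag set.
const metadata = getPutTagsMetadata({ app: 'demo' }, 'color=blue');
assert.deepStrictEqual(retrieveTags(metadata),
    [{ Key: 'color', Value: 'blue' }]);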