Skip to content

Commit

Permalink
refactor(AWS Deploy): Generalize uploadZipFile logic
Browse files Browse the repository at this point in the history
  • Loading branch information
medikoo committed Feb 25, 2022
1 parent 8b95986 commit 26bc112
Show file tree
Hide file tree
Showing 4 changed files with 71 additions and 60 deletions.
21 changes: 21 additions & 0 deletions lib/aws/set-s3-upload-encryption-options.js
@@ -0,0 +1,21 @@
'use strict';

// Lowercase `provider.deploymentBucket` option name → capitalized S3
// upload/putObject parameter name. Hoisted to module scope so the table is
// not rebuilt on every call; frozen because it is shared constant data.
// NOTE: 'sseCustomerAlgorithim' (misspelled) is the actual service-config
// key users set, so it must stay as-is for backward compatibility.
const ENCRYPTION_OPTION_TO_PARAM = Object.freeze([
  ['serverSideEncryption', 'ServerSideEncryption'],
  ['sseCustomerAlgorithim', 'SSECustomerAlgorithm'],
  ['sseCustomerKey', 'SSECustomerKey'],
  ['sseCustomerKeyMD5', 'SSECustomerKeyMD5'],
  ['sseKMSKeyId', 'SSEKMSKeyId'],
]);

/**
 * Copies any configured server-side-encryption options from the deployment
 * bucket configuration onto the given S3 upload params.
 *
 * @param {Object} putParams - S3 upload params; mutated in place.
 * @param {Object} deploymentBucketOptions - `provider.deploymentBucket` config.
 * @returns {Object} The same `putParams` object, for chaining.
 */
const setS3UploadEncryptionOptions = (putParams, deploymentBucketOptions) => {
  for (const [optionName, paramName] of ENCRYPTION_OPTION_TO_PARAM) {
    if (deploymentBucketOptions[optionName]) {
      putParams[paramName] = deploymentBucketOptions[optionName];
    }
  }
  return putParams;
};

// Guarded so the file also parses/loads outside CommonJS (e.g. an ESM test
// harness); under Node's CommonJS runtime `module` is always defined.
if (typeof module !== 'undefined') module.exports = setS3UploadEncryptionOptions;
2 changes: 2 additions & 0 deletions lib/plugins/aws/deploy/index.js
Expand Up @@ -10,6 +10,7 @@ const checkIfBucketExists = require('../lib/check-if-bucket-exists');
const getCreateChangeSetParams = require('../lib/get-create-change-set-params');
const getExecuteChangeSetParams = require('../lib/get-execute-change-set-params');
const waitForChangeSetCreation = require('../lib/wait-for-change-set-creation');
const uploadZipFile = require('../lib/upload-zip-file');
const createStack = require('./lib/create-stack');
const cleanupS3Bucket = require('./lib/cleanup-s3-bucket');
const uploadArtifacts = require('./lib/upload-artifacts');
Expand Down Expand Up @@ -47,6 +48,7 @@ class AwsDeploy {
monitorStack,
checkIfBucketExists,
waitForChangeSetCreation,
uploadZipFile,
getCreateChangeSetParams,
getExecuteChangeSetParams
);
Expand Down
63 changes: 3 additions & 60 deletions lib/plugins/aws/deploy/lib/upload-artifacts.js
@@ -1,7 +1,6 @@
'use strict';

const _ = require('lodash');
const fs = require('fs');
const fsp = require('fs').promises;
const path = require('path');
const crypto = require('crypto');
Expand All @@ -10,6 +9,7 @@ const filesize = require('filesize');
const normalizeFiles = require('../../lib/normalize-files');
const getLambdaLayerArtifactPath = require('../../utils/get-lambda-layer-artifact-path');
const ServerlessError = require('../../../../serverless-error');
const setS3UploadEncryptionOptions = require('../../../../aws/set-s3-upload-encryption-options');
const { progress, log } = require('@serverless/utils/log');

const MAX_CONCURRENT_ARTIFACTS_UPLOADS =
Expand Down Expand Up @@ -69,7 +69,7 @@ module.exports = {

const deploymentBucketObject = this.serverless.service.provider.deploymentBucketObject;
if (deploymentBucketObject) {
params = setServersideEncryptionOptions(params, deploymentBucketObject);
params = setS3UploadEncryptionOptions(params, deploymentBucketObject);
}

return this.provider.request('S3', 'upload', params);
Expand All @@ -95,49 +95,12 @@ module.exports = {

const deploymentBucketObject = this.serverless.service.provider.deploymentBucketObject;
if (deploymentBucketObject) {
params = setServersideEncryptionOptions(params, deploymentBucketObject);
params = setS3UploadEncryptionOptions(params, deploymentBucketObject);
}

return this.provider.request('S3', 'upload', params);
},

// Uploads a single zip artifact to the deployment bucket under
// `<s3KeyDirname>/<basename of filename>`, attaching a base64 SHA-256
// checksum of the file contents as the `filesha256` object metadata.
// Returns the S3 SDK response; rethrows any read-stream error.
async uploadZipFile({ filename, s3KeyDirname }) {
  // Basename of the artifact path (split on the platform separator).
  const fileName = filename.split(path.sep).pop();

  // TODO refactor to be async (use util function to compute checksum async)
  const data = fs.readFileSync(filename);
  const fileHash = crypto.createHash('sha256').update(data).digest('base64');

  const artifactStream = fs.createReadStream(filename);
  // As AWS SDK request might be postponed (requests are queued)
  // eventual stream error may crash the process (it's thrown as uncaught if not observed).
  // Below lines prevent that
  let streamError;
  artifactStream.on('error', (error) => (streamError = error));

  let params = {
    Bucket: this.bucketName,
    Key: `${s3KeyDirname}/${fileName}`,
    Body: artifactStream,
    ContentType: 'application/zip',
    Metadata: {
      filesha256: fileHash,
    },
  };

  // Apply any server-side-encryption settings configured on the
  // deployment bucket before issuing the upload.
  const deploymentBucketObject = this.serverless.service.provider.deploymentBucketObject;
  if (deploymentBucketObject) {
    params = setServersideEncryptionOptions(params, deploymentBucketObject);
  }

  const response = await this.provider.request('S3', 'upload', params);
  // Interestingly, if request handling was queued, and stream errored (before being consumed by
  // AWS SDK) then SDK call succeeds without actually uploading a file to S3 bucket.
  // Below line ensures that eventual stream error is communicated
  if (streamError) throw streamError;
  return response;
},

async getFunctionArtifactFilePaths() {
const functionNames = this.serverless.service.getAllFunctions();
return _.uniq(
Expand Down Expand Up @@ -245,23 +208,3 @@ module.exports = {
}
},
};

/**
 * Copies any server-side-encryption settings configured on the deployment
 * bucket onto the S3 upload parameters.
 *
 * @param {Object} putParams - S3 upload params; mutated in place.
 * @param {Object} deploymentBucketOptions - `provider.deploymentBucket` config.
 * @returns {Object} The same `putParams` object, for chaining.
 */
function setServersideEncryptionOptions(putParams, deploymentBucketOptions) {
  // Lowercase service-config option name → capitalized S3 SDK param name.
  // NOTE: 'sseCustomerAlgorithim' (misspelled) is the actual config key users
  // set, so it must stay as-is for backward compatibility.
  const encryptionFields = [
    ['serverSideEncryption', 'ServerSideEncryption'],
    ['sseCustomerAlgorithim', 'SSECustomerAlgorithm'],
    ['sseCustomerKey', 'SSECustomerKey'],
    ['sseCustomerKeyMD5', 'SSECustomerKeyMD5'],
    ['sseKMSKeyId', 'SSEKMSKeyId'],
  ];

  // Fix: the original passed `this` as forEach's thisArg — it is ignored by
  // arrow callbacks and meaningless at module scope; for...of with
  // destructuring is clearer and drops the dead argument.
  for (const [optionName, paramName] of encryptionFields) {
    if (deploymentBucketOptions[optionName]) {
      putParams[paramName] = deploymentBucketOptions[optionName];
    }
  }

  return putParams;
}
45 changes: 45 additions & 0 deletions lib/plugins/aws/lib/upload-zip-file.js
@@ -0,0 +1,45 @@
'use strict';

const path = require('path');
const fs = require('fs');
const crypto = require('crypto');
const setS3UploadEncryptionOptions = require('../../../aws/set-s3-upload-encryption-options');

module.exports = {
async uploadZipFile({ filename, s3KeyDirname }) {
const fileName = filename.split(path.sep).pop();

// TODO refactor to be async (use util function to compute checksum async)
const data = fs.readFileSync(filename);
const fileHash = crypto.createHash('sha256').update(data).digest('base64');

const artifactStream = fs.createReadStream(filename);
// As AWS SDK request might be postponed (requests are queued)
// eventual stream error may crash the process (it's thrown as uncaught if not observed).
// Below lines prevent that
let streamError;
artifactStream.on('error', (error) => (streamError = error));

let params = {
Bucket: this.bucketName,
Key: `${s3KeyDirname}/${fileName}`,
Body: artifactStream,
ContentType: 'application/zip',
Metadata: {
filesha256: fileHash,
},
};

const deploymentBucketObject = this.serverless.service.provider.deploymentBucketObject;
if (deploymentBucketObject) {
params = setS3UploadEncryptionOptions(params, deploymentBucketObject);
}

const response = await this.provider.request('S3', 'upload', params);
// Interestingly, if request handling was queued, and stream errored (before being consumed by
// AWS SDK) then SDK call succeeds without actually uploading a file to S3 bucket.
// Below line ensures that eventual stream error is communicated
if (streamError) throw streamError;
return response;
},
};

0 comments on commit 26bc112

Please sign in to comment.