Commit

feat(s3-deployment): allow multiple Sources for single Deployment (#4105)

* feat(s3-deployment): allow multiple Sources for single Deployment

In some cases, a user may want to inject several sources into a
single deployment. For instance, JavaScript bundles may live in one
directory and images in another, with both combined into a single
bucket that backs a CloudFront distribution.

* All sources will be fetched and consolidated into one directory
  before being zipped as normal prior to deployment

BREAKING CHANGE:
* Property `source` is now `sources` and is a `Source` array
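
For existing users the migration is mechanical: wrap the current `source` value in an array. A minimal sketch of the new usage, assuming the import style and the `websiteBucket` / `./website-dist` names from the README example further down:

```ts
import * as s3 from '@aws-cdk/aws-s3';
import * as s3deploy from '@aws-cdk/aws-s3-deployment';

// Inside a Stack, as in the README examples below:
const websiteBucket = new s3.Bucket(this, 'WebsiteBucket');

// Before this change:  source: s3deploy.Source.asset('./website-dist'),
// After this change, `sources` takes an array of Source objects:
new s3deploy.BucketDeployment(this, 'DeployWebsite', {
  sources: [s3deploy.Source.asset('./website-dist')],
  destinationBucket: websiteBucket,
});
```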

* fixup! feat(s3-deployment): allow multiple Sources for single Deployment

* fixup! feat(s3-deployment): allow multiple Sources for single Deployment

* fixup! feat(s3-deployment): allow multiple Sources for single Deployment
CaerusKaru authored and mergify[bot] committed Sep 19, 2019
1 parent 57a7ae0 commit 2ce4a87
Showing 13 changed files with 264 additions and 134 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -19,6 +19,7 @@ coverage/
.LAST_BUILD
*.sw[a-z]
*~
.idea

Comment from @nmussy (Contributor), Sep 19, 2019: <3


# We don't want tsconfig at the root
/tsconfig.json
14 changes: 8 additions & 6 deletions packages/@aws-cdk/aws-s3-deployment/README.md
@@ -17,8 +17,8 @@

> __Status: Experimental__
This library allows populating an S3 bucket with the contents of a .zip file
from another S3 bucket or from local disk.
This library allows populating an S3 bucket with the contents of .zip files
from other S3 buckets or from local disk.

The following example defines a publicly accessible S3 bucket with web hosting
enabled and populates it from a local directory on disk.
@@ -30,7 +30,7 @@ const websiteBucket = new s3.Bucket(this, 'WebsiteBucket', {
});

new s3deploy.BucketDeployment(this, 'DeployWebsite', {
source: s3deploy.Source.asset('./website-dist'),
sources: [s3deploy.Source.asset('./website-dist')],
destinationBucket: websiteBucket,
destinationKeyPrefix: 'web/static' // optional prefix in destination bucket
});
@@ -40,13 +40,15 @@ This is what happens under the hood:

1. When this stack is deployed (either via `cdk deploy` or via CI/CD), the
contents of the local `website-dist` directory will be archived and uploaded
to an intermediary assets bucket.
to an intermediary assets bucket. If there is more than one source, they will
be individually uploaded.
2. The `BucketDeployment` construct synthesizes a custom CloudFormation resource
of type `Custom::CDKBucketDeployment` into the template. The source bucket/key
is set to point to the assets bucket.
3. The custom resource downloads the .zip archive, extracts it and issues `aws
s3 sync --delete` against the destination bucket (in this case
`websiteBucket`).
`websiteBucket`). If there is more than one source, the sources will be
downloaded and merged pre-deployment at this step.
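
To illustrate the multi-source case that motivated this change, a deployment can now merge separately maintained asset directories into one destination bucket. This is a hypothetical sketch (the `./js-bundles` and `./images` directories are made-up names; `websiteBucket` is the bucket from the earlier example):

```ts
new s3deploy.BucketDeployment(this, 'DeployMergedAssets', {
  sources: [
    s3deploy.Source.asset('./js-bundles'), // hypothetical directory of JS bundles
    s3deploy.Source.asset('./images'),     // hypothetical directory of images
  ],
  destinationBucket: websiteBucket,
  // Each asset is uploaded as its own zip (step 1); the custom resource then
  // downloads and extracts all of them into one directory before running
  // `aws s3 sync --delete` against the destination (step 3).
});
```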

## Supported sources

@@ -82,7 +84,7 @@ const distribution = new cloudfront.CloudFrontWebDistribution(this, 'Distributio
});

new s3deploy.BucketDeployment(this, 'DeployWithInvalidation', {
source: s3deploy.Source.asset('./website-dist'),
sources: [s3deploy.Source.asset('./website-dist')],
destinationBucket: bucket,
distribution,
distributionPaths: ['/images/*.png'],
25 changes: 13 additions & 12 deletions packages/@aws-cdk/aws-s3-deployment/lambda/src/index.py
@@ -39,8 +39,8 @@ def cfn_error(message=None):
physical_id = event.get('PhysicalResourceId', None)

try:
source_bucket_name = props['SourceBucketName']
source_object_key = props['SourceObjectKey']
source_bucket_names = props['SourceBucketNames']
source_object_keys = props['SourceObjectKeys']
dest_bucket_name = props['DestinationBucketName']
dest_bucket_prefix = props.get('DestinationBucketKeyPrefix', '')
retain_on_delete = props.get('RetainOnDelete', "true") == "true"
@@ -62,7 +62,7 @@ def cfn_error(message=None):
if dest_bucket_prefix == "/":
dest_bucket_prefix = ""

s3_source_zip = "s3://%s/%s" % (source_bucket_name, source_object_key)
s3_source_zips = map(lambda name, key: "s3://%s/%s" % (name, key), source_bucket_names, source_object_keys)
s3_dest = "s3://%s/%s" % (dest_bucket_name, dest_bucket_prefix)

old_s3_dest = "s3://%s/%s" % (old_props.get("DestinationBucketName", ""), old_props.get("DestinationBucketKeyPrefix", ""))
@@ -96,7 +96,7 @@ def cfn_error(message=None):
aws_command("s3", "rm", old_s3_dest, "--recursive")

if request_type == "Update" or request_type == "Create":
s3_deploy(s3_source_zip, s3_dest)
s3_deploy(s3_source_zips, s3_dest)

if distribution_id:
cloudfront_invalidate(distribution_id, distribution_paths)
@@ -109,8 +109,8 @@ def cfn_error(message=None):
cfn_error(str(e))

#---------------------------------------------------------------------------------------------------
# populate all files from s3_source_zip to a destination bucket
def s3_deploy(s3_source_zip, s3_dest):
# populate all files from s3_source_zips to a destination bucket
def s3_deploy(s3_source_zips, s3_dest):
# create a temporary working directory
workdir=tempfile.mkdtemp()
logger.info("| workdir: %s" % workdir)
@@ -120,12 +120,13 @@ def s3_deploy(s3_source_zip, s3_dest):
os.mkdir(contents_dir)

# download the archive from the source and extract to "contents"
archive=os.path.join(workdir, 'archive.zip')
logger.info("| archive: %s" % archive)
aws_command("s3", "cp", s3_source_zip, archive)
logger.info("| extracting archive to: %s" % contents_dir)
with ZipFile(archive, "r") as zip:
zip.extractall(contents_dir)
for s3_source_zip in s3_source_zips:
archive=os.path.join(workdir, str(uuid4()))
logger.info("archive: %s" % archive)
aws_command("s3", "cp", s3_source_zip, archive)
logger.info("| extracting archive to: %s\n" % contents_dir)
with ZipFile(archive, "r") as zip:
zip.extractall(contents_dir)

# sync from "contents" to destination
aws_command("s3", "sync", "--delete", contents_dir, s3_dest)
105 changes: 60 additions & 45 deletions packages/@aws-cdk/aws-s3-deployment/lambda/test/test.py
@@ -23,12 +23,12 @@ def setUp(self):

def test_invalid_request(self):
resp = invoke_handler("Create", {}, expected_status="FAILED")
self.assertEqual(resp["Reason"], "missing request resource property 'SourceBucketName'. props: {}")
self.assertEqual(resp["Reason"], "missing request resource property 'SourceBucketNames'. props: {}")

def test_create_update(self):
invoke_handler("Create", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>"
})

@@ -37,10 +37,25 @@ def test_create_update(self):
"s3 sync --delete contents.zip s3://<dest-bucket-name>/"
)

def test_create_update_multiple_sources(self):
invoke_handler("Create", {
"SourceBucketNames": ["<source-bucket1>", "<source-bucket2>"],
"SourceObjectKeys": ["<source-object-key1>", "<source-object-key2>"],
"DestinationBucketName": "<dest-bucket-name>"
})

# Note: these are different files in real-life. For testing purposes, we hijack
# the command to output a static filename, archive.zip
self.assertAwsCommands(
"s3 cp s3://<source-bucket1>/<source-object-key1> archive.zip",
"s3 cp s3://<source-bucket2>/<source-object-key2> archive.zip",
"s3 sync --delete contents.zip s3://<dest-bucket-name>/"
)

def test_create_with_backslash_prefix_same_as_no_prefix(self):
invoke_handler("Create", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
"DestinationBucketKeyPrefix": "/"
})
@@ -53,8 +68,8 @@ def test_create_with_backslash_prefix_same_as_no_prefix(self):

def test_create_update_with_dest_key(self):
invoke_handler("Create", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
"DestinationBucketKeyPrefix": "<dest-key-prefix>"
})
@@ -66,8 +81,8 @@ def test_create_update_with_dest_key(self):

def test_delete_no_retain(self):
invoke_handler("Delete", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
"RetainOnDelete": "false"
}, physical_id="<physicalid>")
@@ -76,8 +91,8 @@ def test_delete_no_retain(self):

def test_delete_with_dest_key(self):
invoke_handler("Delete", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
"DestinationBucketKeyPrefix": "<dest-key-prefix>",
"RetainOnDelete": "false"
@@ -87,8 +102,8 @@ def test_delete_with_dest_key(self):

def test_delete_with_retain_explicit(self):
invoke_handler("Delete", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
"RetainOnDelete": "true"
}, physical_id="<physicalid>")
@@ -99,8 +114,8 @@ def test_delete_with_retain_explicit(self):
# RetainOnDelete=true is the default
def test_delete_with_retain_implicit_default(self):
invoke_handler("Delete", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>"
}, physical_id="<physicalid>")

@@ -109,8 +124,8 @@ def test_delete_with_retain_implicit_default(self):

def test_delete_with_retain_explicitly_false(self):
invoke_handler("Delete", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
"RetainOnDelete": "false"
}, physical_id="<physicalid>")
@@ -125,8 +140,8 @@ def test_delete_with_retain_explicitly_false(self):

def test_update_same_dest(self):
invoke_handler("Update", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
}, old_resource_props={
"DestinationBucketName": "<dest-bucket-name>",
@@ -150,8 +165,8 @@ def mock_make_api_call(self, operation_name, kwarg):

with patch('botocore.client.BaseClient._make_api_call', new=mock_make_api_call):
invoke_handler("Update", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
"DistributionId": "<cf-dist-id>"
}, old_resource_props={
@@ -171,8 +186,8 @@ def mock_make_api_call(self, operation_name, kwarg):

with patch('botocore.client.BaseClient._make_api_call', new=mock_make_api_call):
invoke_handler("Update", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
"DestinationBucketKeyPrefix": "<dest-prefix>",
"DistributionId": "<cf-dist-id>"
@@ -194,8 +209,8 @@ def mock_make_api_call(self, operation_name, kwarg):

with patch('botocore.client.BaseClient._make_api_call', new=mock_make_api_call):
invoke_handler("Update", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
"DistributionId": "<cf-dist-id>",
"DistributionPaths": ["/path1/*", "/path2/*"]
@@ -205,8 +220,8 @@ def mock_make_api_call(self, operation_name, kwarg):

def test_update_new_dest_retain(self):
invoke_handler("Update", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
}, old_resource_props={
"DestinationBucketName": "<dest-bucket-name>",
@@ -220,8 +235,8 @@ def test_update_new_dest_retain(self):

def test_update_new_dest_no_retain(self):
invoke_handler("Update", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<new-dest-bucket-name>",
"RetainOnDelete": "false"
}, old_resource_props={
@@ -238,8 +253,8 @@ def test_update_new_dest_no_retain(self):

def test_update_new_dest_retain_implicit(self):
invoke_handler("Update", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<new-dest-bucket-name>",
}, old_resource_props={
"DestinationBucketName": "<old-dest-bucket-name>",
@@ -253,8 +268,8 @@ def test_update_new_dest_retain_implicit(self):

def test_update_new_dest_prefix_no_retain(self):
invoke_handler("Update", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
"DestinationBucketKeyPrefix": "<new-dest-prefix>",
"RetainOnDelete": "false"
@@ -271,8 +286,8 @@ def test_update_new_dest_prefix_no_retain(self):

def test_update_new_dest_prefix_retain_implicit(self):
invoke_handler("Update", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
"DestinationBucketKeyPrefix": "<new-dest-prefix>"
}, old_resource_props={
@@ -290,8 +305,8 @@ def test_update_new_dest_prefix_retain_implicit(self):

def test_physical_id_allocated_on_create_and_reused_afterwards(self):
create_resp = invoke_handler("Create", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
})

@@ -301,8 +316,8 @@ def test_physical_id_allocated_on_create_and_reused_afterwards(self):
# now issue an update and pass in the physical id. expect the same
# one to be returned back
update_resp = invoke_handler("Update", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<new-dest-bucket-name>",
}, old_resource_props={
"DestinationBucketName": "<dest-bucket-name>",
@@ -311,17 +326,17 @@ def test_physical_id_allocated_on_create_and_reused_afterwards(self):

# now issue a delete, and make sure this also applies
delete_resp = invoke_handler("Delete", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<dest-bucket-name>",
"RetainOnDelete": "false"
}, physical_id=phid)
self.assertEqual(delete_resp['PhysicalResourceId'], phid)

def test_fails_when_physical_id_not_present_in_update(self):
update_resp = invoke_handler("Update", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<new-dest-bucket-name>",
}, old_resource_props={
"DestinationBucketName": "<dest-bucket-name>",
@@ -331,8 +346,8 @@ def test_fails_when_physical_id_not_present_in_update(self):

def test_fails_when_physical_id_not_present_in_delete(self):
update_resp = invoke_handler("Delete", {
"SourceBucketName": "<source-bucket>",
"SourceObjectKey": "<source-object-key>",
"SourceBucketNames": ["<source-bucket>"],
"SourceObjectKeys": ["<source-object-key>"],
"DestinationBucketName": "<new-dest-bucket-name>",
}, old_resource_props={
"DestinationBucketName": "<dest-bucket-name>",

0 comments on commit 2ce4a87
