Skip to content

Commit

Permalink
Enabled S3 bucket encryption by default
Browse files Browse the repository at this point in the history
  • Loading branch information
gitwater committed Aug 4, 2021
1 parent e9d6d65 commit 830e434
Show file tree
Hide file tree
Showing 2 changed files with 40 additions and 19 deletions.
8 changes: 8 additions & 0 deletions src/paco/cftemplates/s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,6 +145,14 @@ def __init__(self, stack, paco_ctx, bucket_context, bucket_policy_only):
})
cfn_export_dict['NotificationConfiguration']["LambdaConfigurations"] = lambda_notifs

# Encryption on by default
cfn_export_dict['BucketEncryption'] = {
'ServerSideEncryptionConfiguration': [{
'ServerSideEncryptionByDefault': {
'SSEAlgorithm': 'AES256'
}
}]
}
s3_resource = troposphere.s3.Bucket.from_dict(s3_logical_id, cfn_export_dict)
s3_resource.DeletionPolicy = 'Retain' # We always retain. Bucket cleanup is handled by Stack hooks.
template.add_resource(s3_resource)
Expand Down
51 changes: 32 additions & 19 deletions src/paco/config/paco_buckets.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,8 @@ def get_bucket_name(self, account_ctx, region):

def upload_file(self, file_location, s3_key, account_ctx, region):
    """Upload a local file to the Paco Bucket for the given account and region.

    create_bucket() is idempotent (it checks is_bucket_created internally),
    so it is safe to call unconditionally before every upload.
    """
    # Ensure the bucket exists (no-op if already created).
    self.create_bucket(account_ctx, region)
    bucket_name = self.get_bucket_name(account_ctx, region)
    s3_client = account_ctx.get_aws_client('s3', region)
    s3_client.upload_file(file_location, bucket_name, s3_key)
Expand All @@ -41,8 +41,7 @@ def upload_file(self, file_location, s3_key, account_ctx, region):
def upload_fileobj(self, file_contents, s3_key, account_ctx, region):
    """Upload a string's contents as an S3 object in the Paco Bucket.

    file_contents: str - encoded to bytes and wrapped in an in-memory
    file object before upload.
    """
    fileobj = io.BytesIO(file_contents.encode())
    # Ensure the bucket exists (create_bucket is idempotent).
    self.create_bucket(account_ctx, region)
    bucket_name = self.get_bucket_name(account_ctx, region)
    s3_client = account_ctx.get_aws_client('s3', region)
    s3_client.upload_fileobj(fileobj, bucket_name, s3_key)
Expand All @@ -66,8 +65,7 @@ def get_object(self, s3_key, account_ctx, region):

def put_object(self, s3_key, obj, account_ctx, region):
"""Put an S3 Object in a Paco Bucket"""
if not self.is_bucket_created(account_ctx, region):
self.create_bucket(account_ctx, region)
self.create_bucket(account_ctx, region)
bucket_name = self.get_bucket_name(account_ctx, region)
s3_client = account_ctx.get_aws_client('s3', region)
if type(obj) != bytes:
Expand All @@ -81,23 +79,25 @@ def create_bucket(self, account_ctx, region):
s3_client = account_ctx.get_aws_client('s3', region)
# ToDo: check if bucket exists and handle that
# us-east-1 is a "special default" region - the AWS API behaves differently
if region == 'us-east-1':
s3_client.create_bucket(
ACL='private',
Bucket=bucket_name,
)
else:
s3_client.create_bucket(
ACL='private',
Bucket=bucket_name,
CreateBucketConfiguration={
'LocationConstraint': region,
},
)
if not self.is_bucket_created(account_ctx, region):
if region == 'us-east-1':
s3_client.create_bucket(
ACL='private',
Bucket=bucket_name,
)
else:
s3_client.create_bucket(
ACL='private',
Bucket=bucket_name,
CreateBucketConfiguration={
'LocationConstraint': region,
},
)
s3_client.put_bucket_versioning(
Bucket=bucket_name,
VersioningConfiguration={'Status':'Enabled'},
)

s3_client.put_public_access_block(
Bucket=bucket_name,
PublicAccessBlockConfiguration={
Expand All @@ -107,6 +107,19 @@ def create_bucket(self, account_ctx, region):
'RestrictPublicBuckets': True,
}
)
response = s3_client.put_bucket_encryption(
Bucket=bucket_name,
ServerSideEncryptionConfiguration={
'Rules': [
{
'ApplyServerSideEncryptionByDefault': {
'SSEAlgorithm': 'AES256'
}
}
]
}
)


def is_bucket_created(self, account_ctx, region):
"True if the S3 Bucket for the account and region exists"
Expand Down

0 comments on commit 830e434

Please sign in to comment.