
Commit

- Modifications to allow a Windows-based ASG to launch
- Automated ALB access logs bucket policies
- Added VPCEndpoints CFTemplate
- IAM controller now prints SwitchRole URLs for users on resource.iam operations
gitwater committed May 29, 2021
1 parent 068f49e commit 0f6352f
Showing 8 changed files with 199 additions and 20 deletions.
37 changes: 21 additions & 16 deletions src/paco/application/reseng_asg.py
@@ -88,15 +88,19 @@ def init_resource(self):
)
role_profile_arn = iam_ctl.role_profile_arn(instance_iam_role_ref)

# EC2 Launch Manager Bundles
bucket = self.app_engine.ec2_launch_manager.process_bundles(self.resource, instance_iam_role_ref)
self.ec2lm_cache_id = ""
ec2_manager_user_data_script = None
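# EC2LM bundles and the user data script are only generated for non-Windows AMIs; Windows ASGs launch without them.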
if self.resource.instance_ami_type.startswith("windows") == False:
# EC2 Launch Manager Bundles
bucket = self.app_engine.ec2_launch_manager.process_bundles(self.resource, instance_iam_role_ref)

# Create ASG stack
ec2_manager_user_data_script = self.app_engine.ec2_launch_manager.user_data_script(
self.resource,
self.stack_name
)
self.ec2lm_cache_id = self.app_engine.ec2_launch_manager.get_cache_id(self.resource)

# Create ASG stack
ec2_manager_user_data_script = self.app_engine.ec2_launch_manager.user_data_script(
self.resource,
self.stack_name
)
self.ec2lm_cache_id = self.app_engine.ec2_launch_manager.get_cache_id(self.resource)
self.stack = self.stack_group.add_new_stack(
self.aws_region,
self.resource,
@@ -108,14 +112,15 @@ def init_resource(self):
'ec2_manager_cache_id': self.ec2lm_cache_id,
},
)
self.stack.hooks.add(
name='UpdateExistingInstances.' + self.resource.name,
stack_action='update',
stack_timing='pre',
hook_method=self.app_engine.ec2_launch_manager.ec2lm_update_instances_hook,
cache_method=self.app_engine.ec2_launch_manager.ec2lm_update_instances_cache,
hook_arg=(bucket.paco_ref_parts, self.resource)
)
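# The EC2LM update-instances hook is likewise only registered for non-Windows AMIs.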
if self.resource.instance_ami_type.startswith("windows") == False:
self.stack.hooks.add(
name='UpdateExistingInstances.' + self.resource.name,
stack_action='update',
stack_timing='pre',
hook_method=self.app_engine.ec2_launch_manager.ec2lm_update_instances_hook,
cache_method=self.app_engine.ec2_launch_manager.ec2lm_update_instances_cache,
hook_arg=(bucket.paco_ref_parts, self.resource)
)
# For ECS ASGs add an ECS Hook
if self.resource.ecs != None and self.resource.is_enabled() == True:
self.stack.hooks.add(
48 changes: 48 additions & 0 deletions src/paco/application/reseng_lb.py
@@ -1,5 +1,6 @@
import paco.cftemplates
from paco.application.res_engine import ResourceEngine
from paco.models import vocabulary


class LBApplicationResourceEngine(ResourceEngine):
@@ -8,6 +9,53 @@ def init_resource(self):
# Set resolve_ref object for TargetGroups
for target_group in self.resource.target_groups.values():
target_group.resolve_ref_obj = self.app_engine

if self.resource.enable_access_logs == True:
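# Grant the regional ELB account and the AWS log delivery services permission to write access logs into the bucket, per the ALB access-logging requirements.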
access_logs_bucket_policies = []
access_logs_bucket_policies.append({
'aws': [f'arn:aws:iam::{vocabulary.elb_account_id[self.aws_region]}:root'],
'action': [ 's3:PutObject' ],
'effect': 'Allow',
'resource_suffix': [ f'/{self.resource.access_logs_prefix}/AWSLogs/{self.account_ctx.id}/*' ]
})
access_logs_bucket_policies.append({
'principal': {
'Service': 'delivery.logs.amazonaws.com'
},
'action': [ 's3:PutObject' ],
'effect': 'Allow',
'condition': {
"StringEquals": {
"s3:x-amz-acl": "bucket-owner-full-control"
}
},
'resource_suffix': [ f'/{self.resource.access_logs_prefix}/AWSLogs/{self.account_ctx.id}/*' ]
})
access_logs_bucket_policies.append({
'principal': {
'Service': 'delivery.logs.amazonaws.com'
},
'action': [ 's3:GetBucketAcl' ],
'effect': 'Allow',
'resource_suffix': [ '' ]
})
access_logs_bucket_policies.append({
'principal': {
'Service': 'logdelivery.elb.amazonaws.com'
},
'action': [ 's3:PutObject' ],
'effect': 'Allow',
'condition': {
"StringEquals": {
"s3:x-amz-acl": "bucket-owner-full-control"
}
},
'resource_suffix': [ f'/{self.resource.access_logs_prefix}/AWSLogs/{self.account_ctx.id}/*' ]
})
# the S3 Bucket Policy can be added to by multiple DeploymentPipelines
s3_ctl = self.paco_ctx.get_controller('S3')
for access_log_policy in access_logs_bucket_policies:
s3_ctl.add_bucket_policy(self.resource.access_logs_bucket, access_log_policy)
self.stack_group.add_new_stack(
self.aws_region,
self.resource,
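For reference, a minimal sketch of the kind of bucket policy statement the first entry above is expected to expand into, assuming paco appends each resource_suffix to the access-logs bucket ARN; the bucket name, log prefix, regional ELB account ID, and AWS account ID below are illustrative placeholders:

# Illustrative sketch only -- real values come from the paco model and vocabulary.elb_account_id
example_statement = {
    "Effect": "Allow",
    "Principal": {"AWS": "arn:aws:iam::<regional-elb-account-id>:root"},
    "Action": "s3:PutObject",
    "Resource": "arn:aws:s3:::example-access-logs-bucket/example-prefix/AWSLogs/123456789012/*",
}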
1 change: 1 addition & 0 deletions src/paco/cftemplates/__init__.py
@@ -54,3 +54,4 @@
from paco.cftemplates.cognito import CognitoUserPool, CognitoIdentityPool
from paco.cftemplates.dynamodb import DynamoDB
from paco.cftemplates.notification_rules import NotificationRules
from paco.cftemplates.vpcendpoints import VPCEndpoints
2 changes: 1 addition & 1 deletion src/paco/cftemplates/example_template.py
@@ -3,7 +3,7 @@


class Example(StackTemplate):
def __init__(self, stack, paco_ctx)
def __init__(self, stack, paco_ctx):
super().__init__(stack, paco_ctx)
self.set_aws_name('Example', self.resource_group_name, self.resource.name)

102 changes: 102 additions & 0 deletions src/paco/cftemplates/vpcendpoints.py
@@ -0,0 +1,102 @@
from paco.cftemplates.cftemplates import StackTemplate
from paco.models import schemas
from paco.models.locations import get_parent_by_interface
from paco.utils import md5sum
import troposphere.ec2


class VPCEndpoints(StackTemplate):
def __init__(self, stack, paco_ctx):
super().__init__(stack, paco_ctx)
self.set_aws_name('Endpoints', self.resource_group_name, self.resource.name)

# Troposphere Template Initialization
self.init_template('VPC Endpoints')

if self.resource.endpoints == None:
return
# Resource
network_config = get_parent_by_interface(self.resource, schemas.INetwork)
segment_cache = []
route_table_id_list = []
subnet_id_list = []
security_group_cache = {}
vpc_id_param = self.create_cfn_parameter(
name='VpcId',
param_type='AWS::EC2::VPC::Id',
description='The VPC Id',
value=f'{network_config.paco_ref}.vpc.id'
)
for (endpoint_name, endpoint) in self.resource.endpoints.items():
# Generate Subnet Ids for each segment and AZ (RouteTable Ids are TODO for GATEWAY endpoint types)
for segment_id in endpoint.segments:
for az_idx in range(1, network_config.availability_zones+1):
# Route Table: TODO: Not needed until we support GATEWAY endpoint types
# route_table_id_param_name = self.create_cfn_logical_id_join(
# str_list=['RouteTable', segment_id, 'AZ', str(az_idx)],
# camel_case=True
# )
# if route_table_id_param_name in segment_cache:
# continue
# segment_cache.append(route_table_id_param_name)
# route_table_id_param = self.create_cfn_parameter(
# name=route_table_id_param_name,
# param_type='String',
# description=f'RouteTable ID for {segment_id} AZ{az_idx}',
# value=f'{network_config.paco_ref}.vpc.segments.{segment_id}.az{az_idx}.route_table.id',
# )
# route_table_id_list.append(troposphere.Ref(route_table_id_param))
# Subnet Id
subnet_id_param_name = self.create_cfn_logical_id_join(
str_list=['SubnetId', segment_id, 'AZ', str(az_idx)],
camel_case=True
)
if subnet_id_param_name in segment_cache:
continue
segment_cache.append(subnet_id_param_name)
subnet_id_param = self.create_cfn_parameter(
name=subnet_id_param_name,
param_type='String',
description=f'Subnet ID for {segment_id} AZ{az_idx}',
value=f'{network_config.paco_ref}.vpc.segments.{segment_id}.az{az_idx}.subnet_id',
)
subnet_id_list.append(troposphere.Ref(subnet_id_param))


name_hash = md5sum(str_data=endpoint.security_group)
security_group_param_name = self.create_cfn_logical_id_join(
str_list=['SecurityGroupId', name_hash]
)
if security_group_param_name not in security_group_cache:
security_group_param = self.create_cfn_parameter(
name=security_group_param_name,
param_type='String',
description=f'SecurityGroupId for endpoint service {endpoint_name}',
value=endpoint.security_group+'.id'
)
security_group_cache[security_group_param_name] = troposphere.Ref(security_group_param)

security_group_id_list = [security_group_cache[security_group_param_name]]
endpoint_dict = {
'ServiceName': f'com.amazonaws.{self.aws_region}.{endpoint.service}',
#'RouteTableIds': route_table_id_list,
'SubnetIds': subnet_id_list,
'SecurityGroupIds': security_group_id_list,
'PrivateDnsEnabled': True,
'VpcId': troposphere.Ref(vpc_id_param),
'VpcEndpointType': 'Interface'

}
endpoint_res = troposphere.ec2.VPCEndpoint.from_dict(
self.create_cfn_logical_id(endpoint_name),
endpoint_dict
)
self.template.add_resource( endpoint_res )

# Outputs
# self.create_output(
# title='ExampleResourceId',
# description="Example resource Id.",
# value=troposphere.Ref(example_res),
# ref=self.resource.paco_ref_parts + ".id"
# )
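As a standalone sketch of the kind of resource this template emits, the snippet below builds a single interface endpoint with troposphere; the service name, VPC, subnet, and security group IDs are placeholders, whereas the real template wires these through the CloudFormation parameters created above:

import troposphere
import troposphere.ec2

template = troposphere.Template()
endpoint = troposphere.ec2.VPCEndpoint.from_dict(
    'SecretsManager',  # logical ID, analogous to create_cfn_logical_id(endpoint_name)
    {
        'ServiceName': 'com.amazonaws.us-west-2.secretsmanager',  # placeholder service/region
        'VpcId': 'vpc-0123456789abcdef0',                         # placeholder; real template uses a parameter Ref
        'VpcEndpointType': 'Interface',
        'SubnetIds': ['subnet-1111aaaa', 'subnet-2222bbbb'],      # placeholders
        'SecurityGroupIds': ['sg-0123456789abcdef0'],             # placeholder
        'PrivateDnsEnabled': True,
    }
)
template.add_resource(endpoint)
print(template.to_yaml())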
10 changes: 10 additions & 0 deletions src/paco/controllers/ctl_iam.py
@@ -601,6 +601,16 @@ def init_users(self, model_obj):
}
)

# Print out the SwitchRole URLs for each user
for user in self.iam.users.values():
print(f'{user.description} Switch Role URLs')
for account_name in self.paco_ctx.project['accounts'].keys():
if account_name not in user.account_whitelist and 'all' not in user.account_whitelist:
continue
account_id = self.paco_ctx.get_account_context(account_name=account_name).id
print(f'{account_name.capitalize()}:\nhttps://signin.aws.amazon.com/switchrole?account={account_id}&roleName=IAM-User-Account-Delegate-Role-{user.name}')
print()

def init(self, command=None, model_obj=None):
"Initialize Controller's StackGroup for resource.iam scope"
if model_obj == None:
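For illustration, given a hypothetical user named alice (description "Alice Example") whitelisted on an account named prod with ID 123456789012, the loop added above would print output along these lines:

Alice Example Switch Role URLs
Prod:
https://signin.aws.amazon.com/switchrole?account=123456789012&roleName=IAM-User-Account-Delegate-Role-alice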
8 changes: 5 additions & 3 deletions src/paco/controllers/ctl_ssm.py
@@ -18,9 +18,11 @@ def provision(self, scope=None):
scope_parts = scope.split('.')
if scope.startswith('resource.ssm.ssm_documents.') and len(scope_parts) == 6:
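# Expected scope shape: resource.ssm.ssm_documents.<name>.<account_name>.<aws_region>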
name, account_name, aws_region = scope_parts[3:]
account_ctx = self.paco_ctx.get_account_context(account_name=account_name)
ssm_doc = self.paco_ctx.project['resource']['ssm'].ssm_documents[name]
self.provision_ssm_document(ssm_doc, account_ctx, aws_region)
# TODO: EC2LM and Windows: name can == paco_ec2lm_update_instance
if name in self.paco_ctx.project['resource']['ssm'].ssm_documents.keys():
account_ctx = self.paco_ctx.get_account_context(account_name=account_name)
ssm_doc = self.paco_ctx.project['resource']['ssm'].ssm_documents[name]
self.provision_ssm_document(ssm_doc, account_ctx, aws_region)
else:
# ToDo: provisions everything in resource/ssm.yaml - add scopes
for ssm_doc in self.paco_ctx.project['resource']['ssm'].ssm_documents.values():
11 changes: 11 additions & 0 deletions src/paco/stack_grps/grp_network.py
@@ -159,6 +159,17 @@ def init(self):
for nat_stack in self.nat_list:
self.add_stack_order(nat_stack, [StackOrder.WAIT])

# VPC Endpoints
vpc_endpoints_stack = self.add_new_stack(
self.region,
vpc_config,
paco.cftemplates.VPCEndpoints,
stack_tags=StackTags(self.stack_tags),
stack_orders=[StackOrder.PROVISION]
)
self.add_stack_order(vpc_endpoints_stack, [StackOrder.WAIT])


def get_vpc_stack(self):
return self.vpc_stack

