Workflow performance improvements (#542)
- export puppet pipeline logs now exports the CodeBuild logs for each deploy action execution (covering the scenario where a deploy is retried)
- reduced the number of tasks created when SSM parameters with depends_on are detected
- fixed a defect where existing stack parameters were not collected for stacks in CREATE_COMPLETE status, which affected parameter comparisons when detecting drift
eamonnfaherty committed Aug 5, 2022
1 parent d4463bc commit 4cedd83
Showing 9 changed files with 93 additions and 64 deletions.
2 changes: 1 addition & 1 deletion Makefile.Puppet
@@ -30,7 +30,7 @@ expand:

## @Puppet_commands Runs servicecatalog-puppet --info deploy
deploy:
time poetry run servicecatalog-puppet --info-line-numbers deploy \
time poetry run servicecatalog-puppet --info-line-numbers deploy --num-workers 40 \
ignored/src/ServiceCatalogPuppet/manifest-expanded.yaml

deploy-with-put:
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -3,7 +3,7 @@

[tool.poetry]
name = "aws-service-catalog-puppet"
version = "0.180.1"
version = "0.181.0"
description = "Making it easier to deploy ServiceCatalog products"
classifiers = ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 3", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Natural Language :: English"]
homepage = "https://service-catalog-tools-workshop.com/"
12 changes: 8 additions & 4 deletions servicecatalog_puppet/commands/management.py
@@ -133,10 +133,14 @@ def handle_action_execution_detail(puppet_account_id, action_execution_detail):
with betterboto_client.ClientContextManager(
"logs", region_name=config.get_home_region(puppet_account_id)
) as logs:
with open(
f"log-{action_execution_detail.get('input').get('configuration').get('ProjectName')}.log",
"w",
) as f:
project_name = (
action_execution_detail.get("input")
.get("configuration")
.get("ProjectName")
)
action_execution_id = action_execution_detail.get("actionExecutionId")
output_file_name = f"log-{project_name}--{action_execution_id}.log"
with open(output_file_name, "w",) as f:
params = {
"logGroupName": log_details.get("groupName"),
"logStreamName": log_details.get("streamName"),
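The log file name now includes the actionExecutionId, so when a deploy is retried each CodeBuild execution gets its own export file instead of overwriting the previous one. The download loop itself sits below this hunk; as a rough illustration only, the collected params could drive a CloudWatch Logs export roughly like this (a hypothetical sketch, not the committed code):

    # Hypothetical sketch: page through the stream named in params and
    # append every event message to the per-execution log file.
    next_token = None
    while True:
        kwargs = dict(params, startFromHead=True)
        if next_token:
            kwargs["nextToken"] = next_token
        response = logs.get_log_events(**kwargs)
        for event in response.get("events", []):
            f.write(event.get("message", "") + "\n")
        # get_log_events repeats the same nextForwardToken once the stream is exhausted
        if response.get("nextForwardToken") == next_token:
            break
        next_token = response.get("nextForwardToken")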
30 changes: 30 additions & 0 deletions servicecatalog_puppet/constants.py
@@ -277,3 +277,33 @@
"SharePortfolioViaOrgsTask",
"SharePortfolioTask",
]

CLOUDFORMATION_HAPPY_STATUS = [
"CREATE_COMPLETE",
"UPDATE_ROLLBACK_COMPLETE",
"UPDATE_COMPLETE",
"IMPORT_COMPLETE",
"IMPORT_ROLLBACK_COMPLETE",
]

CLOUDFORMATION_UNHAPPY_STATUS = [
"CREATE_FAILED",
"ROLLBACK_FAILED",
"DELETE_FAILED",
"UPDATE_FAILED",
"UPDATE_ROLLBACK_FAILED",
"IMPORT_ROLLBACK_FAILED",
]

CLOUDFORMATION_IN_PROGRESS_STATUS = [
"CREATE_IN_PROGRESS",
"ROLLBACK_IN_PROGRESS",
"DELETE_IN_PROGRESS",
"UPDATE_COMPLETE_CLEANUP_IN_PROGRESS",
"UPDATE_IN_PROGRESS",
"UPDATE_ROLLBACK_IN_PROGRESS",
"UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS",
"REVIEW_IN_PROGRESS",
"IMPORT_IN_PROGRESS",
"IMPORT_ROLLBACK_IN_PROGRESS",
]
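These three lists centralise the CloudFormation status groupings that were previously hard-coded in provision_stack_task.py, so callers can classify a stack status in one place. For illustration only, a small helper (hypothetical, not part of this commit) that buckets a status using the lists above:

    from servicecatalog_puppet import constants

    def classify_stack_status(status):
        # Bucket a CloudFormation StackStatus using the new constants.
        if status in constants.CLOUDFORMATION_HAPPY_STATUS:
            return "happy"
        if status in constants.CLOUDFORMATION_UNHAPPY_STATUS:
            return "unhappy"
        if status in constants.CLOUDFORMATION_IN_PROGRESS_STATUS:
            return "in-progress"
        # e.g. ROLLBACK_COMPLETE and DELETE_COMPLETE fall outside all three lists
        return "other"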
29 changes: 26 additions & 3 deletions servicecatalog_puppet/workflow/general/get_ssm_param_task.py
@@ -7,6 +7,7 @@
from deepmerge import always_merger

from servicecatalog_puppet import config
from servicecatalog_puppet import constants
from servicecatalog_puppet.workflow import dependency
from servicecatalog_puppet.workflow import tasks
from servicecatalog_puppet.workflow.general import boto3_task
@@ -204,6 +205,28 @@ def get_parameters_tasks(self):
ssm_parameter_name = ssm_parameter_name.replace(
"${AWS::AccountId}", self.account_id
)
parameter_depends_on_all = param_details.get("ssm").get(
"depends_on", []
)
spoke_account_id_to_use = ""
spoke_region_to_use = ""

for parameter_depends_on in parameter_depends_on_all:
parameter_depends_on_affinity = parameter_depends_on.get(
"affinity", parameter_depends_on.get("type")
)
if parameter_depends_on_affinity == constants.AFFINITY_ACCOUNT:
spoke_account_id_to_use = self.account_id
spoke_region_to_use = ""
elif parameter_depends_on_affinity == constants.AFFINITY_REGION:
spoke_account_id_to_use = ""
spoke_region_to_use = self.region
elif (
parameter_depends_on_affinity
== constants.AFFINITY_ACCOUNT_AND_REGION
):
spoke_account_id_to_use = self.account_id
spoke_region_to_use = self.region

ssm_params[param_name] = GetSSMParamTask(
parameter_name=param_name,
@@ -214,11 +237,11 @@ def get_parameters_tasks(self):
default_value=param_details.get("ssm").get("default_value"),
path=param_details.get("ssm").get("path", ""),
recursive=param_details.get("ssm").get("recursive", True),
depends_on=param_details.get("ssm").get("depends_on", []),
depends_on=parameter_depends_on_all,
manifest_file_path=self.manifest_file_path,
puppet_account_id=self.puppet_account_id,
spoke_account_id=self.account_id,
spoke_region=self.region,
spoke_account_id=spoke_account_id_to_use,
spoke_region=spoke_region_to_use,
)

if param_details.get("boto3"):
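This is the change behind the "reduced the number of tasks" bullet: when a parameter's depends_on entries only need account affinity (or only region affinity), the spoke region (or account) is blanked out of the GetSSMParamTask parameters, so otherwise-identical tasks presumably collapse into one instead of being created per account-and-region combination. A standalone sketch of the same resolution logic, assuming the constants.AFFINITY_* values referenced in the hunk above:

    from servicecatalog_puppet import constants

    def resolve_spoke_scope(parameter_depends_on_all, account_id, region):
        # Hypothetical helper mirroring the loop above: only keep the account
        # and/or region in the task parameters when a dependency's affinity needs it.
        spoke_account_id_to_use = ""
        spoke_region_to_use = ""
        for parameter_depends_on in parameter_depends_on_all:
            affinity = parameter_depends_on.get("affinity", parameter_depends_on.get("type"))
            if affinity == constants.AFFINITY_ACCOUNT:
                spoke_account_id_to_use, spoke_region_to_use = account_id, ""
            elif affinity == constants.AFFINITY_REGION:
                spoke_account_id_to_use, spoke_region_to_use = "", region
            elif affinity == constants.AFFINITY_ACCOUNT_AND_REGION:
                spoke_account_id_to_use, spoke_region_to_use = account_id, region
        return spoke_account_id_to_use, spoke_region_to_use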
@@ -20,6 +20,7 @@ def params_for_results_display(self):
"key": self.key,
"region": self.region,
"version_id": self.version_id,
"cache_invalidator": self.cache_invalidator,
}

def run(self):
78 changes: 24 additions & 54 deletions servicecatalog_puppet/workflow/stack/provision_stack_task.py
@@ -171,55 +171,29 @@ def stack_name_to_use(self):
return self.stack_name

def ensure_stack_is_in_complete_status(self):
current_stack = dict(StackStatus="DoesntExist")
waiting = "NotARealStatus"
current_stack = dict(StackStatus=waiting)
with self.spoke_regional_client("cloudformation") as cloudformation:
try:
paginator = cloudformation.get_paginator("describe_stacks")
for page in paginator.paginate(StackName=self.stack_name_to_use,):
for stack in page.get("Stacks", []):
status = stack.get("StackStatus")
if status in [
"CREATE_IN_PROGRESS",
"ROLLBACK_IN_PROGRESS",
"DELETE_IN_PROGRESS",
"UPDATE_IN_PROGRESS",
"UPDATE_COMPLETE_CLEANUP_IN_PROGRESS",
"UPDATE_ROLLBACK_IN_PROGRESS",
"UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS",
"IMPORT_ROLLBACK_IN_PROGRESS",
"REVIEW_IN_PROGRESS",
"IMPORT_IN_PROGRESS",
"CREATE_FAILED",
"ROLLBACK_FAILED",
"DELETE_FAILED",
"UPDATE_ROLLBACK_FAILED",
"IMPORT_ROLLBACK_FAILED",
]:
while status not in [
"ROLLBACK_COMPLETE",
"CREATE_COMPLETE",
"UPDATE_ROLLBACK_COMPLETE",
"DELETE_COMPLETE",
"UPDATE_COMPLETE",
"IMPORT_COMPLETE",
"IMPORT_ROLLBACK_COMPLETE",
]:
time.sleep(5)
sub_paginator = cloudformation.get_paginator(
"describe_stacks"
)
for sub_page in sub_paginator.paginate(
StackName=stack.get("StackId"),
):
for sub_stack in sub_page.get("Stacks", []):
status = sub_stack.get("StackStatus")
current_stack = stack
except ClientError as error:
while current_stack.get(
"StackStatus"
) in constants.CLOUDFORMATION_IN_PROGRESS_STATUS + [waiting]:
stacks = cloudformation.describe_stacks(
StackName=self.stack_name_to_use
).get("Stacks", [])
assert len(stacks) == 1
current_stack = stacks[0]

if (
error.response["Error"]["Message"]
!= f"Stack with id {self.stack_name_to_use} does not exist"
current_stack.get("StackStatus")
in constants.CLOUDFORMATION_IN_PROGRESS_STATUS
):
raise error
time.sleep(5)

if current_stack.get("StackStatus") in constants.CLOUDFORMATION_UNHAPPY_STATUS:
raise Exception(
f"stack {self.stack_name} is in state {current_stack.get('StackStatus')}"
)

return current_stack

def run(self):
@@ -264,19 +238,13 @@ def run(self):

existing_stack_params_dict = dict()
existing_template = ""
if status in [
"CREATE_COMPLETE",
"UPDATE_ROLLBACK_COMPLETE",
"UPDATE_COMPLETE",
"IMPORT_COMPLETE",
"IMPORT_ROLLBACK_COMPLETE",
]:
if status in constants.CLOUDFORMATION_HAPPY_STATUS:
with self.spoke_regional_client("cloudformation") as cloudformation:
existing_stack_params_dict = {}
summary_response = cloudformation.get_template_summary(
StackName=self.stack_name_to_use,
)
for parameter in summary_response.get("Parameters"):
for parameter in summary_response.get("Parameters", []):
existing_stack_params_dict[
parameter.get("ParameterKey")
] = parameter.get("DefaultValue")
@@ -301,6 +269,8 @@ def run(self):
if status == "UPDATE_ROLLBACK_COMPLETE":
need_to_provision = True
else:
print(f"existing_stack_params_dict is {existing_stack_params_dict}")
print(f"params_to_use is {params_to_use}")
if existing_stack_params_dict == params_to_use:
self.info(f"params unchanged")
if template_to_use == cfn_tools.dump_yaml(existing_template):
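ensure_stack_is_in_complete_status now polls describe_stacks directly and leans on the new constants: keep sleeping while the status is in CLOUDFORMATION_IN_PROGRESS_STATUS, then fail fast if the stack landed in CLOUDFORMATION_UNHAPPY_STATUS. A condensed, standalone sketch of the same pattern (hypothetical; the real task goes through spoke_regional_client rather than a plain boto3 client):

    import time

    import boto3

    from servicecatalog_puppet import constants

    def wait_for_stable_status(stack_name, region_name):
        # Hypothetical standalone version of ensure_stack_is_in_complete_status.
        # Note: describe_stacks raises a ClientError if the stack does not exist,
        # so callers would need to handle that case separately.
        cloudformation = boto3.client("cloudformation", region_name=region_name)
        waiting = "NotARealStatus"
        current_stack = dict(StackStatus=waiting)
        while current_stack.get("StackStatus") in constants.CLOUDFORMATION_IN_PROGRESS_STATUS + [waiting]:
            stacks = cloudformation.describe_stacks(StackName=stack_name).get("Stacks", [])
            assert len(stacks) == 1, f"expected exactly one stack named {stack_name}"
            current_stack = stacks[0]
            if current_stack.get("StackStatus") in constants.CLOUDFORMATION_IN_PROGRESS_STATUS:
                time.sleep(5)
        if current_stack.get("StackStatus") in constants.CLOUDFORMATION_UNHAPPY_STATUS:
            raise Exception(f"stack {stack_name} is in state {current_stack.get('StackStatus')}")
        return current_stack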
1 change: 1 addition & 0 deletions servicecatalog_puppet/workflow/tasks.py
@@ -322,6 +322,7 @@ def on_task_failure(task, exception):

def print_stats():
mem = psutil.virtual_memory()

logger.info(
f"memory usage: total={math.ceil(mem.total / 1024 / 1024)}MB used={math.ceil(mem.used / 1024 / 1024)}MB percent={mem.percent}%"
)
2 changes: 1 addition & 1 deletion setup.py
@@ -60,7 +60,7 @@

setup_kwargs = {
'name': 'aws-service-catalog-puppet',
'version': '0.180.1',
'version': '0.181.0',
'description': 'Making it easier to deploy ServiceCatalog products',
'long_description': '# aws-service-catalog-puppet\n\n![logo](./docs/logo.png) \n\n## Badges\n\n[![codecov](https://codecov.io/gh/awslabs/aws-service-catalog-puppet/branch/master/graph/badge.svg?token=e8M7mdsmy0)](https://codecov.io/gh/awslabs/aws-service-catalog-puppet)\n\n\n## What is it?\nThis is a python3 framework that makes it easier to share multi region AWS Service Catalog portfolios and makes it \npossible to provision products into accounts declaratively using a metadata based rules engine.\n\nWith this framework you define your accounts in a YAML file. You give each account a set of tags, a default region and \na set of enabled regions.\n\nOnce you have done this you can define portfolios should be shared with each set of accounts using the tags and you \ncan specify which regions the shares occur in.\n\nIn addition to this, you can also define products that should be provisioned into accounts using the same tag based \napproach. The framework will assume role into the target account and provision the product on your behalf.\n\n\n## Getting started\n\nYou can read the [installation how to](https://service-catalog-tools-workshop.com/30-how-tos/10-installation/30-service-catalog-puppet.html)\nor you can read through the [every day use](https://service-catalog-tools-workshop.com/30-how-tos/50-every-day-use.html)\nguides.\n\nYou can read the [documentation](https://aws-service-catalog-puppet.readthedocs.io/en/latest/) to understand the inner \nworkings. \n\n\n## Going further\n\nThe framework is one of a pair. The other is [aws-service-catalog-factory](https://github.com/awslabs/aws-service-catalog-factory).\nWith Service Catalog Factory you can create pipelines that deploy multi region portfolios very easily. \n\n## License\n\nThis library is licensed under the Apache 2.0 License. \n \n',
'author': 'Eamonn Faherty',
