Skip to content

Commit

Permalink
fixing issues #593 and #595
Browse files Browse the repository at this point in the history
  • Loading branch information
eamonnfaherty committed Nov 15, 2022
1 parent 45db83e commit 7aaccb8
Show file tree
Hide file tree
Showing 9 changed files with 72 additions and 60 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

[tool.poetry]
name = "aws-service-catalog-puppet"
version = "0.204.0"
version = "0.205.0"
description = "Making it easier to deploy ServiceCatalog products"
classifiers = ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 3", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Natural Language :: English"]
homepage = "https://service-catalog-tools-workshop.com/"
Expand Down
2 changes: 1 addition & 1 deletion servicecatalog_puppet/commands/task_reference.py
Original file line number Diff line number Diff line change
Expand Up @@ -1686,7 +1686,7 @@ def deploy_from_task_reference(path):

executor_account_id = config.get_executor_account_id()
is_dry_run = is_list_launches = False
execution_mode = "hub"
execution_mode = config.get_execution_mode()
on_complete_url = config.get_on_complete_url()
running_exploded = False
output_cache_starting_point = ""
Expand Down
4 changes: 4 additions & 0 deletions servicecatalog_puppet/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -178,6 +178,10 @@ def get_executor_account_id():
return os.environ.get(environmental_variables.EXECUTOR_ACCOUNT_ID)


def get_execution_mode():
    """Return the puppet execution mode configured via the environment.

    Reads the variable named by ``environmental_variables.EXECUTION_MODE``;
    yields ``None`` when it is not set.
    """
    return os.getenv(environmental_variables.EXECUTION_MODE)


def get_should_use_eventbridge():
return (
os.environ.get(
Expand Down
40 changes: 21 additions & 19 deletions servicecatalog_puppet/waluigi/processes/topological_generations.py
Original file line number Diff line number Diff line change
Expand Up @@ -345,7 +345,7 @@ def on_task_processing_time(task_processing_time_queue, complete_event):
logger.info("shutting down")


def on_task_trace(task_trace_queue, complete_event, puppet_account_id):
def on_task_trace(task_trace_queue, complete_event, puppet_account_id, execution_mode):
bucket = f"sc-puppet-log-store-{puppet_account_id}"
key_prefix = f"{os.getenv('CODEBUILD_BUILD_ID', f'local/{os.getenv(environmental_variables.CACHE_INVALIDATOR)}')}/traces"
with betterboto_client.CrossAccountClientContextManager(
Expand All @@ -361,23 +361,24 @@ def on_task_trace(task_trace_queue, complete_event, puppet_account_id):
except queue.Empty:
continue
else:
tz = (t - float(os.getenv("SCT_START_TIME", 0))) * 1000000
task_reference = task_params.get("task_reference")
s3.put_object(
Bucket=bucket,
Key=f"{key_prefix}/{tz}-{graph.escape(task_reference)}-{'start' if is_start else 'end'}.json",
Body=serialisation_utils.json_dumps(
{
"name": task_reference,
"cat": task_type,
"ph": "B" if is_start else "E",
"pid": 1,
"tid": thread_name,
"ts": tz,
"args": unwrap(task_params),
}
),
)
if execution_mode != constants.EXECUTION_MODE_SPOKE:
tz = (t - float(os.getenv("SCT_START_TIME", 0))) * 1000000
task_reference = task_params.get("task_reference")
s3.put_object(
Bucket=bucket,
Key=f"{key_prefix}/{tz}-{graph.escape(task_reference)}-{'start' if is_start else 'end'}.json",
Body=serialisation_utils.json_dumps(
{
"name": task_reference,
"cat": task_type,
"ph": "B" if is_start else "E",
"pid": 1,
"tid": thread_name,
"ts": tz,
"args": unwrap(task_params),
}
),
)

logger.info("shutting down")

Expand All @@ -388,6 +389,7 @@ def run(
manifest_files_path,
manifest_task_reference_file_path,
puppet_account_id,
execution_mode,
):
resources_file_path = f"{manifest_files_path}/resources.json"
os.environ["SCT_START_TIME"] = str(time.time())
Expand Down Expand Up @@ -439,7 +441,7 @@ def run(
on_task_trace_thread = multiprocessing.Process(
name="on_task_trace",
target=on_task_trace,
args=(task_trace_queue, complete_event, puppet_account_id),
args=(task_trace_queue, complete_event, puppet_account_id, execution_mode),
)
on_task_processing_time_thread.start()
on_task_trace_thread.start()
Expand Down
32 changes: 16 additions & 16 deletions servicecatalog_puppet/waluigi/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,22 +42,22 @@ class WaluigiTaskMixin:
def execute(self):
if self.should_use_caching:
if self.complete():
for task_reference, output in (
self.input().get("reference_dependencies", {}).items()
):
s3_url = output.path.split("/")
bucket = s3_url[2]
key = "/".join(s3_url[3:])
if key.endswith("latest.json"):
target = key
else:
target = ".".join(key.split(".")[0:-1])
target_dir = target.replace("/latest.json", "")
if not os.path.exists(target_dir):
os.makedirs(target_dir)
if not os.path.exists(target):
with self.hub_client("s3") as s3:
s3.download_file(Bucket=bucket, Key=key, Filename=target)
s3_location = self.output().path
s3_url = s3_location.split("/")
bucket = s3_url[2]
key = "/".join(s3_url[3:])
if key.endswith("latest.json"):
target = key
else:
target = ".".join(key.split(".")[0:-1])
target_dir = target.replace("/latest.json", "")
if not os.path.exists(target_dir):
os.makedirs(target_dir)
if not os.path.exists(target):
with self.hub_client("s3") as s3:
s3.download_file(Bucket=bucket, Key=key, Filename=target)
if not os.path.exists(target):
raise Exception(f"{target} was not downloaded from the cache")
else:
self.run()
self.execute()
Expand Down
41 changes: 22 additions & 19 deletions servicecatalog_puppet/waluigi/threads/topological_generations.py
Original file line number Diff line number Diff line change
Expand Up @@ -347,7 +347,8 @@ def on_task_processing_time(task_processing_time_queue, complete_event):
logger.info("shutting down")


def on_task_trace(task_trace_queue, complete_event, puppet_account_id):
def on_task_trace(task_trace_queue, complete_event, puppet_account_id, execution_mode):
logger.info(f"execution_mode: {execution_mode}")
bucket = f"sc-puppet-log-store-{puppet_account_id}"
key_prefix = f"{os.getenv('CODEBUILD_BUILD_ID', f'local/{os.getenv(environmental_variables.CACHE_INVALIDATOR)}')}/traces"
with betterboto_client.CrossAccountClientContextManager(
Expand All @@ -362,23 +363,24 @@ def on_task_trace(task_trace_queue, complete_event, puppet_account_id):
except queue.Empty:
continue
else:
tz = (t - float(os.getenv("SCT_START_TIME", 0))) * 1000000
task_reference = task_params.get("task_reference")
s3.put_object(
Bucket=bucket,
Key=f"{key_prefix}/{tz}-{graph.escape(task_reference)}-{'start' if is_start else 'end'}.json",
Body=serialisation_utils.json_dumps(
{
"name": task_reference,
"cat": task_type,
"ph": "B" if is_start else "E",
"pid": 1,
"tid": thread_name,
"ts": tz,
"args": unwrap(task_params),
}
),
)
if execution_mode != constants.EXECUTION_MODE_SPOKE:
tz = (t - float(os.getenv("SCT_START_TIME", 0))) * 1000000
task_reference = task_params.get("task_reference")
s3.put_object(
Bucket=bucket,
Key=f"{key_prefix}/{tz}-{graph.escape(task_reference)}-{'start' if is_start else 'end'}.json",
Body=serialisation_utils.json_dumps(
{
"name": task_reference,
"cat": task_type,
"ph": "B" if is_start else "E",
"pid": 1,
"tid": thread_name,
"ts": tz,
"args": unwrap(task_params),
}
),
)

logger.info("shutting down")

Expand All @@ -389,6 +391,7 @@ def run(
manifest_files_path,
manifest_task_reference_file_path,
puppet_account_id,
execution_mode,
):
resources_file_path = f"{manifest_files_path}/resources.json"
os.environ["SCT_START_TIME"] = str(time.time())
Expand Down Expand Up @@ -448,7 +451,7 @@ def run(
on_task_trace_thread = threading.Thread(
name="on_task_trace",
target=on_task_trace,
args=(task_trace_queue, complete_event, puppet_account_id),
args=(task_trace_queue, complete_event, puppet_account_id, execution_mode),
)
on_task_processing_time_thread.start()
on_task_trace_thread.start()
Expand Down
1 change: 1 addition & 0 deletions servicecatalog_puppet/workflow/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,7 @@ def run_tasks(
manifest_files_path,
manifest_task_reference_file_path,
puppet_account_id,
execution_mode,
)

cache_invalidator = os.environ.get(environmental_variables.CACHE_INVALIDATOR)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def params_for_results_display(self):

def run(self):
with self.hub_client("s3") as s3:
regional_template = self.key.replace("-${AWS::Region}", f"-{self.region}-ffff")
regional_template = self.key.replace("-${AWS::Region}", f"-{self.region}")
global_template = self.key.replace("-${AWS::Region}", "")
p = dict(Bucket=self.bucket)
if self.version_id != "":
Expand All @@ -42,8 +42,10 @@ def run(self):
try:
s3.download_file(Key=regional_template, Filename=output, **p)
except botocore.exceptions.ClientError as e:
if "404" == str(e.response['Error']['Code']):
self.info(f"Didnt find regional template: {regional_template}, will try global: {global_template}")
if "404" == str(e.response["Error"]["Code"]):
self.info(
f"Didnt find regional template: {regional_template}, will try global: {global_template}"
)
s3.download_file(Key=global_template, Filename=output, **p)
else:
raise e
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@

setup_kwargs = {
'name': 'aws-service-catalog-puppet',
'version': '0.204.0',
'version': '0.205.0',
'description': 'Making it easier to deploy ServiceCatalog products',
'long_description': '# aws-service-catalog-puppet\n\n![logo](./docs/logo.png) \n\n## Badges\n\n[![codecov](https://codecov.io/gh/awslabs/aws-service-catalog-puppet/branch/master/graph/badge.svg?token=e8M7mdsmy0)](https://codecov.io/gh/awslabs/aws-service-catalog-puppet)\n\n\n## What is it?\nThis is a python3 framework that makes it easier to share multi region AWS Service Catalog portfolios and makes it \npossible to provision products into accounts declaratively using a metadata based rules engine.\n\nWith this framework you define your accounts in a YAML file. You give each account a set of tags, a default region and \na set of enabled regions.\n\nOnce you have done this you can define portfolios should be shared with each set of accounts using the tags and you \ncan specify which regions the shares occur in.\n\nIn addition to this, you can also define products that should be provisioned into accounts using the same tag based \napproach. The framework will assume role into the target account and provision the product on your behalf.\n\n\n## Getting started\n\nYou can read the [installation how to](https://service-catalog-tools-workshop.com/30-how-tos/10-installation/30-service-catalog-puppet.html)\nor you can read through the [every day use](https://service-catalog-tools-workshop.com/30-how-tos/50-every-day-use.html)\nguides.\n\nYou can read the [documentation](https://aws-service-catalog-puppet.readthedocs.io/en/latest/) to understand the inner \nworkings. \n\n\n## Going further\n\nThe framework is one of a pair. The other is [aws-service-catalog-factory](https://github.com/awslabs/aws-service-catalog-factory).\nWith Service Catalog Factory you can create pipelines that deploy multi region portfolios very easily. \n\n## License\n\nThis library is licensed under the Apache 2.0 License. \n \n',
'author': 'Eamonn Faherty',
Expand Down

0 comments on commit 7aaccb8

Please sign in to comment.