Merge pull request #3135 from aws/release-v1.66.0
Release 1.66.0 (to main)
ssenchenko committed Apr 27, 2023
2 parents 5252262 + a832112 commit 8bfcb92
Showing 32 changed files with 32,774 additions and 5,804 deletions.
40 changes: 19 additions & 21 deletions bin/_file_formatter.py
@@ -3,6 +3,7 @@
 import os
 import sys
 from abc import ABC, abstractmethod
+from pathlib import Path
 from typing import Type

@@ -44,31 +45,28 @@ def file_extension() -> str:
     def config_additional_args(cls) -> None:  # noqa: empty-method-without-abstract-decorator
         """Optionally configure additional args to arg parser."""

-    def process_file(self, file_path: str) -> None:
-        with open(file_path, encoding="utf-8") as f:
-            file_str = f.read()
-        try:
-            formatted_file_str = self.format_str(file_str)
-        except self.decode_exception() as error:
-            raise ValueError(f"{file_path}: Cannot decode the file content") from error
-        except Exception as error:
-            raise ValueError(f"{file_path}: Fail to process") from error
+    def process_file(self, file_path: Path) -> None:
+        file_str = file_path.read_text(encoding="utf-8")
+        try:
+            formatted_file_str = self.format_str(file_str)
+        except self.decode_exception() as error:
+            raise ValueError(f"{file_path}: Cannot decode the file content") from error
+        except Exception as error:
+            raise ValueError(f"{file_path}: Fail to process") from error
         if file_str != formatted_file_str:
             if self.args.write:
-                with open(file_path, "w", encoding="utf-8") as f:
-                    f.write(formatted_file_str)
+                Path(file_path).write_text(formatted_file_str, encoding="utf-8")
                 print(f"reformatted {file_path}")
             if self.args.check:
                 print(f"would reformat {file_path}")
                 self.unformatted_file_count += 1
         self.scanned_file_found += 1

-    def process_directory(self, directory_path: str) -> None:
+    def process_directory(self, directory_path: Path) -> None:
         for root, _dirs, files in os.walk(directory_path):
             for file in files:
-                file_path = os.path.join(root, file)
-                _, extension = os.path.splitext(file_path)
-                if extension != self.file_extension():
+                file_path = Path(root) / file
+                if file_path.suffix != self.file_extension():
                     continue
                 self.process_file(file_path)

@@ -112,15 +110,15 @@ def main(cls) -> None:
         args = cls.arg_parser.parse_args()
         formatter = cls(args)

-        for path in args.paths:
-            if not os.path.exists(path):
+        for _path in args.paths:
+            path = Path(_path)
+            if not path.exists():
                 raise ValueError(f"{path}: No such file or directory")
-            if os.path.isfile(path):
-                _, extension = os.path.splitext(path)
-                if extension != cls.file_extension():
+            if path.is_file():
+                if path.suffix != cls.file_extension():
                     raise ValueError(f"{path}: Not a format-able file")
                 formatter.process_file(path)
-            elif os.path.isdir(path):
+            elif path.is_dir():
                 formatter.process_directory(path)
             else:
                 raise ValueError(f"{path}: Unsupported path")
48 changes: 23 additions & 25 deletions bin/add_transform_test.py
@@ -2,7 +2,6 @@
 """Automatically create transform tests input and output files given an input template."""
 import argparse
 import json
-import os
 import shutil
 import subprocess
 import sys
@@ -17,8 +16,8 @@
 from samtranslator.translator.transform import transform
 from samtranslator.yaml_helper import yaml_parse

-SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-TRANSFORM_TEST_DIR = os.path.join(SCRIPT_DIR, "..", "tests", "translator")
+SCRIPT_DIR = Path(__file__).parent
+TRANSFORM_TEST_DIR = SCRIPT_DIR.parent / "tests" / "translator"

 iam_client = boto3.client("iam")

@@ -42,13 +41,13 @@
 CLI_OPTIONS = parser.parse_args()


-def read_json_file(file_path: str) -> Dict[str, Any]:
-    template: Dict[str, Any] = json.loads(Path(file_path).read_text(encoding="utf-8"))
+def read_json_file(file_path: Path) -> Dict[str, Any]:
+    template: Dict[str, Any] = json.loads(file_path.read_text(encoding="utf-8"))
     return template


-def write_json_file(obj: Dict[str, Any], file_path: str) -> None:
-    with open(file_path, "w", encoding="utf-8") as f:
+def write_json_file(obj: Dict[str, Any], file_path: Path) -> None:
+    with file_path.open("w", encoding="utf-8") as f:
         json.dump(obj, f, indent=2, sort_keys=True)


@@ -64,24 +63,23 @@ def add_regional_endpoint_configuration_if_needed(template: Dict[str, Any]) -> D
     return template


-def replace_aws_partition(partition: str, file_path: str) -> None:
+def replace_aws_partition(partition: str, file_path: Path) -> None:
     template = read_json_file(file_path)
-    with open(file_path, "w") as file:
-        updated_template = json.loads(json.dumps(template).replace("arn:aws:", f"arn:{partition}:"))
-        file.write(json.dumps(updated_template, indent=2))
+    updated_template = json.loads(json.dumps(template).replace("arn:aws:", f"arn:{partition}:"))
+    file_path.write_text(json.dumps(updated_template, indent=2), encoding="utf-8")
     print(f"Transform Test output files generated {file_path}")


-def generate_transform_test_output_files(input_file_path: str, file_basename: str) -> None:
+def generate_transform_test_output_files(input_file_path: Path, file_basename: str) -> None:
     output_file_option = file_basename + ".json"

-    with open(os.path.join(input_file_path)) as f:
+    with input_file_path.open(encoding="utf-8") as f:
         manifest = yaml_parse(f)  # type: ignore[no-untyped-call]

     transform_test_output_paths = {
-        "aws": ("us-west-2", os.path.join(TRANSFORM_TEST_DIR, "output", output_file_option)),
-        "aws-cn": ("cn-north-1 ", os.path.join(TRANSFORM_TEST_DIR, "output/aws-cn/", output_file_option)),
-        "aws-us-gov": ("us-gov-west-1", os.path.join(TRANSFORM_TEST_DIR, "output/aws-us-gov/", output_file_option)),
+        "aws": ("us-west-2", TRANSFORM_TEST_DIR / "output" / output_file_option),
+        "aws-cn": ("cn-north-1 ", TRANSFORM_TEST_DIR / "output" / "aws-cn" / output_file_option),
+        "aws-us-gov": ("us-gov-west-1", TRANSFORM_TEST_DIR / "output" / "aws-us-gov" / output_file_option),
     }

     for partition, (region, output_path) in transform_test_output_paths.items():
@@ -100,18 +98,18 @@ def generate_transform_test_output_files(input_file_path: str, file_basename: st
         replace_aws_partition(partition, output_path)


-def get_input_file_path() -> str:
+def get_input_file_path() -> Path:
     input_file_option = str(CLI_OPTIONS.template_file)
-    return os.path.join(os.getcwd(), input_file_option)
+    return Path.cwd() / input_file_option


-def copy_input_file_to_transform_test_dir(input_file_path: str, transform_test_input_path: str) -> None:
+def copy_input_file_to_transform_test_dir(input_file_path: Path, transform_test_input_path: Path) -> None:
     shutil.copyfile(input_file_path, transform_test_input_path)
     print(f"Transform Test input file generated {transform_test_input_path}")


-def verify_input_template(input_file_path: str):  # type: ignore[no-untyped-def]
-    if "arn:aws:" in Path(input_file_path).read_text(encoding="utf-8"):
+def verify_input_template(input_file_path: Path) -> None:
+    if "arn:aws:" in input_file_path.read_text(encoding="utf-8"):
         print(
             "WARNING: hardcoded partition name detected. Consider replace it with pseudo parameter {AWS::Partition}",
             file=sys.stderr,
@@ -120,23 +118,23 @@ def verify_input_template(input_file_path: str): # type: ignore[no-untyped-def]

 def format_test_files() -> None:
     subprocess.run(
-        [sys.executable, os.path.join(SCRIPT_DIR, "json-format.py"), "--write", "tests"],
+        [sys.executable, SCRIPT_DIR / "json-format.py", "--write", "tests"],
         check=True,
     )

     subprocess.run(
-        [sys.executable, os.path.join(SCRIPT_DIR, "yaml-format.py"), "--write", "tests"],
+        [sys.executable, SCRIPT_DIR / "yaml-format.py", "--write", "tests"],
         check=True,
     )


 def main() -> None:
     input_file_path = get_input_file_path()
-    file_basename = Path(input_file_path).stem
+    file_basename = input_file_path.stem

     verify_input_template(input_file_path)

-    transform_test_input_path = os.path.join(TRANSFORM_TEST_DIR, "input", file_basename + ".yaml")
+    transform_test_input_path = TRANSFORM_TEST_DIR / "input" / (file_basename + ".yaml")
     copy_input_file_to_transform_test_dir(input_file_path, transform_test_input_path)

     generate_transform_test_output_files(transform_test_input_path, file_basename)
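As an aside, a short sketch of the Path-construction idioms the updated script uses; directory and file names below are invented for illustration:

from pathlib import Path

SCRIPT_DIR = Path(__file__).parent                       # directory containing this script
TESTS_DIR = SCRIPT_DIR.parent / "tests" / "translator"   # "/" joins path segments

input_file = Path.cwd() / "my-template.yaml"             # cwd-relative input, as in get_input_file_path()
print(input_file.stem)                                   # my-template  (name without the last suffix)
print(TESTS_DIR / "input" / (input_file.stem + ".yaml"))
print(TESTS_DIR / "output" / "aws-cn" / (input_file.stem + ".json"))

Unlike os.path.join, the "/" operator needs at least one Path operand; joining two plain strings this way raises a TypeError.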
6 changes: 3 additions & 3 deletions bin/json-format.py
@@ -1,10 +1,10 @@
 #!/usr/bin/env python
 """JSON file formatter (without prettier)."""
-import os
 import sys
+from pathlib import Path

-my_path = os.path.dirname(os.path.abspath(__file__))
-sys.path.insert(0, my_path + "/..")
+# To allow this script to be executed from other directories
+sys.path.insert(0, str(Path(__file__).absolute().parent.parent))

 import json
 from typing import Type
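For context, a small sketch of what the new bootstrap line computes, assuming the script lives under a repository's bin/ directory (layout assumed, not taken from the diff). sys.path entries are conventionally strings, hence the str() call:

import sys
from pathlib import Path

# For a script at <repo>/bin/json-format.py:
#   Path(__file__).absolute()               -> <repo>/bin/json-format.py
#   Path(__file__).absolute().parent        -> <repo>/bin
#   Path(__file__).absolute().parent.parent -> <repo>
repo_root = Path(__file__).absolute().parent.parent
sys.path.insert(0, str(repo_root))  # lets "import samtranslator"-style imports resolve from any working directory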
31 changes: 19 additions & 12 deletions bin/public_interface.py
@@ -8,16 +8,19 @@
 This CLI tool helps automate the detection of compatibility-breaking changes.
 """
 import argparse
+import ast
 import importlib
 import inspect
 import json
 import os.path
 import pkgutil
+import string
 import sys
 from pathlib import Path
 from typing import Any, Dict, List, NamedTuple, Optional, Set, Union

 _ARGUMENT_SELF = {"kind": "POSITIONAL_OR_KEYWORD", "name": "self"}
+_PRINTABLE_CHARS = set(string.printable)


 class InterfaceScanner:
@@ -51,20 +54,24 @@ def _scan_functions_in_module(self, module_name: str) -> None:

     def _scan_variables_in_module(self, module_name: str) -> None:
         """
-        There is no method to verify if a module attribute is a constant,
-        After some experiment, here we assume if an attribute is a value
-        (without `__module__`) and not a module itself is a constant.
+        Use ast to find all assignments at the module level to find constants.
         Note: Class (and other types) should be treated as a variable too
         """
-        for constant_name, _ in inspect.getmembers(
-            importlib.import_module(module_name),
-            lambda obj: not hasattr(obj, "__module__") and not inspect.ismodule(obj),
-        ):
-            if constant_name.startswith("_"):
-                continue
-            full_path = f"{module_name}.{constant_name}"
-            self.variables.add(full_path)
+        module_path = Path(module_name.replace(".", os.path.sep))
+        if module_path.is_dir():
+            module_path /= "__init__.py"
+        else:
+            module_path = module_path.with_suffix(".py")
+        tree = ast.parse("".join([char for char in module_path.read_text() if char in _PRINTABLE_CHARS]))
+        assignments: List[ast.Assign] = [node for node in ast.iter_child_nodes(tree) if isinstance(node, ast.Assign)]
+        for assignment in assignments:
+            for target in assignment.targets:
+                if not isinstance(target, ast.Name):
+                    continue
+                if target.id.startswith("_"):
+                    continue
+                full_path = f"{module_name}.{target.id}"
+                self.variables.add(full_path)

         for class_name, _class in inspect.getmembers(importlib.import_module(module_name), inspect.isclass):
             # Skip imported and ones starting with "_"
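For readers unfamiliar with ast, a self-contained sketch of the technique the rewritten _scan_variables_in_module uses: parse source text and collect top-level assignment names, skipping underscore-prefixed and non-Name targets. The sample source below is invented:

import ast

source = '''
GREETING = "hello"
_INTERNAL = 42
x, y = 1, 2
'''

public_names = []
for node in ast.iter_child_nodes(ast.parse(source)):  # module-level statements only
    if not isinstance(node, ast.Assign):
        continue
    for target in node.targets:
        # Tuple unpacking like `x, y = 1, 2` has an ast.Tuple target, so it is skipped,
        # matching the isinstance(target, ast.Name) filter in the script above.
        if isinstance(target, ast.Name) and not target.id.startswith("_"):
            public_names.append(target.id)

print(public_names)  # ['GREETING']

Annotated assignments such as `LIMIT: int = 10` are ast.AnnAssign nodes rather than ast.Assign, so a filter like this one would not report them.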
44 changes: 20 additions & 24 deletions bin/sam-translate.py
@@ -7,18 +7,17 @@
 import argparse
 import json
 import logging
-import os
 import platform
 import subprocess
 import sys
 from functools import reduce
 from pathlib import Path
+from typing import List

 import boto3

 # To allow this script to be executed from other directories
-my_path = os.path.dirname(os.path.abspath(__file__))
-sys.path.insert(0, my_path + "/..")
+sys.path.insert(0, str(Path(__file__).absolute().parent.parent))

 from samtranslator.model.exceptions import InvalidDocumentException
 from samtranslator.public.translator import ManagedPolicyLoader
@@ -67,7 +66,7 @@
 logging.basicConfig()


-def execute_command(command, args):  # type: ignore[no-untyped-def]
+def execute_command(command: str, args: List[str]) -> None:
     try:
         aws_cmd = "aws" if platform.system().lower() != "windows" else "aws.cmd"
         command_with_args = [aws_cmd, "cloudformation", command, *list(args)]
@@ -83,9 +82,9 @@ def execute_command(command, args): # type: ignore[no-untyped-def]
         sys.exit(e.returncode)


-def package(input_file_path, output_file_path):  # type: ignore[no-untyped-def]
+def package(input_file_path: Path) -> Path:
     template_file = input_file_path
-    package_output_template_file = input_file_path + "._sam_packaged_.yaml"
+    package_output_template_file = Path(str(input_file_path) + "._sam_packaged_.yaml")
     s3_bucket = cli_options.s3_bucket
     args = [
         "--template-file",
@@ -96,50 +95,47 @@ def execute_command(command, args): # type: ignore[no-untyped-def]
         s3_bucket,
     ]

-    execute_command("package", args)  # type: ignore[no-untyped-call]
+    execute_command("package", args)

     return package_output_template_file


-def transform_template(input_file_path, output_file_path):  # type: ignore[no-untyped-def]
-    with open(input_file_path) as f:
+def transform_template(input_file_path: Path, output_file_path: Path):  # type: ignore[no-untyped-def]
+    with input_file_path.open() as f:
         sam_template = yaml_parse(f)  # type: ignore[no-untyped-call]

     try:
         cloud_formation_template = transform(sam_template, {}, ManagedPolicyLoader(iam_client))
         cloud_formation_template_prettified = json.dumps(cloud_formation_template, indent=1)

-        with open(output_file_path, "w") as f:
-            f.write(cloud_formation_template_prettified)
+        output_file_path.write_text(cloud_formation_template_prettified, encoding="utf-8")

-        print("Wrote transformed CloudFormation template to: " + output_file_path)
+        print("Wrote transformed CloudFormation template to: ", output_file_path)
     except InvalidDocumentException as e:
         error_message = reduce(lambda message, error: message + " " + error.message, e.causes, e.message)
         LOG.error(error_message)
         errors = (cause.message for cause in e.causes)
         LOG.error(errors)


-def deploy(template_file):  # type: ignore[no-untyped-def]
+def deploy(template_file: Path) -> None:
     capabilities = cli_options.capabilities
     stack_name = cli_options.stack_name
     args = ["--template-file", template_file, "--capabilities", capabilities, "--stack-name", stack_name]

-    execute_command("deploy", args)  # type: ignore[no-untyped-call]
-
-    return package_output_template_file
+    execute_command("deploy", args)


 if __name__ == "__main__":
-    input_file_path = str(cli_options.template_file)
-    output_file_path = str(cli_options.output_template)
+    input_file_path = Path(cli_options.template_file)
+    output_file_path = Path(cli_options.output_template)

     if cli_options.command == "package":
-        package_output_template_file = package(input_file_path, output_file_path)  # type: ignore[no-untyped-call]
-        transform_template(package_output_template_file, output_file_path)  # type: ignore[no-untyped-call]
+        package_output_template_file = package(input_file_path)
+        transform_template(package_output_template_file, output_file_path)
     elif cli_options.command == "deploy":
-        package_output_template_file = package(input_file_path, output_file_path)  # type: ignore[no-untyped-call]
-        transform_template(package_output_template_file, output_file_path)  # type: ignore[no-untyped-call]
-        deploy(output_file_path)  # type: ignore[no-untyped-call]
+        package_output_template_file = package(input_file_path)
+        transform_template(package_output_template_file, output_file_path)
+        deploy(output_file_path)
     else:
-        transform_template(input_file_path, output_file_path)  # type: ignore[no-untyped-call]
+        transform_template(input_file_path, output_file_path)
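One detail worth noting in package() above: the packaged-template name is built by string concatenation rather than Path.with_suffix(), which would replace the existing extension instead of appending to it. A quick comparison, with an invented file name:

from pathlib import Path

template = Path("sam-template.yaml")

print(Path(str(template) + "._sam_packaged_.yaml"))                 # sam-template.yaml._sam_packaged_.yaml
print(template.with_name(template.name + "._sam_packaged_.yaml"))   # same result, pure-pathlib spelling
print(template.with_suffix("._sam_packaged_.yaml"))                 # sam-template._sam_packaged_.yaml (".yaml" replaced)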
6 changes: 3 additions & 3 deletions bin/transform-test-error-json-format.py
@@ -5,13 +5,13 @@
 It makes error json easier to review by breaking down "errorMessage"
 into list of strings (delimiter: ". ").
 """
-import os
 import sys
+from pathlib import Path

 from typing_extensions import Final

-my_path = os.path.dirname(os.path.abspath(__file__))
-sys.path.insert(0, my_path + "/..")
+# To allow this script to be executed from other directories
+sys.path.insert(0, str(Path(__file__).absolute().parent.parent))

 import json
 from typing import Type
(Diffs for the remaining 26 changed files are not shown here.)
