Release 1.63.0 (to main) #3067
GavinZZ committed Mar 27, 2023
2 parents 1e5b158 + efdc6cb · commit 1e29fc8
Showing 160 changed files with 9,363 additions and 1,144 deletions.
1 change: 1 addition & 0 deletions .cfnlintrc.yaml
@@ -125,3 +125,4 @@ ignore_checks:
- E2531 # Deprecated runtime; not relevant for transform tests
- W2531 # EOL runtime; not relevant for transform tests
- E3001 # Invalid or unsupported Type; common in transform tests since they focus on SAM resources
- W2001 # Parameter not used
3 changes: 3 additions & 0 deletions .github/workflows/build.yml
@@ -7,6 +7,8 @@ on:
- "feat-*"
pull_request:
workflow_dispatch:
merge_group:
types: [checks_requested]

jobs:
build:
@@ -22,6 +24,7 @@ jobs:
- "3.8"
- "3.9"
- "3.10"
- "3.11"
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
2 changes: 2 additions & 0 deletions .github/workflows/check_compatibility.yml
@@ -2,6 +2,8 @@ name: Check compatibility

on:
pull_request:
merge_group:
types: [checks_requested]

jobs:
check-compatibility:
2 changes: 2 additions & 0 deletions .github/workflows/codeql.yml
@@ -17,6 +17,8 @@ on:
pull_request:
# The branches below must be a subset of the branches above
branches: [ "develop" ]
merge_group:
types: [checks_requested]
schedule:
- cron: '43 23 * * 2'

2 changes: 1 addition & 1 deletion bin/_file_formatter.py
@@ -45,7 +45,7 @@ def config_additional_args(cls) -> None: # noqa: empty-method-without-abstract-
"""Optionally configure additional args to arg parser."""

def process_file(self, file_path: str) -> None:
with open(file_path, "r", encoding="utf-8") as f:
with open(file_path, encoding="utf-8") as f:
file_str = f.read()
try:
formatted_file_str = self.format_str(file_str)
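The _file_formatter.py change above is behavior-preserving: text read mode is already open()'s default, so dropping the explicit "r" changes nothing. A minimal check, assuming a throwaway file created only for this illustration:

    from pathlib import Path

    path = Path("example.txt")  # hypothetical file, created only for this check
    path.write_text("hello", encoding="utf-8")

    # "r" (text read) is the default mode, so these two calls are equivalent.
    with open(path, encoding="utf-8") as f:
        assert f.read() == "hello"
    with open(path, "r", encoding="utf-8") as f:
        assert f.read() == "hello"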
6 changes: 4 additions & 2 deletions bin/add_transform_test.py
@@ -6,6 +6,7 @@
import shutil
import subprocess
import sys
from copy import deepcopy
from pathlib import Path
from typing import Any, Dict

@@ -74,7 +75,7 @@ def replace_aws_partition(partition: str, file_path: str) -> None:
def generate_transform_test_output_files(input_file_path: str, file_basename: str) -> None:
output_file_option = file_basename + ".json"

with open(os.path.join(input_file_path), "r") as f:
with open(os.path.join(input_file_path)) as f:
manifest = yaml_parse(f) # type: ignore[no-untyped-call]

transform_test_output_paths = {
@@ -86,7 +87,8 @@ def generate_transform_test_output_files(input_file_path: str, file_basename: st
for partition, (region, output_path) in transform_test_output_paths.items():
# Set Boto Session Region to guarantee the same hash input as transform tests for API deployment id
ArnGenerator.BOTO_SESSION_REGION_NAME = region
output_fragment = transform(manifest, {}, ManagedPolicyLoader(iam_client))
# Implicit API Plugin may alter input template file, thus passing a copy here.
output_fragment = transform(deepcopy(manifest), {}, ManagedPolicyLoader(iam_client))

if not CLI_OPTIONS.disable_api_configuration and partition != "aws":
output_fragment = add_regional_endpoint_configuration_if_needed(output_fragment)
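The deepcopy above matters because the same parsed manifest is transformed once per partition, and, as the inline comment says, the Implicit API plugin may modify the template it receives. A minimal sketch of the hazard, using a stand-in fake_transform rather than the real transformer:

    from copy import deepcopy

    def fake_transform(template: dict) -> dict:
        # Stand-in for samtranslator's transform(), which may mutate its input.
        template["Resources"]["ImplicitApi"] = {"Type": "AWS::ApiGateway::RestApi"}
        return template

    manifest = {"Resources": {"MyFunction": {"Type": "AWS::Serverless::Function"}}}

    # Passing a copy keeps each run's input pristine; without deepcopy, the second
    # transform would see the resources injected by the first.
    fake_transform(deepcopy(manifest))
    fake_transform(deepcopy(manifest))
    assert "ImplicitApi" not in manifest["Resources"]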
16 changes: 13 additions & 3 deletions bin/public_interface.py
@@ -15,17 +15,20 @@
import pkgutil
import sys
from pathlib import Path
from typing import Any, Dict, List, NamedTuple, Set, Union
from typing import Any, Dict, List, NamedTuple, Optional, Set, Union

_ARGUMENT_SELF = {"kind": "POSITIONAL_OR_KEYWORD", "name": "self"}


class InterfaceScanner:
def __init__(self) -> None:
def __init__(self, skipped_modules: Optional[List[str]] = None) -> None:
self.signatures: Dict[str, Union[inspect.Signature]] = {}
self.variables: Set[str] = set()
self.skipped_modules: Set[str] = set(skipped_modules or [])

def scan_interfaces_recursively(self, module_name: str) -> None:
if module_name in self.skipped_modules:
return
self._scan_interfaces_in_module(module_name)
for submodule in pkgutil.iter_modules([module_name.replace(".", os.path.sep)]):
submodule_name = module_name + "." + submodule.name
@@ -212,13 +215,20 @@ def main() -> None:
subparsers = parser.add_subparsers(dest="command")
extract = subparsers.add_parser("extract", help="Extract public interfaces")
extract.add_argument("--module", help="The module to extract public interfaces", type=str, default="samtranslator")
extract.add_argument(
"--skipped-module",
help="The modules that should be skipped",
type=str,
nargs="*",
default=["samtranslator.internal"],
)
check = subparsers.add_parser("check", help="Check public interface changes")
check.add_argument("original_json", help="The original public interface JSON file", type=Path)
check.add_argument("new_json", help="The new public interface JSON file", type=Path)
args = parser.parse_args()

if args.command == "extract":
scanner = InterfaceScanner()
scanner = InterfaceScanner(skipped_modules=args.skipped_module)
scanner.scan_interfaces_recursively(args.module)
_print(scanner.signatures, scanner.variables)
elif args.command == "check":
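The new --skipped-module option feeds InterfaceScanner(skipped_modules=...), and scan_interfaces_recursively now returns early for any module in that set, so nothing under it is recorded. A hedged usage sketch, assuming it runs from a repository checkout with bin/ on sys.path and that the scanner keys its signatures by fully qualified names:

    from public_interface import InterfaceScanner  # assumes bin/ is importable

    scanner = InterfaceScanner(skipped_modules=["samtranslator.internal"])
    scanner.scan_interfaces_recursively("samtranslator")

    # Skipped modules never reach _scan_interfaces_in_module, so none of their
    # signatures or variables show up in the extracted public interface.
    assert not any(name.startswith("samtranslator.internal") for name in scanner.signatures)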
4 changes: 2 additions & 2 deletions bin/sam-translate.py
@@ -40,7 +40,7 @@
"--output-template",
help="Location to store resulting CloudFormation template [default: transformed-template.json].",
type=Path,
default=Path("transformed-template.yaml"),
default=Path("transformed-template.json"),
)
parser.add_argument(
"--s3-bucket",
@@ -102,7 +102,7 @@ def package(input_file_path, output_file_path): # type: ignore[no-untyped-def]


def transform_template(input_file_path, output_file_path): # type: ignore[no-untyped-def]
with open(input_file_path, "r") as f:
with open(input_file_path) as f:
sam_template = yaml_parse(f) # type: ignore[no-untyped-call]

try:
11 changes: 3 additions & 8 deletions integration/combination/test_api_settings.py
@@ -1,17 +1,12 @@
import hashlib
from pathlib import Path
from unittest.case import skipIf

from integration.config.service_names import REST_API
from integration.helpers.resource import current_region_does_not_support

try:
from pathlib import Path
except ImportError:
from pathlib2 import Path

from parameterized import parameterized

from integration.config.service_names import REST_API
from integration.helpers.base_test import BaseTest
from integration.helpers.resource import current_region_does_not_support


@skipIf(current_region_does_not_support([REST_API]), "Rest API is not supported in this testing region")
2 changes: 1 addition & 1 deletion integration/combination/test_api_with_authorizer_apikey.py
@@ -85,7 +85,7 @@ def verify_authorized_request(
status = response.status_code
if status != expected_status_code:
raise StatusCodeError(
"Request to {} failed with status: {}, expected status: {}".format(url, status, expected_status_code)
f"Request to {url} failed with status: {status}, expected status: {expected_status_code}"
)

if not header_key or not header_value:
2 changes: 1 addition & 1 deletion integration/combination/test_api_with_authorizers.py
@@ -444,7 +444,7 @@ def verify_authorized_request(

if status != expected_status_code:
raise StatusCodeError(
"Request to {} failed with status: {}, expected status: {}".format(url, status, expected_status_code)
f"Request to {url} failed with status: {status}, expected status: {expected_status_code}"
)

if not header_key or not header_value:
1 change: 1 addition & 0 deletions integration/combination/test_api_with_cors.py
@@ -15,6 +15,7 @@ class TestApiWithCors(BaseTest):
[
"combination/api_with_cors",
"combination/api_with_cors_openapi",
"combination/api_with_cors_and_apikey",
]
)
def test_cors(self, file_name):
4 changes: 2 additions & 2 deletions integration/combination/test_function_with_api.py
@@ -28,9 +28,9 @@ def test_function_with_api(self):

self.assertTrue(
get_api_policy_expectation in policy,
"{} should be present in policy {}".format(get_api_policy_expectation, policy),
f"{get_api_policy_expectation} should be present in policy {policy}",
)
self.assertTrue(
post_api_policy_expectation in policy,
"{} should be present in policy {}".format(post_api_policy_expectation, policy),
f"{post_api_policy_expectation} should be present in policy {policy}",
)
9 changes: 7 additions & 2 deletions integration/combination/test_function_with_sns.py
@@ -8,7 +8,9 @@
@skipIf(current_region_does_not_support([SNS]), "SNS is not supported in this testing region")
class TestFunctionWithSns(BaseTest):
def test_function_with_sns_bucket_trigger(self):
self.create_and_verify_stack("combination/function_with_sns")
template_file_path = "combination/function_with_sns"
self.skip_using_service_detector(template_file_path)
self.create_and_verify_stack(template_file_path)

sns_client = self.client_provider.sns_client

@@ -33,7 +35,9 @@ def test_function_with_sns_bucket_trigger(self):
self.assertEqual(sqs_subscription["TopicArn"], sns_topic_arn)

def test_function_with_sns_intrinsics(self):
self.create_and_verify_stack("combination/function_with_sns_intrinsics")
template_file_path = "combination/function_with_sns_intrinsics"
self.skip_using_service_detector(template_file_path)
self.create_and_verify_stack(template_file_path)

sns_client = self.client_provider.sns_client

@@ -51,3 +55,4 @@ def test_function_with_sns_intrinsics(self):
subscription_arn = subscription["SubscriptionArn"]
subscription_attributes = sns_client.get_subscription_attributes(SubscriptionArn=subscription_arn)
self.assertEqual(subscription_attributes["Attributes"]["FilterPolicy"], '{"price_usd":[{"numeric":["<",100]}]}')
self.assertEqual(subscription_attributes["Attributes"]["FilterPolicyScope"], "MessageAttributes")
4 changes: 2 additions & 2 deletions integration/combination/test_intrinsic_function_support.py
@@ -53,11 +53,11 @@ def test_severless_api_properties_support(self):

self.assertTrue(
get_api_policy_expectation in policy,
"{} should be present in policy {}".format(get_api_policy_expectation, policy),
f"{get_api_policy_expectation} should be present in policy {policy}",
)
self.assertTrue(
post_api_policy_expectation in policy,
"{} should be present in policy {}".format(post_api_policy_expectation, policy),
f"{post_api_policy_expectation} should be present in policy {policy}",
)

# Test for tags
2 changes: 1 addition & 1 deletion integration/config/service_names.py
@@ -22,7 +22,6 @@
SNS = "SNS"
SQS = "SQS"
CUSTOM_DOMAIN = "CustomDomain"
ARM = "ARM"
EFS = "EFS"
S3_EVENTS = "S3Events"
SELF_MANAGED_KAFKA = "SelfManagedKafka"
@@ -36,3 +35,4 @@
EPHEMERAL_STORAGE = "EphemeralStorage"
API_KEY = "ApiKey"
APP_SYNC = "AppSync"
SNS_FILTER_POLICY_SCOPE = "SnsFilterPolicyScope"
16 changes: 6 additions & 10 deletions integration/conftest.py
@@ -1,5 +1,6 @@
import logging
import time
from pathlib import Path

import boto3
import botocore
@@ -18,11 +19,6 @@
from integration.helpers.stack import Stack
from integration.helpers.yaml_utils import load_yaml

try:
from pathlib import Path
except ImportError:
from pathlib2 import Path

LOG = logging.getLogger(__name__)

COMPANION_STACK_NAME = "sam-integ-stack-companion"
@@ -175,16 +171,16 @@ def upload_resources(get_s3):

def get_s3_uri(file_name, uri_type, bucket, region):
if uri_type == "s3":
return "s3://{}/{}".format(bucket, file_name)
return f"s3://{bucket}/{file_name}"

if region == "us-east-1":
return "https://s3.amazonaws.com/{}/{}".format(bucket, file_name)
return f"https://s3.amazonaws.com/{bucket}/{file_name}"
if region == "us-iso-east-1":
return "https://s3.us-iso-east-1.c2s.ic.gov/{}/{}".format(bucket, file_name)
return f"https://s3.us-iso-east-1.c2s.ic.gov/{bucket}/{file_name}"
if region == "us-isob-east-1":
return "https://s3.us-isob-east-1.sc2s.sgov.gov/{}/{}".format(bucket, file_name)
return f"https://s3.us-isob-east-1.sc2s.sgov.gov/{bucket}/{file_name}"

return "https://s3-{}.amazonaws.com/{}/{}".format(region, bucket, file_name)
return f"https://s3-{region}.amazonaws.com/{bucket}/{file_name}"


@pytest.fixture()
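The get_s3_uri rewrite above only swaps str.format for f-strings; the branching is unchanged. A self-contained copy of the helper with made-up bucket, key, and region values illustrates the outputs:

    def get_s3_uri(file_name, uri_type, bucket, region):
        # Copy of the helper shown above, reproduced here for illustration only.
        if uri_type == "s3":
            return f"s3://{bucket}/{file_name}"
        if region == "us-east-1":
            return f"https://s3.amazonaws.com/{bucket}/{file_name}"
        if region == "us-iso-east-1":
            return f"https://s3.us-iso-east-1.c2s.ic.gov/{bucket}/{file_name}"
        if region == "us-isob-east-1":
            return f"https://s3.us-isob-east-1.sc2s.sgov.gov/{bucket}/{file_name}"
        return f"https://s3-{region}.amazonaws.com/{bucket}/{file_name}"

    # Bucket, key, and regions below are made up for the example.
    assert get_s3_uri("code.zip", "s3", "my-bucket", "us-west-2") == "s3://my-bucket/code.zip"
    assert get_s3_uri("code.zip", "https", "my-bucket", "eu-west-1") == "https://s3-eu-west-1.amazonaws.com/my-bucket/code.zip"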
18 changes: 6 additions & 12 deletions integration/helpers/base_test.py
@@ -2,7 +2,10 @@
import logging
import os
import shutil
from pathlib import Path
from unittest.case import TestCase

import boto3
import botocore
import pytest
import requests
@@ -20,6 +23,7 @@

from integration.config.logger_configurations import LoggingConfiguration
from integration.helpers.client_provider import ClientProvider
from integration.helpers.deployer.deployer import Deployer
from integration.helpers.deployer.exceptions.exceptions import ThrottlingError
from integration.helpers.deployer.utils.retry import retry_with_exponential_backoff_and_jitter
from integration.helpers.exception import StatusCodeError
@@ -32,18 +36,8 @@
verify_stack_resources,
)
from integration.helpers.s3_uploader import S3Uploader
from integration.helpers.yaml_utils import dump_yaml, load_yaml

try:
from pathlib import Path
except ImportError:
from pathlib2 import Path
from unittest.case import TestCase

import boto3

from integration.helpers.deployer.deployer import Deployer
from integration.helpers.template import transform_template
from integration.helpers.yaml_utils import dump_yaml, load_yaml

LOG = logging.getLogger(__name__)

@@ -422,7 +416,7 @@ def _fill_template(self, folder, file_name):
data = f.read()
for key, _ in self.code_key_to_file.items():
# We must double the {} to escape them so they will survive a round of unescape
data = data.replace("${{{}}}".format(key), self.get_code_key_s3_uri(key))
data = data.replace(f"${{{key}}}", self.get_code_key_s3_uri(key))
yaml_doc = yaml_parse(data)

dump_yaml(updated_template_path, yaml_doc)
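The _fill_template change relies on f-string brace escaping: doubled braces render as literal braces, so f"${{{key}}}" still produces the ${Key} placeholder used in the integration templates. A quick check with a made-up key name:

    key = "CodeKey"  # hypothetical placeholder name, for illustration only
    # {{ and }} become literal braces; the inner {key} is interpolated.
    assert f"${{{key}}}" == "${CodeKey}"
    assert "CodeUri: ${CodeKey}".replace(f"${{{key}}}", "s3://my-bucket/code.zip") == "CodeUri: s3://my-bucket/code.zip"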
10 changes: 5 additions & 5 deletions integration/helpers/deployer/deployer.py
@@ -140,7 +140,7 @@ def create_changeset(
"ChangeSetType": changeset_type,
"Parameters": parameter_values,
"Capabilities": capabilities,
"Description": "Created by SAM CLI at {0} UTC".format(datetime.utcnow().isoformat()),
"Description": f"Created by SAM CLI at {datetime.utcnow().isoformat()} UTC",
"Tags": tags,
}

@@ -172,7 +172,7 @@ def _create_change_set(self, stack_name, changeset_type, **kwargs):
except botocore.exceptions.ClientError as ex:
if "The bucket you are attempting to access must be addressed using the specified endpoint" in str(ex):
raise deploy_exceptions.DeployBucketInDifferentRegionError(
"Failed to create/update stack {}".format(stack_name)
f"Failed to create/update stack {stack_name}"
)
raise deploy_exceptions.ChangeSetError(stack_name=stack_name, msg=str(ex))

@@ -278,7 +278,7 @@ def wait_for_changeset(self, changeset_id, stack_name):
raise deploy_exceptions.ChangeEmptyError(stack_name=stack_name)

raise deploy_exceptions.ChangeSetError(
stack_name=stack_name, msg="ex: {0} Status: {1}. Reason: {2}".format(ex, status, reason)
stack_name=stack_name, msg=f"ex: {ex} Status: {status}. Reason: {reason}"
)

def execute_changeset(self, changeset_id, stack_name):
@@ -323,7 +323,7 @@ def wait_for_execute(self, stack_name, changeset_type):
elif changeset_type == "UPDATE":
waiter = self._client.get_waiter("stack_update_complete")
else:
raise RuntimeError("Invalid changeset type {0}".format(changeset_type))
raise RuntimeError(f"Invalid changeset type {changeset_type}")

# Poll every 30 seconds. Polling too frequently risks hitting rate limits
# on CloudFormation's DescribeStacks API
@@ -408,7 +408,7 @@ def get_stack_outputs(self, stack_name, echo=True):
try:
outputs = stacks_description["Stacks"][0]["Outputs"]
if echo:
sys.stdout.write("\nStack {stack_name} outputs:\n".format(stack_name=stack_name))
sys.stdout.write(f"\nStack {stack_name} outputs:\n")
sys.stdout.flush()
self._display_stack_outputs(stack_outputs=outputs)
return outputs
1 change: 0 additions & 1 deletion integration/helpers/deployer/utils/artifact_exporter.py
@@ -2,7 +2,6 @@
Logic for uploading to S3 per Cloudformation Specific Resource
This was ported over from the sam-cli repo
"""
# pylint: disable=no-member

# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#