Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 9 additions & 2 deletions integration/combination/test_function_with_msk.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,14 +41,20 @@ def test_function_with_msk_trigger_and_s3_onfailure_events_destinations(self):
"combination/function_with_msk_trigger_and_s3_onfailure_events_destinations", parameters
)

def test_function_with_msk_trigger_and_confluent_schema_registry(self):
def test_function_with_msk_trigger_and_premium_features(self):
companion_stack_outputs = self.companion_stack_outputs
parameters = self.get_parameters(companion_stack_outputs)
cluster_name = "MskCluster4-" + generate_suffix()
parameters.append(self.generate_parameter("MskClusterName4", cluster_name))
self._common_validations_for_MSK(
self._common_validations_for_MSK("combination/function_with_msk_trigger_and_premium_features", parameters)
event_source_mapping_result = self._common_validations_for_MSK(
"combination/function_with_msk_trigger_and_confluent_schema_registry", parameters
)
# Verify error handling properties are correctly set
self.assertTrue(event_source_mapping_result.get("BisectBatchOnFunctionError"))
self.assertEqual(event_source_mapping_result.get("MaximumRecordAgeInSeconds"), 3600)
self.assertEqual(event_source_mapping_result.get("MaximumRetryAttempts"), 3)
self.assertEqual(event_source_mapping_result.get("FunctionResponseTypes"), ["ReportBatchItemFailures"])

def _common_validations_for_MSK(self, file_name, parameters):
self.create_and_verify_stack(file_name, parameters)
Expand All @@ -74,6 +80,7 @@ def _common_validations_for_MSK(self, file_name, parameters):

self.assertEqual(event_source_mapping_function_arn, lambda_function_arn)
self.assertEqual(event_source_mapping_kafka_cluster_arn, msk_cluster_arn)
return event_source_mapping_result

def get_parameters(self, dictionary):
parameters = []
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@ class TestFunctionWithSelfManagedKafka(BaseTest):
@pytest.mark.flaky(reruns=5)
@parameterized.expand(
[
"combination/function_with_self_managed_kafka",
"combination/function_with_self_managed_kafka_intrinsics",
]
)
Expand All @@ -30,3 +29,29 @@ def test_function_with_self_managed_kafka(self, file_name):
event_source_mapping_result = lambda_client.get_event_source_mapping(UUID=event_source_mapping_id)
event_source_mapping_function_arn = event_source_mapping_result["FunctionArn"]
self.assertEqual(event_source_mapping_function_arn, lambda_function_arn)

@parameterized.expand(["combination/function_with_self_managed_kafka"])
def test_function_with_self_managed_kafka_with_provisioned_mode(self, file_name):
    """Deploy a self-managed Kafka template and verify the event source mapping wiring.

    Checks that the created AWS::Lambda::EventSourceMapping targets the deployed
    function and that the error-handling properties declared in the template made
    it through the transform onto the mapping.
    """
    self.create_and_verify_stack(file_name)

    # Resolve the deployed function and its event source mapping, then confirm
    # the mapping points at the function created by this stack.
    lambda_client = self.client_provider.lambda_client
    function_name = self.get_physical_id_by_type("AWS::Lambda::Function")
    function_arn = lambda_client.get_function_configuration(FunctionName=function_name)["FunctionArn"]
    mapping_id = self.get_physical_id_by_type("AWS::Lambda::EventSourceMapping")
    mapping = lambda_client.get_event_source_mapping(UUID=mapping_id)
    self.assertEqual(mapping["FunctionArn"], function_arn)

    # Error-handling settings must match the values declared in the template.
    self.assertTrue(mapping.get("BisectBatchOnFunctionError"))
    self.assertEqual(mapping.get("MaximumRecordAgeInSeconds"), 3600)
    self.assertEqual(mapping.get("MaximumRetryAttempts"), 3)
    self.assertEqual(mapping.get("FunctionResponseTypes"), ["ReportBatchItemFailures"])

    # Uncomment once the SDK exposes ProvisionedPollerConfig in the response.
    # provisioned_poller_config = mapping["ProvisionedPollerConfig"]
    # actual_poller_group_name = provisioned_poller_config["PollerGroupName"]
    # self.assertEqual(
    #     actual_poller_group_name,
    #     "test1",
    #     f"Expected PollerGroupName to be 'test1' but got '{actual_poller_group_name}'",
    # )
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
[
{
"LogicalResourceId": "MyApi",
"ResourceType": "AWS::ApiGateway::RestApi"
},
{
"LogicalResourceId": "MyApiDeployment",
"ResourceType": "AWS::ApiGateway::Deployment"
},
{
"LogicalResourceId": "MyApiProdStage",
"ResourceType": "AWS::ApiGateway::Stage"
},
{
"LogicalResourceId": "ApiGatewayDomainName",
"ResourceType": "AWS::ApiGateway::DomainName"
},
{
"LogicalResourceId": "MyApiBasePathMapping",
"ResourceType": "AWS::ApiGateway::BasePathMapping"
}
]
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
[
{
"LogicalResourceId": "MyApi",
"ResourceType": "AWS::ApiGateway::RestApi"
},
{
"LogicalResourceId": "MyApiDeployment",
"ResourceType": "AWS::ApiGateway::Deployment"
},
{
"LogicalResourceId": "MyApiProdStage",
"ResourceType": "AWS::ApiGateway::Stage"
}
]
Original file line number Diff line number Diff line change
Expand Up @@ -66,8 +66,17 @@ Resources:
Ref: MyMskCluster
Topics:
- SchemaRegistryTestTopic
DestinationConfig:
OnFailure:
Type: Kafka
Destination: kafka://testTopic
ProvisionedPollerConfig:
MinimumPollers: 1
BisectBatchOnFunctionError: true
MaximumRecordAgeInSeconds: 3600
MaximumRetryAttempts: 3
FunctionResponseTypes:
- ReportBatchItemFailures
SchemaRegistryConfig:
AccessConfigs:
- Type: BASIC_AUTH
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,18 @@ Resources:
- 123.45.67.89:9096
Topics:
- Topic1
ProvisionedPollerConfig:
MinimumPollers: 1
PollerGroupName: test1
SourceAccessConfigurations:
- Type: BASIC_AUTH
URI:
Ref: KafkaUserSecret
BisectBatchOnFunctionError: true
MaximumRecordAgeInSeconds: 3600
MaximumRetryAttempts: 3
FunctionResponseTypes:
- ReportBatchItemFailures

KafkaUserSecret:
Type: AWS::SecretsManager::Secret
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
Parameters:
IpAddressType:
Type: String
Default: dualstack
DomainName:
Type: String
CertificateArn:
Type: String

Resources:
MyApi:
Type: AWS::Serverless::Api
Properties:
StageName: Prod
DefinitionUri: ${definitionuri}
Domain:
DomainName: !Ref DomainName
CertificateArn: !Ref CertificateArn
EndpointConfiguration: REGIONAL
IpAddressType: !Ref IpAddressType

Metadata:
SamTransformTest: true
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
Parameters:
IpAddressType:
Type: String
Default: ipv4

Resources:
MyApi:
Type: AWS::Serverless::Api
Properties:
StageName: Prod
DefinitionUri: ${definitionuri}
EndpointConfiguration:
Type: REGIONAL
IpAddressType: !Ref IpAddressType
Metadata:
SamTransformTest: true
37 changes: 37 additions & 0 deletions integration/single/test_api_with_domain_ipaddresstype.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
from unittest.case import skipIf

from integration.config.service_names import CUSTOM_DOMAIN
from integration.helpers.base_internal_test import BaseInternalTest
from integration.helpers.base_test import nonblocking
from integration.helpers.resource import current_region_not_included


@skipIf(
    current_region_not_included([CUSTOM_DOMAIN]),
    "Custom domain is not supported in this testing region",
)
@nonblocking
class TestApiWithDomainIpAddressType(BaseInternalTest):
    """Integration test for AWS::Serverless::Api with IpAddressType in the Domain configuration."""

    def test_api_with_domain_ipaddresstype(self):
        """Deploy an API whose custom domain requests IpAddressType=dualstack and verify the result."""
        self.create_and_verify_stack("single/api_with_domain_ipaddresstype")

        # Look up the ApiGateway domain name created for the custom domain.
        domain_id = self.get_physical_id_by_type("AWS::ApiGateway::DomainName")
        api_gateway_client = self.client_provider.api_client
        domain = api_gateway_client.get_domain_name(domainName=domain_id)

        # The domain must expose exactly one endpoint type, REGIONAL,
        # matching the template's EndpointConfiguration.
        endpoint_config = domain["endpointConfiguration"]
        endpoint_types = endpoint_config["types"]
        self.assertEqual(1, len(endpoint_types))
        self.assertEqual("REGIONAL", endpoint_types[0])

        # The dualstack address type from the template must be applied.
        self.assertEqual("dualstack", endpoint_config["ipAddressType"])
25 changes: 25 additions & 0 deletions integration/single/test_api_with_ipaddresstype.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
from unittest.case import skipIf

from integration.config.service_names import REST_API
from integration.helpers.base_test import BaseTest
from integration.helpers.resource import current_region_does_not_support


@skipIf(current_region_does_not_support([REST_API]), "Rest API is not supported in this testing region")
class TestApiWithIpAddressType(BaseTest):
    """Integration test for AWS::Serverless::Api with IpAddressType in EndpointConfiguration."""

    def test_api_with_ipaddresstype(self):
        """Deploy the API with IpAddressType=ipv4 and verify the REST API's endpoint settings."""
        stack_parameters = [{"ParameterKey": "IpAddressType", "ParameterValue": "ipv4"}]
        self.create_and_verify_stack("single/api_with_ipaddresstype", stack_parameters)

        # Fetch the deployed REST API and confirm both the endpoint type and
        # the ipv4 address type requested via the template parameter.
        api_id = self.get_physical_id_by_type("AWS::ApiGateway::RestApi")
        api = self.client_provider.api_client.get_rest_api(restApiId=api_id)

        endpoint_config = api["endpointConfiguration"]
        self.assertEqual(endpoint_config["types"], ["REGIONAL"])
        self.assertEqual(endpoint_config["ipAddressType"], "ipv4")
2 changes: 1 addition & 1 deletion samtranslator/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "1.102.0"
__version__ = "1.103.0"
10 changes: 10 additions & 0 deletions samtranslator/internal/schema_source/aws_serverless_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,11 @@ class Domain(BaseModel):
EndpointConfiguration: Optional[SamIntrinsicable[Literal["REGIONAL", "EDGE", "PRIVATE"]]] = domain(
"EndpointConfiguration"
)
IpAddressType: Optional[PassThroughProp] = passthrough_prop(
DOMAIN_STEM,
"IpAddressType",
["AWS::ApiGateway::DomainName.EndpointConfiguration", "IpAddressType"],
)
MutualTlsAuthentication: Optional[PassThroughProp] = passthrough_prop(
DOMAIN_STEM,
"MutualTlsAuthentication",
Expand Down Expand Up @@ -223,6 +228,11 @@ class EndpointConfiguration(BaseModel):
"VPCEndpointIds",
["AWS::ApiGateway::RestApi.EndpointConfiguration", "VpcEndpointIds"],
)
IpAddressType: Optional[PassThroughProp] = passthrough_prop(
ENDPOINT_CONFIGURATION_STEM,
"IpAddressType",
["AWS::ApiGateway::RestApi.EndpointConfiguration", "IpAddressType"],
)


Name = Optional[PassThroughProp]
Expand Down
10 changes: 10 additions & 0 deletions samtranslator/internal/schema_source/aws_serverless_function.py
Original file line number Diff line number Diff line change
Expand Up @@ -423,6 +423,10 @@ class MSKEventProperties(BaseModel):
DestinationConfig: Optional[PassThroughProp] # TODO: add documentation
ProvisionedPollerConfig: Optional[PassThroughProp]
SchemaRegistryConfig: Optional[PassThroughProp]
BisectBatchOnFunctionError: Optional[PassThroughProp] = mskeventproperties("BisectBatchOnFunctionError")
FunctionResponseTypes: Optional[PassThroughProp] = mskeventproperties("FunctionResponseTypes")
MaximumRecordAgeInSeconds: Optional[PassThroughProp] = mskeventproperties("MaximumRecordAgeInSeconds")
MaximumRetryAttempts: Optional[PassThroughProp] = mskeventproperties("MaximumRetryAttempts")


class MSKEvent(BaseModel):
Expand Down Expand Up @@ -463,6 +467,12 @@ class SelfManagedKafkaEventProperties(BaseModel):
Topics: PassThroughProp = selfmanagedkafkaeventproperties("Topics")
ProvisionedPollerConfig: Optional[PassThroughProp]
SchemaRegistryConfig: Optional[PassThroughProp]
BisectBatchOnFunctionError: Optional[PassThroughProp] = selfmanagedkafkaeventproperties(
"BisectBatchOnFunctionError"
)
MaximumRecordAgeInSeconds: Optional[PassThroughProp] = selfmanagedkafkaeventproperties("MaximumRecordAgeInSeconds")
MaximumRetryAttempts: Optional[PassThroughProp] = selfmanagedkafkaeventproperties("MaximumRetryAttempts")
FunctionResponseTypes: Optional[PassThroughProp] = selfmanagedkafkaeventproperties("FunctionResponseTypes")


class SelfManagedKafkaEvent(BaseModel):
Expand Down
Loading