diff --git a/clients/aws-sdk-bedrock-runtime/CHANGELOG.md b/clients/aws-sdk-bedrock-runtime/CHANGELOG.md index f1e628e..f7cc37e 100644 --- a/clients/aws-sdk-bedrock-runtime/CHANGELOG.md +++ b/clients/aws-sdk-bedrock-runtime/CHANGELOG.md @@ -2,6 +2,24 @@ ## Unreleased +## v0.2.0 + +### API Changes +* Add support for automatically enforcing safeguards across accounts within an AWS Organization. +* Add support for search results. +* Add support for Amazon Bedrock Runtime service tiers. +* Add support for system tools and web citation responses. + +### Enhancements +* Add standard retry mode. + +### Dependencies + +* **Updated**: `smithy_aws_core[eventstream, json]` from `~=0.1.0` to `~=0.2.0`. +* **Updated**: `smithy_core` from `~=0.1.0` to `~=0.2.0`. +* **Updated**: `smithy_http[awscrt]` from `~=0.2.0` to `~=0.3.0`. + + ## v0.1.1 ### API Changes diff --git a/clients/aws-sdk-bedrock-runtime/docs/conf.py b/clients/aws-sdk-bedrock-runtime/docs/conf.py index ca1e0d3..8a68fa8 100644 --- a/clients/aws-sdk-bedrock-runtime/docs/conf.py +++ b/clients/aws-sdk-bedrock-runtime/docs/conf.py @@ -7,7 +7,7 @@ project = "Amazon Bedrock Runtime" author = "Amazon Web Services" -release = "0.1.1" +release = "0.2.0" extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode"] diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/AppliedGuardrailDetails.rst b/clients/aws-sdk-bedrock-runtime/docs/models/AppliedGuardrailDetails.rst new file mode 100644 index 0000000..9385c50 --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/AppliedGuardrailDetails.rst @@ -0,0 +1,8 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +AppliedGuardrailDetails +======================= + +.. autoclass:: aws_sdk_bedrock_runtime.models.AppliedGuardrailDetails + :members: diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/CitationLocationSearchResultLocation.rst b/clients/aws-sdk-bedrock-runtime/docs/models/CitationLocationSearchResultLocation.rst new file mode 100644 index 0000000..5ad6c4a --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/CitationLocationSearchResultLocation.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _CitationLocationSearchResultLocation: + +CitationLocationSearchResultLocation +==================================== + +.. autoclass:: aws_sdk_bedrock_runtime.models.CitationLocationSearchResultLocation diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/CitationLocationWeb.rst b/clients/aws-sdk-bedrock-runtime/docs/models/CitationLocationWeb.rst new file mode 100644 index 0000000..1b07822 --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/CitationLocationWeb.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _CitationLocationWeb: + +CitationLocationWeb +=================== + +.. autoclass:: aws_sdk_bedrock_runtime.models.CitationLocationWeb diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/ContentBlockDeltaToolResult.rst b/clients/aws-sdk-bedrock-runtime/docs/models/ContentBlockDeltaToolResult.rst new file mode 100644 index 0000000..83efbbb --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/ContentBlockDeltaToolResult.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _ContentBlockDeltaToolResult: + +ContentBlockDeltaToolResult +=========================== + +.. 
autoclass:: aws_sdk_bedrock_runtime.models.ContentBlockDeltaToolResult diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/ContentBlockSearchResult.rst b/clients/aws-sdk-bedrock-runtime/docs/models/ContentBlockSearchResult.rst new file mode 100644 index 0000000..5334d7b --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/ContentBlockSearchResult.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _ContentBlockSearchResult: + +ContentBlockSearchResult +======================== + +.. autoclass:: aws_sdk_bedrock_runtime.models.ContentBlockSearchResult diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/ContentBlockStartToolResult.rst b/clients/aws-sdk-bedrock-runtime/docs/models/ContentBlockStartToolResult.rst new file mode 100644 index 0000000..e326ab0 --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/ContentBlockStartToolResult.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _ContentBlockStartToolResult: + +ContentBlockStartToolResult +=========================== + +.. autoclass:: aws_sdk_bedrock_runtime.models.ContentBlockStartToolResult diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/SearchResultBlock.rst b/clients/aws-sdk-bedrock-runtime/docs/models/SearchResultBlock.rst new file mode 100644 index 0000000..1f5fe97 --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/SearchResultBlock.rst @@ -0,0 +1,8 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +SearchResultBlock +================= + +.. autoclass:: aws_sdk_bedrock_runtime.models.SearchResultBlock + :members: diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/SearchResultContentBlock.rst b/clients/aws-sdk-bedrock-runtime/docs/models/SearchResultContentBlock.rst new file mode 100644 index 0000000..475c25a --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/SearchResultContentBlock.rst @@ -0,0 +1,8 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +SearchResultContentBlock +======================== + +.. autoclass:: aws_sdk_bedrock_runtime.models.SearchResultContentBlock + :members: diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/SearchResultLocation.rst b/clients/aws-sdk-bedrock-runtime/docs/models/SearchResultLocation.rst new file mode 100644 index 0000000..ac45633 --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/SearchResultLocation.rst @@ -0,0 +1,8 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +SearchResultLocation +==================== + +.. autoclass:: aws_sdk_bedrock_runtime.models.SearchResultLocation + :members: diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/ServiceTier.rst b/clients/aws-sdk-bedrock-runtime/docs/models/ServiceTier.rst new file mode 100644 index 0000000..1a11ec4 --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/ServiceTier.rst @@ -0,0 +1,8 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +ServiceTier +=========== + +.. autoclass:: aws_sdk_bedrock_runtime.models.ServiceTier + :members: diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/SystemTool.rst b/clients/aws-sdk-bedrock-runtime/docs/models/SystemTool.rst new file mode 100644 index 0000000..a703f41 --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/SystemTool.rst @@ -0,0 +1,8 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +SystemTool +========== + +.. 
autoclass:: aws_sdk_bedrock_runtime.models.SystemTool + :members: diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultBlockDelta.rst b/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultBlockDelta.rst new file mode 100644 index 0000000..8c9fe53 --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultBlockDelta.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _ToolResultBlockDelta: + +ToolResultBlockDelta +==================== + +.. autodata:: aws_sdk_bedrock_runtime.models.ToolResultBlockDelta diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultBlockDeltaText.rst b/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultBlockDeltaText.rst new file mode 100644 index 0000000..668e34c --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultBlockDeltaText.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _ToolResultBlockDeltaText: + +ToolResultBlockDeltaText +======================== + +.. autoclass:: aws_sdk_bedrock_runtime.models.ToolResultBlockDeltaText diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultBlockDeltaUnknown.rst b/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultBlockDeltaUnknown.rst new file mode 100644 index 0000000..a456726 --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultBlockDeltaUnknown.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _ToolResultBlockDeltaUnknown: + +ToolResultBlockDeltaUnknown +=========================== + +.. autoclass:: aws_sdk_bedrock_runtime.models.ToolResultBlockDeltaUnknown diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultBlockStart.rst b/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultBlockStart.rst new file mode 100644 index 0000000..0762e80 --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultBlockStart.rst @@ -0,0 +1,8 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +ToolResultBlockStart +==================== + +.. autoclass:: aws_sdk_bedrock_runtime.models.ToolResultBlockStart + :members: diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultContentBlockSearchResult.rst b/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultContentBlockSearchResult.rst new file mode 100644 index 0000000..0dc1bc8 --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/ToolResultContentBlockSearchResult.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _ToolResultContentBlockSearchResult: + +ToolResultContentBlockSearchResult +================================== + +.. autoclass:: aws_sdk_bedrock_runtime.models.ToolResultContentBlockSearchResult diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/ToolSystemTool.rst b/clients/aws-sdk-bedrock-runtime/docs/models/ToolSystemTool.rst new file mode 100644 index 0000000..6c08af7 --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/ToolSystemTool.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _ToolSystemTool: + +ToolSystemTool +============== + +.. autoclass:: aws_sdk_bedrock_runtime.models.ToolSystemTool diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/WebLocation.rst b/clients/aws-sdk-bedrock-runtime/docs/models/WebLocation.rst new file mode 100644 index 0000000..beb095f --- /dev/null +++ b/clients/aws-sdk-bedrock-runtime/docs/models/WebLocation.rst @@ -0,0 +1,8 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. 
+ +WebLocation +=========== + +.. autoclass:: aws_sdk_bedrock_runtime.models.WebLocation + :members: diff --git a/clients/aws-sdk-bedrock-runtime/pyproject.toml b/clients/aws-sdk-bedrock-runtime/pyproject.toml index fc92e4a..67f1c3d 100644 --- a/clients/aws-sdk-bedrock-runtime/pyproject.toml +++ b/clients/aws-sdk-bedrock-runtime/pyproject.toml @@ -3,7 +3,7 @@ [project] name = "aws_sdk_bedrock_runtime" -version = "0.1.1" +version = "0.2.0" description = "aws_sdk_bedrock_runtime client" readme = "README.md" requires-python = ">=3.12" @@ -24,9 +24,9 @@ classifiers = [ ] dependencies = [ - "smithy_aws_core[eventstream, json]~=0.1.0", - "smithy_core~=0.1.0", - "smithy_http[awscrt]~=0.2.0" + "smithy_aws_core[eventstream, json]~=0.2.0", + "smithy_core~=0.2.0", + "smithy_http[awscrt]~=0.3.0" ] [dependency-groups] diff --git a/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/__init__.py b/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/__init__.py index 6028b64..c8417da 100644 --- a/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/__init__.py +++ b/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/__init__.py @@ -1,3 +1,3 @@ # Code generated by smithy-python-codegen DO NOT EDIT. -__version__: str = "0.1.1" +__version__: str = "0.2.0" diff --git a/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/_private/schemas.py b/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/_private/schemas.py index adb2b70..103a5f5 100644 --- a/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/_private/schemas.py +++ b/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/_private/schemas.py @@ -747,7 +747,7 @@ ), Trait.new( id=ShapeID("smithy.api#pattern"), - value="^(([a-z0-9]+)|(arn:aws(-[^:]+)?:bedrock:[a-z0-9-]{1,20}:[0-9]{12}:guardrail/[a-z0-9]+))$", + value="^(|([a-z0-9]+)|(arn:aws(-[^:]+)?:bedrock:[a-z0-9-]{1,20}:[0-9]{12}:guardrail/[a-z0-9]+))$", ), ], ) @@ -757,7 +757,7 @@ shape_type=ShapeType.STRING, traits=[ Trait.new( - id=ShapeID("smithy.api#pattern"), value="^(([1-9][0-9]{0,7})|(DRAFT))$" + id=ShapeID("smithy.api#pattern"), value="^(|([1-9][0-9]{0,7})|(DRAFT))$" ) ], ) @@ -849,6 +849,90 @@ }, ) +GUARDRAIL_ARN = Schema( + id=ShapeID("com.amazonaws.bedrockruntime#GuardrailArn"), + shape_type=ShapeType.STRING, + traits=[ + Trait.new( + id=ShapeID("smithy.api#length"), value=MappingProxyType({"max": 2048}) + ), + Trait.new( + id=ShapeID("smithy.api#pattern"), + value="^(arn:aws(-[^:]+)?:bedrock:[a-z0-9-]{1,20}:[0-9]{12}:guardrail/[a-z0-9]+)$", + ), + ], +) + +GUARDRAIL_ID = Schema( + id=ShapeID("com.amazonaws.bedrockruntime#GuardrailId"), + shape_type=ShapeType.STRING, + traits=[ + Trait.new( + id=ShapeID("smithy.api#length"), value=MappingProxyType({"max": 2048}) + ), + Trait.new(id=ShapeID("smithy.api#pattern"), value="^([a-z0-9]+)$"), + ], +) + +GUARDRAIL_ORIGIN = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#GuardrailOrigin"), + shape_type=ShapeType.ENUM, + members={ + "REQUEST": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="REQUEST")], + }, + "ACCOUNT_ENFORCED": { + "target": UNIT, + "traits": [ + Trait.new(id=ShapeID("smithy.api#enumValue"), value="ACCOUNT_ENFORCED") + ], + }, + "ORGANIZATION_ENFORCED": { + "target": UNIT, + "traits": [ + Trait.new( + id=ShapeID("smithy.api#enumValue"), value="ORGANIZATION_ENFORCED" + ) + ], + }, + }, +) + +GUARDRAIL_ORIGIN_LIST = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#GuardrailOriginList"), + 
shape_type=ShapeType.LIST, + members={"member": {"target": GUARDRAIL_ORIGIN}}, +) + +GUARDRAIL_OWNERSHIP = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#GuardrailOwnership"), + shape_type=ShapeType.ENUM, + members={ + "SELF": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="SELF")], + }, + "CROSS_ACCOUNT": { + "target": UNIT, + "traits": [ + Trait.new(id=ShapeID("smithy.api#enumValue"), value="CROSS_ACCOUNT") + ], + }, + }, +) + +APPLIED_GUARDRAIL_DETAILS = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#AppliedGuardrailDetails"), + members={ + "guardrailId": {"target": GUARDRAIL_ID}, + "guardrailVersion": {"target": GUARDRAIL_VERSION}, + "guardrailArn": {"target": GUARDRAIL_ARN}, + "guardrailOrigin": {"target": GUARDRAIL_ORIGIN_LIST}, + "guardrailOwnership": {"target": GUARDRAIL_OWNERSHIP}, + }, +) + AUTOMATED_REASONING_RULE_IDENTIFIER = Schema( id=ShapeID("com.amazonaws.bedrockruntime#AutomatedReasoningRuleIdentifier"), shape_type=ShapeType.STRING, @@ -1991,6 +2075,7 @@ "target": GUARDRAIL_AUTOMATED_REASONING_POLICY_ASSESSMENT }, "invocationMetrics": {"target": GUARDRAIL_INVOCATION_METRICS}, + "appliedGuardrailDetails": {"target": APPLIED_GUARDRAIL_DETAILS}, }, ) @@ -2090,11 +2175,17 @@ members={ "guardrailIdentifier": { "target": GUARDRAIL_IDENTIFIER, - "traits": [Trait.new(id=ShapeID("smithy.api#required"))], + "traits": [ + Trait.new(id=ShapeID("smithy.api#default"), value=""), + Trait.new(id=ShapeID("smithy.api#addedDefault")), + ], }, "guardrailVersion": { "target": GUARDRAIL_VERSION, - "traits": [Trait.new(id=ShapeID("smithy.api#required"))], + "traits": [ + Trait.new(id=ShapeID("smithy.api#default"), value=""), + Trait.new(id=ShapeID("smithy.api#addedDefault")), + ], }, "trace": { "target": GUARDRAIL_TRACE, @@ -2269,13 +2360,50 @@ }, ) +SEARCH_RESULT_LOCATION = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#SearchResultLocation"), + members={ + "searchResultIndex": { + "target": INTEGER, + "traits": [ + Trait.new( + id=ShapeID("smithy.api#range"), value=MappingProxyType({"min": 0}) + ) + ], + }, + "start": { + "target": INTEGER, + "traits": [ + Trait.new( + id=ShapeID("smithy.api#range"), value=MappingProxyType({"min": 0}) + ) + ], + }, + "end": { + "target": INTEGER, + "traits": [ + Trait.new( + id=ShapeID("smithy.api#range"), value=MappingProxyType({"min": 0}) + ) + ], + }, + }, +) + +WEB_LOCATION = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#WebLocation"), + members={"url": {"target": STRING}, "domain": {"target": STRING}}, +) + CITATION_LOCATION = Schema.collection( id=ShapeID("com.amazonaws.bedrockruntime#CitationLocation"), shape_type=ShapeType.UNION, members={ + "web": {"target": WEB_LOCATION}, "documentChar": {"target": DOCUMENT_CHAR_LOCATION}, "documentPage": {"target": DOCUMENT_PAGE_LOCATION}, "documentChunk": {"target": DOCUMENT_CHUNK_LOCATION}, + "searchResultLocation": {"target": SEARCH_RESULT_LOCATION}, }, ) @@ -2295,6 +2423,7 @@ id=ShapeID("com.amazonaws.bedrockruntime#Citation"), members={ "title": {"target": STRING}, + "source": {"target": STRING}, "sourceContent": {"target": CITATION_SOURCE_CONTENT_LIST}, "location": {"target": CITATION_LOCATION}, }, @@ -2619,6 +2748,41 @@ }, ) +SEARCH_RESULT_CONTENT_BLOCK = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#SearchResultContentBlock"), + members={ + "text": { + "target": STRING, + "traits": [Trait.new(id=ShapeID("smithy.api#required"))], + } + }, +) + +SEARCH_RESULT_CONTENT_BLOCKS = Schema.collection( + 
id=ShapeID("com.amazonaws.bedrockruntime#SearchResultContentBlocks"), + shape_type=ShapeType.LIST, + members={"member": {"target": SEARCH_RESULT_CONTENT_BLOCK}}, +) + +SEARCH_RESULT_BLOCK = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#SearchResultBlock"), + members={ + "source": { + "target": STRING, + "traits": [Trait.new(id=ShapeID("smithy.api#required"))], + }, + "title": { + "target": STRING, + "traits": [Trait.new(id=ShapeID("smithy.api#required"))], + }, + "content": { + "target": SEARCH_RESULT_CONTENT_BLOCKS, + "traits": [Trait.new(id=ShapeID("smithy.api#required"))], + }, + "citations": {"target": CITATIONS_CONFIG}, + }, +) + VIDEO_FORMAT = Schema.collection( id=ShapeID("com.amazonaws.bedrockruntime#VideoFormat"), shape_type=ShapeType.ENUM, @@ -2701,6 +2865,7 @@ "image": {"target": IMAGE_BLOCK}, "document": {"target": DOCUMENT_BLOCK}, "video": {"target": VIDEO_BLOCK}, + "searchResult": {"target": SEARCH_RESULT_BLOCK}, }, ) @@ -2749,6 +2914,7 @@ "traits": [Trait.new(id=ShapeID("smithy.api#required"))], }, "status": {"target": TOOL_RESULT_STATUS}, + "type": {"target": STRING}, }, ) @@ -2764,6 +2930,19 @@ ], ) +TOOL_USE_TYPE = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#ToolUseType"), + shape_type=ShapeType.ENUM, + members={ + "SERVER_TOOL_USE": { + "target": UNIT, + "traits": [ + Trait.new(id=ShapeID("smithy.api#enumValue"), value="server_tool_use") + ], + } + }, +) + TOOL_USE_BLOCK = Schema.collection( id=ShapeID("com.amazonaws.bedrockruntime#ToolUseBlock"), members={ @@ -2779,6 +2958,7 @@ "target": DOCUMENT, "traits": [Trait.new(id=ShapeID("smithy.api#required"))], }, + "type": {"target": TOOL_USE_TYPE}, }, ) @@ -2796,6 +2976,7 @@ "cachePoint": {"target": CACHE_POINT_BLOCK}, "reasoningContent": {"target": REASONING_CONTENT_BLOCK}, "citationsContent": {"target": CITATIONS_CONTENT_BLOCK}, + "searchResult": {"target": SEARCH_RESULT_BLOCK}, }, ) @@ -2937,6 +3118,35 @@ }, ) +SERVICE_TIER_TYPE = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#ServiceTierType"), + shape_type=ShapeType.ENUM, + members={ + "PRIORITY": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="priority")], + }, + "DEFAULT": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="default")], + }, + "FLEX": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="flex")], + }, + }, +) + +SERVICE_TIER = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#ServiceTier"), + members={ + "type": { + "target": SERVICE_TIER_TYPE, + "traits": [Trait.new(id=ShapeID("smithy.api#required"))], + } + }, +) + SYSTEM_CONTENT_BLOCK = Schema.collection( id=ShapeID("com.amazonaws.bedrockruntime#SystemContentBlock"), shape_type=ShapeType.UNION, @@ -2981,6 +3191,16 @@ }, ) +SYSTEM_TOOL = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#SystemTool"), + members={ + "name": { + "target": TOOL_NAME, + "traits": [Trait.new(id=ShapeID("smithy.api#required"))], + } + }, +) + TOOL_INPUT_SCHEMA = Schema.collection( id=ShapeID("com.amazonaws.bedrockruntime#ToolInputSchema"), shape_type=ShapeType.UNION, @@ -3007,6 +3227,7 @@ shape_type=ShapeType.UNION, members={ "toolSpec": {"target": TOOL_SPECIFICATION}, + "systemTool": {"target": SYSTEM_TOOL}, "cachePoint": {"target": CACHE_POINT_BLOCK}, }, ) @@ -3067,6 +3288,7 @@ }, "requestMetadata": {"target": REQUEST_METADATA}, "performanceConfig": {"target": PERFORMANCE_CONFIGURATION}, + "serviceTier": {"target": SERVICE_TIER}, }, ) @@ -3266,6 
+3488,7 @@ "additionalModelResponseFields": {"target": DOCUMENT}, "trace": {"target": CONVERSE_TRACE}, "performanceConfig": {"target": PERFORMANCE_CONFIGURATION}, + "serviceTier": {"target": SERVICE_TIER}, }, ) @@ -3346,11 +3569,17 @@ members={ "guardrailIdentifier": { "target": GUARDRAIL_IDENTIFIER, - "traits": [Trait.new(id=ShapeID("smithy.api#required"))], + "traits": [ + Trait.new(id=ShapeID("smithy.api#default"), value=""), + Trait.new(id=ShapeID("smithy.api#addedDefault")), + ], }, "guardrailVersion": { "target": GUARDRAIL_VERSION, - "traits": [Trait.new(id=ShapeID("smithy.api#required"))], + "traits": [ + Trait.new(id=ShapeID("smithy.api#default"), value=""), + Trait.new(id=ShapeID("smithy.api#addedDefault")), + ], }, "trace": { "target": GUARDRAIL_TRACE, @@ -3397,6 +3626,7 @@ }, "requestMetadata": {"target": REQUEST_METADATA}, "performanceConfig": {"target": PERFORMANCE_CONFIGURATION}, + "serviceTier": {"target": SERVICE_TIER}, }, ) @@ -3424,6 +3654,7 @@ id=ShapeID("com.amazonaws.bedrockruntime#CitationsDelta"), members={ "title": {"target": STRING}, + "source": {"target": STRING}, "sourceContent": {"target": CITATION_SOURCE_CONTENT_LIST_DELTA}, "location": {"target": CITATION_LOCATION}, }, @@ -3440,6 +3671,18 @@ }, ) +TOOL_RESULT_BLOCK_DELTA = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#ToolResultBlockDelta"), + shape_type=ShapeType.UNION, + members={"text": {"target": STRING}}, +) + +TOOL_RESULT_BLOCKS_DELTA = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#ToolResultBlocksDelta"), + shape_type=ShapeType.LIST, + members={"member": {"target": TOOL_RESULT_BLOCK_DELTA}}, +) + TOOL_USE_BLOCK_DELTA = Schema.collection( id=ShapeID("com.amazonaws.bedrockruntime#ToolUseBlockDelta"), members={ @@ -3456,6 +3699,7 @@ members={ "text": {"target": STRING}, "toolUse": {"target": TOOL_USE_BLOCK_DELTA}, + "toolResult": {"target": TOOL_RESULT_BLOCKS_DELTA}, "reasoningContent": {"target": REASONING_CONTENT_BLOCK_DELTA}, "citation": {"target": CITATIONS_DELTA}, }, @@ -3475,6 +3719,18 @@ }, ) +TOOL_RESULT_BLOCK_START = Schema.collection( + id=ShapeID("com.amazonaws.bedrockruntime#ToolResultBlockStart"), + members={ + "toolUseId": { + "target": TOOL_USE_ID, + "traits": [Trait.new(id=ShapeID("smithy.api#required"))], + }, + "type": {"target": STRING}, + "status": {"target": TOOL_RESULT_STATUS}, + }, +) + TOOL_USE_BLOCK_START = Schema.collection( id=ShapeID("com.amazonaws.bedrockruntime#ToolUseBlockStart"), members={ @@ -3486,13 +3742,17 @@ "target": TOOL_NAME, "traits": [Trait.new(id=ShapeID("smithy.api#required"))], }, + "type": {"target": TOOL_USE_TYPE}, }, ) CONTENT_BLOCK_START = Schema.collection( id=ShapeID("com.amazonaws.bedrockruntime#ContentBlockStart"), shape_type=ShapeType.UNION, - members={"toolUse": {"target": TOOL_USE_BLOCK_START}}, + members={ + "toolUse": {"target": TOOL_USE_BLOCK_START}, + "toolResult": {"target": TOOL_RESULT_BLOCK_START}, + }, ) CONTENT_BLOCK_START_EVENT = Schema.collection( @@ -3571,6 +3831,7 @@ }, "trace": {"target": CONVERSE_STREAM_TRACE}, "performanceConfig": {"target": PERFORMANCE_CONFIGURATION}, + "serviceTier": {"target": SERVICE_TIER}, }, ) @@ -3758,6 +4019,15 @@ ), ], }, + "serviceTier": { + "target": SERVICE_TIER_TYPE, + "traits": [ + Trait.new( + id=ShapeID("smithy.api#httpHeader"), + value="X-Amzn-Bedrock-Service-Tier", + ) + ], + }, }, ) @@ -3794,6 +4064,15 @@ ) ], }, + "serviceTier": { + "target": SERVICE_TIER_TYPE, + "traits": [ + Trait.new( + id=ShapeID("smithy.api#httpHeader"), + value="X-Amzn-Bedrock-Service-Tier", + ) 
+ ], + }, }, ) @@ -3994,6 +4273,15 @@ ), ], }, + "serviceTier": { + "target": SERVICE_TIER_TYPE, + "traits": [ + Trait.new( + id=ShapeID("smithy.api#httpHeader"), + value="X-Amzn-Bedrock-Service-Tier", + ) + ], + }, }, ) @@ -4054,6 +4342,15 @@ ) ], }, + "serviceTier": { + "target": SERVICE_TIER_TYPE, + "traits": [ + Trait.new( + id=ShapeID("smithy.api#httpHeader"), + value="X-Amzn-Bedrock-Service-Tier", + ) + ], + }, }, ) @@ -4079,6 +4376,8 @@ members={ "messages": {"target": MESSAGES}, "system": {"target": SYSTEM_CONTENT_BLOCKS}, + "toolConfig": {"target": TOOL_CONFIGURATION}, + "additionalModelRequestFields": {"target": DOCUMENT}, }, ) @@ -4617,7 +4916,7 @@ "builtIn": "AWS::Region", "required": False, "documentation": "The AWS region used to dispatch the request.", - "type": "String", + "type": "string", } ), "UseDualStack": MappingProxyType( @@ -4626,7 +4925,7 @@ "required": True, "default": False, "documentation": "When true, use the dual-stack endpoint. If the configured endpoint does not support dual-stack, dispatching the request MAY return an error.", - "type": "Boolean", + "type": "boolean", } ), "UseFIPS": MappingProxyType( @@ -4635,7 +4934,7 @@ "required": True, "default": False, "documentation": "When true, send this request to the FIPS-compliant regional endpoint. If the configured endpoint does not have a FIPS compliant endpoint, dispatching the request will return an error.", - "type": "Boolean", + "type": "boolean", } ), "Endpoint": MappingProxyType( @@ -4643,7 +4942,7 @@ "builtIn": "SDK::Endpoint", "required": False, "documentation": "Override the endpoint used to send this request", - "type": "String", + "type": "string", } ), } diff --git a/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/client.py b/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/client.py index 274fd23..763178c 100644 --- a/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/client.py +++ b/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/client.py @@ -7,6 +7,8 @@ from smithy_core.aio.eventstream import DuplexEventStream, OutputEventStream from smithy_core.exceptions import ExpectationNotMetError from smithy_core.interceptors import InterceptorChain +from smithy_core.interfaces.retries import RetryStrategy +from smithy_core.retries import RetryStrategyOptions, RetryStrategyResolver from smithy_core.types import TypedProperties from smithy_http.plugins import user_agent_plugin @@ -79,6 +81,8 @@ def __init__( for plugin in client_plugins: plugin(self._config) + self._retry_strategy_resolver = RetryStrategyResolver() + async def apply_guardrail( self, input: ApplyGuardrailInput, plugins: list[Plugin] | None = None ) -> ApplyGuardrailOutput: @@ -94,7 +98,6 @@ async def apply_guardrail( :param plugins: A list of callables that modify the configuration dynamically. Changes made by these plugins only apply for the duration of the operation execution and will not affect any other operation invocations. - """ operation_plugins: list[Plugin] = [] if plugins: @@ -106,6 +109,24 @@ async def apply_guardrail( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." 
) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -115,7 +136,7 @@ async def apply_guardrail( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline(call) @@ -168,7 +189,6 @@ async def converse( :param plugins: A list of callables that modify the configuration dynamically. Changes made by these plugins only apply for the duration of the operation execution and will not affect any other operation invocations. - """ operation_plugins: list[Plugin] = [] if plugins: @@ -180,6 +200,24 @@ async def converse( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." ) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -189,7 +227,7 @@ async def converse( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline(call) @@ -250,7 +288,6 @@ async def converse_stream( :param plugins: A list of callables that modify the configuration dynamically. Changes made by these plugins only apply for the duration of the operation execution and will not affect any other operation invocations. - """ operation_plugins: list[Plugin] = [] if plugins: @@ -262,6 +299,24 @@ async def converse_stream( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." 
) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -271,7 +326,7 @@ async def converse_stream( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline.output_stream( @@ -317,7 +372,6 @@ async def count_tokens( :param plugins: A list of callables that modify the configuration dynamically. Changes made by these plugins only apply for the duration of the operation execution and will not affect any other operation invocations. - """ operation_plugins: list[Plugin] = [] if plugins: @@ -329,6 +383,24 @@ async def count_tokens( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." ) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -338,7 +410,7 @@ async def count_tokens( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline(call) @@ -354,7 +426,6 @@ async def get_async_invoke( :param plugins: A list of callables that modify the configuration dynamically. Changes made by these plugins only apply for the duration of the operation execution and will not affect any other operation invocations. - """ operation_plugins: list[Plugin] = [] if plugins: @@ -366,6 +437,24 @@ async def get_async_invoke( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." 
) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -375,7 +464,7 @@ async def get_async_invoke( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline(call) @@ -409,7 +498,6 @@ async def invoke_model( :param plugins: A list of callables that modify the configuration dynamically. Changes made by these plugins only apply for the duration of the operation execution and will not affect any other operation invocations. - """ operation_plugins: list[Plugin] = [] if plugins: @@ -421,6 +509,24 @@ async def invoke_model( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." ) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -430,7 +536,7 @@ async def invoke_model( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline(call) @@ -460,7 +566,6 @@ async def invoke_model_with_bidirectional_stream( :param plugins: A list of callables that modify the configuration dynamically. Changes made by these plugins only apply for the duration of the operation execution and will not affect any other operation invocations. - """ operation_plugins: list[Plugin] = [] if plugins: @@ -472,6 +577,24 @@ async def invoke_model_with_bidirectional_stream( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." 
) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -481,7 +604,7 @@ async def invoke_model_with_bidirectional_stream( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline.duplex_stream( @@ -530,7 +653,6 @@ async def invoke_model_with_response_stream( :param plugins: A list of callables that modify the configuration dynamically. Changes made by these plugins only apply for the duration of the operation execution and will not affect any other operation invocations. - """ operation_plugins: list[Plugin] = [] if plugins: @@ -542,6 +664,24 @@ async def invoke_model_with_response_stream( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." ) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -551,7 +691,7 @@ async def invoke_model_with_response_stream( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline.output_stream( @@ -569,7 +709,6 @@ async def list_async_invokes( :param plugins: A list of callables that modify the configuration dynamically. Changes made by these plugins only apply for the duration of the operation execution and will not affect any other operation invocations. - """ operation_plugins: list[Plugin] = [] if plugins: @@ -581,6 +720,24 @@ async def list_async_invokes( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." 
) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -590,7 +747,7 @@ async def list_async_invokes( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline(call) @@ -615,7 +772,6 @@ async def start_async_invoke( :param plugins: A list of callables that modify the configuration dynamically. Changes made by these plugins only apply for the duration of the operation execution and will not affect any other operation invocations. - """ operation_plugins: list[Plugin] = [] if plugins: @@ -627,6 +783,24 @@ async def start_async_invoke( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." ) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -636,7 +810,7 @@ async def start_async_invoke( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline(call) diff --git a/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/config.py b/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/config.py index 6c986b4..ba72641 100644 --- a/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/config.py +++ b/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/config.py @@ -19,7 +19,7 @@ from smithy_core.interceptors import Interceptor from smithy_core.interfaces import URI from smithy_core.interfaces.retries import RetryStrategy -from smithy_core.retries import SimpleRetryStrategy +from smithy_core.retries import RetryStrategyOptions from smithy_core.shapes import ShapeID from smithy_http.aio.crt import AWSCRTHTTPClient from smithy_http.interfaces import HTTPRequestConfiguration @@ -78,7 +78,7 @@ @dataclass(init=False) class Config: - """Configuration for AmazonBedrockFrontendService.""" + """Configuration for Bedrock Runtime.""" auth_scheme_resolver: HTTPAuthSchemeResolver auth_schemes: dict[ShapeID, AuthScheme[Any, Any, Any, Any]] @@ 
-94,7 +94,7 @@ class Config: interceptors: list[_ServiceInterceptor] protocol: ClientProtocol[Any, Any] | None region: str | None - retry_strategy: RetryStrategy + retry_strategy: RetryStrategy | RetryStrategyOptions | None sdk_ua_app_id: str | None transport: ClientTransport[Any, Any] | None user_agent_extra: str | None @@ -117,12 +117,13 @@ def __init__( interceptors: list[_ServiceInterceptor] | None = None, protocol: ClientProtocol[Any, Any] | None = None, region: str | None = None, - retry_strategy: RetryStrategy | None = None, + retry_strategy: RetryStrategy | RetryStrategyOptions | None = None, sdk_ua_app_id: str | None = None, transport: ClientTransport[Any, Any] | None = None, user_agent_extra: str | None = None, ): - """Constructor. + """ + Constructor. :param auth_scheme_resolver: An auth scheme resolver that determines the auth scheme for each operation. @@ -164,7 +165,8 @@ def __init__( service endpoint. :param retry_strategy: - The retry strategy for issuing retry tokens and computing retry delays. + The retry strategy or options for configuring retry behavior. Can be either a + configured RetryStrategy or RetryStrategyOptions to create one. :param sdk_ua_app_id: A unique and opaque application ID that is appended to the User-Agent header. @@ -174,7 +176,6 @@ def __init__( :param user_agent_extra: Additional suffix to be added to the User-Agent header. - """ self.auth_scheme_resolver = auth_scheme_resolver or HTTPAuthSchemeResolver() self.auth_schemes = auth_schemes or { @@ -194,7 +195,7 @@ def __init__( _SCHEMA_AMAZON_BEDROCK_FRONTEND_SERVICE ) self.region = region - self.retry_strategy = retry_strategy or SimpleRetryStrategy() + self.retry_strategy = retry_strategy self.sdk_ua_app_id = sdk_ua_app_id self.transport = transport or AWSCRTHTTPClient() self.user_agent_extra = user_agent_extra diff --git a/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/models.py b/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/models.py index 90cde1b..653e020 100644 --- a/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/models.py +++ b/clients/aws-sdk-bedrock-runtime/src/aws_sdk_bedrock_runtime/models.py @@ -16,6 +16,7 @@ from ._private.schemas import ( ACCESS_DENIED_EXCEPTION as _SCHEMA_ACCESS_DENIED_EXCEPTION, ANY_TOOL_CHOICE as _SCHEMA_ANY_TOOL_CHOICE, + APPLIED_GUARDRAIL_DETAILS as _SCHEMA_APPLIED_GUARDRAIL_DETAILS, APPLY_GUARDRAIL as _SCHEMA_APPLY_GUARDRAIL, APPLY_GUARDRAIL_INPUT as _SCHEMA_APPLY_GUARDRAIL_INPUT, APPLY_GUARDRAIL_OUTPUT as _SCHEMA_APPLY_GUARDRAIL_OUTPUT, @@ -150,13 +151,18 @@ RESOURCE_NOT_FOUND_EXCEPTION as _SCHEMA_RESOURCE_NOT_FOUND_EXCEPTION, RESPONSE_STREAM as _SCHEMA_RESPONSE_STREAM, S3_LOCATION as _SCHEMA_S3_LOCATION, + SEARCH_RESULT_BLOCK as _SCHEMA_SEARCH_RESULT_BLOCK, + SEARCH_RESULT_CONTENT_BLOCK as _SCHEMA_SEARCH_RESULT_CONTENT_BLOCK, + SEARCH_RESULT_LOCATION as _SCHEMA_SEARCH_RESULT_LOCATION, SERVICE_QUOTA_EXCEEDED_EXCEPTION as _SCHEMA_SERVICE_QUOTA_EXCEEDED_EXCEPTION, + SERVICE_TIER as _SCHEMA_SERVICE_TIER, SERVICE_UNAVAILABLE_EXCEPTION as _SCHEMA_SERVICE_UNAVAILABLE_EXCEPTION, SPECIFIC_TOOL_CHOICE as _SCHEMA_SPECIFIC_TOOL_CHOICE, START_ASYNC_INVOKE as _SCHEMA_START_ASYNC_INVOKE, START_ASYNC_INVOKE_INPUT as _SCHEMA_START_ASYNC_INVOKE_INPUT, START_ASYNC_INVOKE_OUTPUT as _SCHEMA_START_ASYNC_INVOKE_OUTPUT, SYSTEM_CONTENT_BLOCK as _SCHEMA_SYSTEM_CONTENT_BLOCK, + SYSTEM_TOOL as _SCHEMA_SYSTEM_TOOL, TAG as _SCHEMA_TAG, THROTTLING_EXCEPTION as _SCHEMA_THROTTLING_EXCEPTION, TOKEN_USAGE as _SCHEMA_TOKEN_USAGE, @@ -165,6 +171,8 @@ 
TOOL_CONFIGURATION as _SCHEMA_TOOL_CONFIGURATION, TOOL_INPUT_SCHEMA as _SCHEMA_TOOL_INPUT_SCHEMA, TOOL_RESULT_BLOCK as _SCHEMA_TOOL_RESULT_BLOCK, + TOOL_RESULT_BLOCK_DELTA as _SCHEMA_TOOL_RESULT_BLOCK_DELTA, + TOOL_RESULT_BLOCK_START as _SCHEMA_TOOL_RESULT_BLOCK_START, TOOL_RESULT_CONTENT_BLOCK as _SCHEMA_TOOL_RESULT_CONTENT_BLOCK, TOOL_SPECIFICATION as _SCHEMA_TOOL_SPECIFICATION, TOOL_USE_BLOCK as _SCHEMA_TOOL_USE_BLOCK, @@ -173,6 +181,7 @@ VALIDATION_EXCEPTION as _SCHEMA_VALIDATION_EXCEPTION, VIDEO_BLOCK as _SCHEMA_VIDEO_BLOCK, VIDEO_SOURCE as _SCHEMA_VIDEO_SOURCE, + WEB_LOCATION as _SCHEMA_WEB_LOCATION, ) @@ -194,7 +203,6 @@ class AccessDeniedException(ServiceError): the requested action. For troubleshooting this error, see ``AccessDeniedException ``_ in the Amazon Bedrock User Guide - """ fault: Literal["client", "server"] | None = "client" @@ -299,7 +307,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class AsyncInvokeS3OutputDataConfig: """ Asynchronous invocation output data settings. - """ s3_uri: str @@ -311,6 +318,7 @@ class AsyncInvokeS3OutputDataConfig: """ A KMS encryption key ID. """ + bucket_owner: str | None = None """ If the bucket belongs to another AWS account, specify that account's ID. @@ -375,7 +383,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class AsyncInvokeOutputDataConfigS3OutputDataConfig: """ A storage location for the output data in an S3 bucket - """ value: AsyncInvokeS3OutputDataConfig @@ -423,7 +430,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ Asynchronous invocation output data settings. - """ @@ -500,14 +506,17 @@ class GetAsyncInvokeOutput: """ The invocation's idempotency token. """ + failure_message: str | None = field(repr=False, default=None) """ An error message. """ + last_modified_time: datetime | None = None """ The invocation's last modified time. """ + end_time: datetime | None = None """ When the invocation ended. @@ -626,7 +635,6 @@ class InternalServerException(ServiceError): An internal server error occurred. For troubleshooting this error, see ``InternalFailure ``_ in the Amazon Bedrock User Guide - """ fault: Literal["client", "server"] | None = "server" @@ -668,7 +676,6 @@ class ThrottlingException(ServiceError): Your request was denied due to exceeding the account quotas for *Amazon Bedrock*. For troubleshooting this error, see ``ThrottlingException ``_ in the Amazon Bedrock User Guide - """ fault: Literal["client", "server"] | None = "client" @@ -710,7 +717,6 @@ class ValidationException(ServiceError): The input fails to satisfy the constraints specified by *Amazon Bedrock*. For troubleshooting this error, see ``ValidationError ``_ in the Amazon Bedrock User Guide - """ fault: Literal["client", "server"] | None = "client" @@ -790,27 +796,33 @@ class ListAsyncInvokesInput: """ Include invocations submitted after this time. """ + submit_time_before: datetime | None = None """ Include invocations submitted before this time. """ + status_equals: str | None = None """ Filter invocations by status. """ + max_results: int | None = None """ The maximum number of invocations to return in one page of results. """ + next_token: str | None = None """ Specify the pagination token from a previous request to retrieve the next page of results. """ + sort_by: str = "SubmissionTime" """ How to sort the response. """ + sort_order: str = "Descending" """ The sorting order for the response. 
@@ -914,7 +926,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class AsyncInvokeSummary: """ A summary of an asynchronous invocation. - """ invocation_arn: str @@ -941,18 +952,22 @@ class AsyncInvokeSummary: """ The invocation's idempotency token. """ + status: str | None = None """ The invocation's status. """ + failure_message: str | None = field(repr=False, default=None) """ An error message. """ + last_modified_time: datetime | None = None """ When the invocation was last modified. """ + end_time: datetime | None = None """ When the invocation ended. @@ -1098,6 +1113,7 @@ class ListAsyncInvokesOutput: Specify the pagination token from a previous request to retrieve the next page of results. """ + async_invoke_summaries: list[AsyncInvokeSummary] | None = None """ A list of invocation summaries. @@ -1184,7 +1200,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ConflictException(ServiceError): """ Error occurred because of a conflict while performing an operation. - """ fault: Literal["client", "server"] | None = "client" @@ -1226,7 +1241,6 @@ class ResourceNotFoundException(ServiceError): The specified resource ARN was not found. For troubleshooting this error, see ``ResourceNotFound ``_ in the Amazon Bedrock User Guide - """ fault: Literal["client", "server"] | None = "client" @@ -1270,7 +1284,6 @@ class ServiceQuotaExceededException(ServiceError): Your request exceeds the service quota for your account. You can view your quotas at ``Viewing service quotas ``_. You can resubmit your request later. - """ fault: Literal["client", "server"] | None = "client" @@ -1315,7 +1328,6 @@ class ServiceUnavailableException(ServiceError): The service isn't currently available. For troubleshooting this error, see ``ServiceUnavailable ``_ in the Amazon Bedrock User Guide - """ fault: Literal["client", "server"] | None = "server" @@ -1357,7 +1369,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class Tag: """ A tag. - """ key: str @@ -1429,18 +1440,22 @@ class StartAsyncInvokeInput: """ Specify idempotency token to ensure that requests are not duplicated. """ + model_id: str | None = None """ The model to invoke. """ + model_input: Document | None = field(repr=False, default=None) """ Input to send to the model. """ + output_data_config: AsyncInvokeOutputDataConfig | None = None """ Where to store the output. """ + tags: list[Tag] | None = None """ Tags to apply to the invocation. @@ -1607,7 +1622,6 @@ class GuardrailImageFormat(StrEnum): class GuardrailImageSourceBytes: """ The bytes details of the guardrail image source. Object used in independent api. - """ value: bytes @@ -1657,7 +1671,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ The image source (image bytes) of the guardrail image source. Object used in independent api. - """ @@ -1696,7 +1709,6 @@ class GuardrailImageBlock: """ Contain an image which user wants guarded. This block is accepted by the guardrails independent API. - """ format: str @@ -1783,7 +1795,6 @@ def _read_value(d: ShapeDeserializer): class GuardrailTextBlock: """ The text block to be evaluated by the guardrail. - """ text: str @@ -1841,7 +1852,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class GuardrailContentBlockText: """ Text within content block to be evaluated by the guardrail. 
- """ value: GuardrailTextBlock @@ -1863,7 +1873,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class GuardrailContentBlockImage: """ Image within guardrail content block to be evaluated by the guardrail. - """ value: GuardrailImageBlock @@ -1912,7 +1921,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ The content block to be evaluated by the guardrail. - """ @@ -1990,18 +1998,22 @@ class ApplyGuardrailInput: """ The guardrail identifier used in the request to apply the guardrail. """ + guardrail_version: str | None = None """ The guardrail version used in the request to apply the guardrail. """ + source: str | None = None """ The source of data used in the request to apply the guardrail. """ + content: list[GuardrailContentBlock] | None = None """ The content details used in the request to apply the guardrail. """ + output_scope: str | None = None """ Specifies the scope of the output that you get in the response. Set to ``FULL`` @@ -2094,18 +2106,168 @@ class GuardrailAction(StrEnum): GUARDRAIL_INTERVENED = "GUARDRAIL_INTERVENED" +class GuardrailOrigin(StrEnum): + REQUEST = "REQUEST" + ACCOUNT_ENFORCED = "ACCOUNT_ENFORCED" + ORGANIZATION_ENFORCED = "ORGANIZATION_ENFORCED" + + +def _serialize_guardrail_origin_list( + serializer: ShapeSerializer, schema: Schema, value: list[str] +) -> None: + member_schema = schema.members["member"] + with serializer.begin_list(schema, len(value)) as ls: + for e in value: + ls.write_string(member_schema, e) + + +def _deserialize_guardrail_origin_list( + deserializer: ShapeDeserializer, schema: Schema +) -> list[str]: + result: list[str] = [] + member_schema = schema.members["member"] + + def _read_value(d: ShapeDeserializer): + if d.is_null(): + d.read_null() + + else: + result.append(d.read_string(member_schema)) + + deserializer.read_list(schema, _read_value) + return result + + +class GuardrailOwnership(StrEnum): + SELF = "SELF" + CROSS_ACCOUNT = "CROSS_ACCOUNT" + + +@dataclass(kw_only=True) +class AppliedGuardrailDetails: + """ + Details about the specific guardrail that was applied during this assessment, + including its identifier, version, ARN, origin, and ownership information. + """ + + guardrail_id: str | None = None + """ + The unique ID of the guardrail that was applied. + """ + + guardrail_version: str | None = None + """ + The version of the guardrail that was applied. + """ + + guardrail_arn: str | None = None + """ + The ARN of the guardrail that was applied. + """ + + guardrail_origin: list[str] | None = None + """ + The origin of how the guardrail was applied. This can be either requested at the + API level or enforced at the account or organization level as a default + guardrail. + """ + + guardrail_ownership: str | None = None + """ + The ownership type of the guardrail, indicating whether it is owned by the + requesting account or is a cross-account guardrail shared from another AWS + account. 
+ """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_APPLIED_GUARDRAIL_DETAILS, self) + + def serialize_members(self, serializer: ShapeSerializer): + if self.guardrail_id is not None: + serializer.write_string( + _SCHEMA_APPLIED_GUARDRAIL_DETAILS.members["guardrailId"], + self.guardrail_id, + ) + + if self.guardrail_version is not None: + serializer.write_string( + _SCHEMA_APPLIED_GUARDRAIL_DETAILS.members["guardrailVersion"], + self.guardrail_version, + ) + + if self.guardrail_arn is not None: + serializer.write_string( + _SCHEMA_APPLIED_GUARDRAIL_DETAILS.members["guardrailArn"], + self.guardrail_arn, + ) + + if self.guardrail_origin is not None: + _serialize_guardrail_origin_list( + serializer, + _SCHEMA_APPLIED_GUARDRAIL_DETAILS.members["guardrailOrigin"], + self.guardrail_origin, + ) + + if self.guardrail_ownership is not None: + serializer.write_string( + _SCHEMA_APPLIED_GUARDRAIL_DETAILS.members["guardrailOwnership"], + self.guardrail_ownership, + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["guardrail_id"] = de.read_string( + _SCHEMA_APPLIED_GUARDRAIL_DETAILS.members["guardrailId"] + ) + + case 1: + kwargs["guardrail_version"] = de.read_string( + _SCHEMA_APPLIED_GUARDRAIL_DETAILS.members["guardrailVersion"] + ) + + case 2: + kwargs["guardrail_arn"] = de.read_string( + _SCHEMA_APPLIED_GUARDRAIL_DETAILS.members["guardrailArn"] + ) + + case 3: + kwargs["guardrail_origin"] = _deserialize_guardrail_origin_list( + de, _SCHEMA_APPLIED_GUARDRAIL_DETAILS.members["guardrailOrigin"] + ) + + case 4: + kwargs["guardrail_ownership"] = de.read_string( + _SCHEMA_APPLIED_GUARDRAIL_DETAILS.members["guardrailOwnership"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_APPLIED_GUARDRAIL_DETAILS, consumer=_consumer) + return kwargs + + @dataclass(kw_only=True) class GuardrailAutomatedReasoningRule: """ References a specific automated reasoning policy rule that was applied during evaluation. - """ identifier: str | None = None """ The unique identifier of the automated reasoning rule. """ + policy_version_arn: str | None = None """ The ARN of the automated reasoning policy version that contains this rule. @@ -2190,13 +2352,13 @@ class GuardrailAutomatedReasoningStatement: """ A logical statement that includes both formal logic representation and natural language explanation. - """ logic: str | None = field(repr=False, default=None) """ The formal logical representation of the statement. """ + natural_language: str | None = field(repr=False, default=None) """ The natural language explanation of the logical statement. @@ -2288,7 +2450,6 @@ class GuardrailAutomatedReasoningLogicWarning: """ Identifies logical issues in the translated statements that exist independent of any policy rules, such as statements that are always true or always false. - """ type: str | None = None @@ -2296,11 +2457,13 @@ class GuardrailAutomatedReasoningLogicWarning: The category of the detected logical issue, such as statements that are always true or always false. 
""" + premises: list[GuardrailAutomatedReasoningStatement] | None = None """ The logical statements that serve as premises under which the claims are validated. """ + claims: list[GuardrailAutomatedReasoningStatement] | None = None """ The logical statements that are validated while assuming the policy and @@ -2384,7 +2547,6 @@ class GuardrailAutomatedReasoningInputTextReference: """ References a portion of the original input text that corresponds to logical elements. - """ text: str | None = field(repr=False, default=None) @@ -2465,7 +2627,6 @@ class GuardrailAutomatedReasoningTranslation: """ Contains the logical translation of natural language input into formal logical statements, including premises, claims, and confidence scores. - """ premises: list[GuardrailAutomatedReasoningStatement] | None = None @@ -2473,11 +2634,13 @@ class GuardrailAutomatedReasoningTranslation: The logical statements that serve as the foundation or assumptions for the claims. """ + claims: list[GuardrailAutomatedReasoningStatement] | None = None """ The logical statements that are being validated against the premises and policy rules. """ + untranslated_premises: ( list[GuardrailAutomatedReasoningInputTextReference] | None ) = None @@ -2485,6 +2648,7 @@ class GuardrailAutomatedReasoningTranslation: References to portions of the original input text that correspond to the premises but could not be fully translated. """ + untranslated_claims: list[GuardrailAutomatedReasoningInputTextReference] | None = ( None ) @@ -2492,6 +2656,7 @@ class GuardrailAutomatedReasoningTranslation: References to portions of the original input text that correspond to the claims but could not be fully translated. """ + confidence: float | None = None """ A confidence score between 0 and 1 indicating how certain the system is about @@ -2611,18 +2776,19 @@ class GuardrailAutomatedReasoningImpossibleFinding: """ Indicates that no valid claims can be made due to logical contradictions in the premises or rules. - """ translation: GuardrailAutomatedReasoningTranslation | None = None """ The logical translation of the input that this finding evaluates. """ + contradicting_rules: list[GuardrailAutomatedReasoningRule] | None = None """ The automated reasoning policy rules that contradict the claims and/or premises in the input. """ + logic_warning: GuardrailAutomatedReasoningLogicWarning | None = None """ Indication of a logic issue with the translation without needing to consider the @@ -2704,17 +2870,18 @@ class GuardrailAutomatedReasoningInvalidFinding: """ Indicates that the claims are logically false and contradictory to the established rules or premises. - """ translation: GuardrailAutomatedReasoningTranslation | None = None """ The logical translation of the input that this finding invalidates. """ + contradicting_rules: list[GuardrailAutomatedReasoningRule] | None = None """ The automated reasoning policy rules that contradict the claims in the input. """ + logic_warning: GuardrailAutomatedReasoningLogicWarning | None = None """ Indication of a logic issue with the translation without needing to consider the @@ -2796,7 +2963,6 @@ class GuardrailAutomatedReasoningNoTranslationsFinding: """ Indicates that no relevant logical information could be extracted from the input for validation. - """ def serialize(self, serializer: ShapeSerializer): @@ -2832,7 +2998,6 @@ class GuardrailAutomatedReasoningScenario: """ Represents a logical scenario where claims can be evaluated as true or false, containing specific logical assignments. 
- """ statements: list[GuardrailAutomatedReasoningStatement] | None = None @@ -2885,21 +3050,23 @@ class GuardrailAutomatedReasoningSatisfiableFinding: """ Indicates that the claims could be either true or false depending on additional assumptions not provided in the input. - """ translation: GuardrailAutomatedReasoningTranslation | None = None """ The logical translation of the input that this finding evaluates. """ + claims_true_scenario: GuardrailAutomatedReasoningScenario | None = None """ An example scenario demonstrating how the claims could be logically true. """ + claims_false_scenario: GuardrailAutomatedReasoningScenario | None = None """ An example scenario demonstrating how the claims could be logically false. """ + logic_warning: GuardrailAutomatedReasoningLogicWarning | None = None """ Indication of a logic issue with the translation without needing to consider the @@ -2989,7 +3156,6 @@ class GuardrailAutomatedReasoningTooComplexFinding: """ Indicates that the input exceeds the processing capacity due to the volume or complexity of the logical information. - """ def serialize(self, serializer: ShapeSerializer): @@ -3078,7 +3244,6 @@ def _read_value(d: ShapeDeserializer): class GuardrailAutomatedReasoningTranslationOption: """ Represents one possible logical interpretation of ambiguous input content. - """ translations: list[GuardrailAutomatedReasoningTranslation] | None = None @@ -3162,7 +3327,6 @@ class GuardrailAutomatedReasoningTranslationAmbiguousFinding: """ Indicates that the input has multiple valid logical interpretations, requiring additional context or clarification. - """ options: list[GuardrailAutomatedReasoningTranslationOption] | None = None @@ -3170,6 +3334,7 @@ class GuardrailAutomatedReasoningTranslationAmbiguousFinding: Different logical interpretations that were detected during translation of the input. """ + difference_scenarios: list[GuardrailAutomatedReasoningScenario] | None = None """ Scenarios showing how the different translation options differ in meaning. @@ -3244,22 +3409,24 @@ class GuardrailAutomatedReasoningValidFinding: """ Indicates that the claims are definitively true and logically implied by the premises, with no possible alternative interpretations. - """ translation: GuardrailAutomatedReasoningTranslation | None = None """ The logical translation of the input that this finding validates. """ + claims_true_scenario: GuardrailAutomatedReasoningScenario | None = None """ An example scenario demonstrating how the claims are logically true. """ + supporting_rules: list[GuardrailAutomatedReasoningRule] | None = None """ The automated reasoning policy rules that support why this result is considered valid. """ + logic_warning: GuardrailAutomatedReasoningLogicWarning | None = None """ Indication of a logic issue with the translation without needing to consider the @@ -3355,7 +3522,6 @@ class GuardrailAutomatedReasoningFindingValid: Contains the result when the automated reasoning evaluation determines that the claims in the input are logically valid and definitively true based on the provided premises and policy rules. - """ value: GuardrailAutomatedReasoningValidFinding @@ -3381,7 +3547,6 @@ class GuardrailAutomatedReasoningFindingInvalid: Contains the result when the automated reasoning evaluation determines that the claims in the input are logically invalid and contradict the established premises or policy rules. 
- """ value: GuardrailAutomatedReasoningInvalidFinding @@ -3407,7 +3572,6 @@ class GuardrailAutomatedReasoningFindingSatisfiable: Contains the result when the automated reasoning evaluation determines that the claims in the input could be either true or false depending on additional assumptions not provided in the input context. - """ value: GuardrailAutomatedReasoningSatisfiableFinding @@ -3436,7 +3600,6 @@ class GuardrailAutomatedReasoningFindingImpossible: Contains the result when the automated reasoning evaluation determines that no valid logical conclusions can be drawn due to contradictions in the premises or policy rules themselves. - """ value: GuardrailAutomatedReasoningImpossibleFinding @@ -3463,7 +3626,6 @@ class GuardrailAutomatedReasoningFindingTranslationAmbiguous: Contains the result when the automated reasoning evaluation detects that the input has multiple valid logical interpretations, requiring additional context or clarification to proceed with validation. - """ value: GuardrailAutomatedReasoningTranslationAmbiguousFinding @@ -3494,7 +3656,6 @@ class GuardrailAutomatedReasoningFindingTooComplex: Contains the result when the automated reasoning evaluation cannot process the input due to its complexity or volume exceeding the system's processing capacity for logical analysis. - """ value: GuardrailAutomatedReasoningTooComplexFinding @@ -3521,7 +3682,6 @@ class GuardrailAutomatedReasoningFindingNoTranslations: Contains the result when the automated reasoning evaluation cannot extract any relevant logical information from the input that can be validated against the policy rules. - """ value: GuardrailAutomatedReasoningNoTranslationsFinding @@ -3582,7 +3742,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: Represents a logical validation result from automated reasoning policy evaluation. The finding indicates whether claims in the input are logically valid, invalid, satisfiable, impossible, or have other logical issues. - """ @@ -3688,7 +3847,6 @@ class GuardrailAutomatedReasoningPolicyAssessment: """ Contains the results of automated reasoning policy evaluation, including logical findings about the validity of claims made in the input content. - """ findings: list[GuardrailAutomatedReasoningFinding] | None = None @@ -3773,7 +3931,6 @@ class GuardrailContentFilterType(StrEnum): class GuardrailContentFilter: """ The content filter for a guardrail. - """ type: str @@ -3795,6 +3952,7 @@ class GuardrailContentFilter: """ The filter strength setting for the guardrail content filter. """ + detected: bool | None = None """ Indicates whether content that breaches the guardrail configuration is detected. @@ -3895,7 +4053,6 @@ def _read_value(d: ShapeDeserializer): class GuardrailContentPolicyAssessment: """ An assessment of a content policy for a guardrail. - """ filters: list[GuardrailContentFilter] @@ -3952,7 +4109,6 @@ class GuardrailContextualGroundingFilterType(StrEnum): class GuardrailContextualGroundingFilter: """ The details for the guardrails contextual grounding filter. - """ type: str @@ -4085,7 +4241,6 @@ def _read_value(d: ShapeDeserializer): class GuardrailContextualGroundingPolicyAssessment: """ The policy assessment details for the guardrails contextual grounding filter. - """ filters: list[GuardrailContextualGroundingFilter] | None = None @@ -4141,13 +4296,13 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class GuardrailImageCoverage: """ The details of the guardrail image coverage. 
- """ guarded: int | None = None """ The count (integer) of images guardrails guarded. """ + total: int | None = None """ Represents the total number of images (integer) that were in the request @@ -4199,13 +4354,13 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class GuardrailTextCharactersCoverage: """ The guardrail coverage for the text characters. - """ guarded: int | None = None """ The text characters that were guarded by the guardrail coverage. """ + total: int | None = None """ The total text characters by the guardrail coverage. @@ -4259,13 +4414,13 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class GuardrailCoverage: """ The action of the guardrail coverage details. - """ text_characters: GuardrailTextCharactersCoverage | None = None """ The text characters of the guardrail coverage details. """ + images: GuardrailImageCoverage | None = None """ The guardrail coverage for images (the number of images that guardrails @@ -4316,7 +4471,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class GuardrailUsage: """ The details on the use of the guardrail. - """ topic_policy_units: int @@ -4353,10 +4507,12 @@ class GuardrailUsage: """ The content policy image units processed by the guardrail. """ + automated_reasoning_policy_units: int | None = None """ The number of text units processed by the automated reasoning policy. """ + automated_reasoning_policies: int | None = None """ The number of automated reasoning policies that were processed during the @@ -4479,17 +4635,18 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class GuardrailInvocationMetrics: """ The invocation metrics for the guardrail. - """ guardrail_processing_latency: int | None = None """ The processing latency details for the guardrail invocation metrics. """ + usage: GuardrailUsage | None = None """ The usage details for the guardrail invocation metrics. """ + guardrail_coverage: GuardrailCoverage | None = None """ The coverage details for the guardrail invocation metrics. @@ -4594,7 +4751,6 @@ class GuardrailPiiEntityType(StrEnum): class GuardrailPiiEntityFilter: """ A Personally Identifiable Information (PII) entity configured in a guardrail. - """ match: str @@ -4704,7 +4860,6 @@ def _read_value(d: ShapeDeserializer): class GuardrailRegexFilter: """ A Regex filter configured in a guardrail. - """ action: str @@ -4716,14 +4871,17 @@ class GuardrailRegexFilter: """ The regex filter name. """ + match: str | None = None """ The regesx filter match. """ + regex: str | None = None """ The regex query. """ + detected: bool | None = None """ Indicates whether custom regex entities that breach the guardrail configuration @@ -4827,8 +4985,7 @@ def _read_value(d: ShapeDeserializer): @dataclass(kw_only=True) class GuardrailSensitiveInformationPolicyAssessment: """ - The assessment for aPersonally Identifiable Information (PII) policy. - + The assessment for a Personally Identifiable Information (PII) policy. """ pii_entities: list[GuardrailPiiEntityFilter] @@ -4913,7 +5070,6 @@ class GuardrailTopicType(StrEnum): class GuardrailTopic: """ Information about a topic guardrail. - """ name: str @@ -5016,7 +5172,6 @@ def _read_value(d: ShapeDeserializer): class GuardrailTopicPolicyAssessment: """ A behavior assessment of a topic policy. - """ topics: list[GuardrailTopic] @@ -5067,7 +5222,6 @@ class GuardrailWordPolicyAction(StrEnum): class GuardrailCustomWord: """ A custom word configured in a guardrail. 
- """ match: str @@ -5166,7 +5320,6 @@ class GuardrailManagedWordType(StrEnum): class GuardrailManagedWord: """ A managed word configured in a guardrail. - """ match: str @@ -5274,7 +5427,6 @@ def _read_value(d: ShapeDeserializer): class GuardrailWordPolicyAssessment: """ The word policy assessment. - """ custom_words: list[GuardrailCustomWord] @@ -5342,33 +5494,37 @@ class GuardrailAssessment: """ A behavior assessment of the guardrail policies used in a call to the Converse API. - """ topic_policy: GuardrailTopicPolicyAssessment | None = None """ The topic policy. """ + content_policy: GuardrailContentPolicyAssessment | None = None """ The content policy. """ + word_policy: GuardrailWordPolicyAssessment | None = None """ The word policy. """ + sensitive_information_policy: ( GuardrailSensitiveInformationPolicyAssessment | None ) = None """ The sensitive information policy. """ + contextual_grounding_policy: GuardrailContextualGroundingPolicyAssessment | None = ( None ) """ The contextual grounding policy used for the guardrail assessment. """ + automated_reasoning_policy: GuardrailAutomatedReasoningPolicyAssessment | None = ( None ) @@ -5376,11 +5532,18 @@ class GuardrailAssessment: The automated reasoning policy assessment results, including logical validation findings for the input content. """ + invocation_metrics: GuardrailInvocationMetrics | None = None """ The invocation metrics for the guardrail assessment. """ + applied_guardrail_details: AppliedGuardrailDetails | None = None + """ + Details about the specific guardrail that was applied during this assessment, + including its identifier, version, ARN, origin, and ownership information. + """ + def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_GUARDRAIL_ASSESSMENT, self) @@ -5425,6 +5588,12 @@ def serialize_members(self, serializer: ShapeSerializer): self.invocation_metrics, ) + if self.applied_guardrail_details is not None: + serializer.write_struct( + _SCHEMA_GUARDRAIL_ASSESSMENT.members["appliedGuardrailDetails"], + self.applied_guardrail_details, + ) + @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(**cls.deserialize_kwargs(deserializer)) @@ -5470,6 +5639,11 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: GuardrailInvocationMetrics.deserialize(de) ) + case 7: + kwargs["applied_guardrail_details"] = ( + AppliedGuardrailDetails.deserialize(de) + ) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -5506,7 +5680,6 @@ def _read_value(d: ShapeDeserializer): class GuardrailOutputContent: """ The output content produced by the guardrail. - """ text: str | None = None @@ -5596,6 +5769,7 @@ class ApplyGuardrailOutput: """ The reason for the action taken when harmful content is detected. """ + guardrail_coverage: GuardrailCoverage | None = None """ The guardrail coverage details in the apply guardrail response. @@ -5723,15 +5897,14 @@ class GuardrailConfiguration: """ Configuration information for a guardrail that you use with the ``Converse ``_ operation. - """ - guardrail_identifier: str + guardrail_identifier: str = "" """ The identifier for the guardrail. """ - guardrail_version: str + guardrail_version: str = "" """ The version of the guardrail. """ @@ -5825,7 +5998,6 @@ class InferenceConfiguration: If you need to pass additional parameters that the model supports, use the ``additionalModelRequestFields`` request field in the call to ``Converse`` or ``ConverseStream``. For more information, see ``Model parameters ``_ . 
- """ max_tokens: int | None = None @@ -5835,6 +6007,7 @@ class InferenceConfiguration: information, see `Inference parameters for foundation models `_ . """ + temperature: float | None = None """ The likelihood of the model selecting higher-probability options while @@ -5846,6 +6019,7 @@ class InferenceConfiguration: more information, see `Inference parameters for foundation models `_ . """ + top_p: float | None = None """ The percentage of most-likely candidates that the model considers for the next @@ -5857,6 +6031,7 @@ class InferenceConfiguration: more information, see `Inference parameters for foundation models `_ . """ + stop_sequences: list[str] | None = None """ A list of stop sequences. A stop sequence is a sequence of characters that @@ -5934,7 +6109,6 @@ class CachePointType(StrEnum): class CachePointBlock: """ Defines a section of content to be cached for reuse in subsequent API calls. - """ type: str @@ -5975,17 +6149,18 @@ class DocumentCharLocation: """ Specifies a character-level location within a document, providing precise positioning information for cited content using start and end character indices. - """ document_index: int | None = None """ The index of the document within the array of documents provided in the request. """ + start: int | None = None """ The starting character position of the cited content within the document. """ + end: int | None = None """ The ending character position of the cited content within the document. @@ -6048,17 +6223,18 @@ class DocumentChunkLocation: """ Specifies a chunk-level location within a document, providing positioning information for cited content using logical document segments or chunks. - """ document_index: int | None = None """ The index of the document within the array of documents provided in the request. """ + start: int | None = None """ The starting chunk identifier or index of the cited content within the document. """ + end: int | None = None """ The ending chunk identifier or index of the cited content within the document. @@ -6121,17 +6297,18 @@ class DocumentPageLocation: """ Specifies a page-level location within a document, providing positioning information for cited content using page numbers. - """ document_index: int | None = None """ The index of the document within the array of documents provided in the request. """ + start: int | None = None """ The starting page number of the cited content within the document. """ + end: int | None = None """ The ending page number of the cited content within the document. @@ -6189,17 +6366,162 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: return kwargs -@dataclass -class CitationLocationDocumentChar: +@dataclass(kw_only=True) +class SearchResultLocation: """ - The character-level location within the document where the cited content is - found. - + Specifies a search result location within the content array, providing + positioning information for cited content using search result index and block + positions. """ - value: DocumentCharLocation + search_result_index: int | None = None + """ + The index of the search result content block where the cited content is found. + """ - def serialize(self, serializer: ShapeSerializer): + start: int | None = None + """ + The starting position in the content array where the cited content begins. + """ + + end: int | None = None + """ + The ending position in the content array where the cited content ends. 
+ """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_SEARCH_RESULT_LOCATION, self) + + def serialize_members(self, serializer: ShapeSerializer): + if self.search_result_index is not None: + serializer.write_integer( + _SCHEMA_SEARCH_RESULT_LOCATION.members["searchResultIndex"], + self.search_result_index, + ) + + if self.start is not None: + serializer.write_integer( + _SCHEMA_SEARCH_RESULT_LOCATION.members["start"], self.start + ) + + if self.end is not None: + serializer.write_integer( + _SCHEMA_SEARCH_RESULT_LOCATION.members["end"], self.end + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["search_result_index"] = de.read_integer( + _SCHEMA_SEARCH_RESULT_LOCATION.members["searchResultIndex"] + ) + + case 1: + kwargs["start"] = de.read_integer( + _SCHEMA_SEARCH_RESULT_LOCATION.members["start"] + ) + + case 2: + kwargs["end"] = de.read_integer( + _SCHEMA_SEARCH_RESULT_LOCATION.members["end"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_SEARCH_RESULT_LOCATION, consumer=_consumer) + return kwargs + + +@dataclass(kw_only=True) +class WebLocation: + """ + Provides the URL and domain information for the website that was cited when + performing a web search. + """ + + url: str | None = None + """ + The URL that was cited when performing a web search. + """ + + domain: str | None = None + """ + The domain that was cited when performing a web search. + """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_WEB_LOCATION, self) + + def serialize_members(self, serializer: ShapeSerializer): + if self.url is not None: + serializer.write_string(_SCHEMA_WEB_LOCATION.members["url"], self.url) + + if self.domain is not None: + serializer.write_string(_SCHEMA_WEB_LOCATION.members["domain"], self.domain) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["url"] = de.read_string(_SCHEMA_WEB_LOCATION.members["url"]) + + case 1: + kwargs["domain"] = de.read_string( + _SCHEMA_WEB_LOCATION.members["domain"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_WEB_LOCATION, consumer=_consumer) + return kwargs + + +@dataclass +class CitationLocationWeb: + """ + The web URL that was cited for this reference. 
+ """ + + value: WebLocation + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_CITATION_LOCATION, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_CITATION_LOCATION.members["web"], self.value) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(value=WebLocation.deserialize(deserializer)) + + +@dataclass +class CitationLocationDocumentChar: + """ + The character-level location within the document where the cited content is + found. + """ + + value: DocumentCharLocation + + def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_CITATION_LOCATION, self) def serialize_members(self, serializer: ShapeSerializer): @@ -6216,7 +6538,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class CitationLocationDocumentPage: """ The page-level location within the document where the cited content is found. - """ value: DocumentPageLocation @@ -6239,7 +6560,6 @@ class CitationLocationDocumentChunk: """ The chunk-level location within the document where the cited content is found, typically used for documents that have been segmented into logical chunks. - """ value: DocumentChunkLocation @@ -6257,6 +6577,28 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(value=DocumentChunkLocation.deserialize(deserializer)) +@dataclass +class CitationLocationSearchResultLocation: + """ + The search result location where the cited content is found, including the + search result index and block positions within the content array. + """ + + value: SearchResultLocation + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_CITATION_LOCATION, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_struct( + _SCHEMA_CITATION_LOCATION.members["searchResultLocation"], self.value + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(value=SearchResultLocation.deserialize(deserializer)) + + @dataclass class CitationLocationUnknown: """Represents an unknown variant. @@ -6281,9 +6623,11 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: CitationLocation = Union[ - CitationLocationDocumentChar + CitationLocationWeb + | CitationLocationDocumentChar | CitationLocationDocumentPage | CitationLocationDocumentChunk + | CitationLocationSearchResultLocation | CitationLocationUnknown ] @@ -6291,7 +6635,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: Specifies the precise location within a source document where cited content can be found. This can include character-level positions, page numbers, or document chunks depending on the document type and indexing method. 
- """ @@ -6312,14 +6655,20 @@ def deserialize(self, deserializer: ShapeDeserializer) -> CitationLocation: def _consumer(self, schema: Schema, de: ShapeDeserializer) -> None: match schema.expect_member_index(): case 0: - self._set_result(CitationLocationDocumentChar.deserialize(de)) + self._set_result(CitationLocationWeb.deserialize(de)) case 1: - self._set_result(CitationLocationDocumentPage.deserialize(de)) + self._set_result(CitationLocationDocumentChar.deserialize(de)) case 2: + self._set_result(CitationLocationDocumentPage.deserialize(de)) + + case 3: self._set_result(CitationLocationDocumentChunk.deserialize(de)) + case 4: + self._set_result(CitationLocationSearchResultLocation.deserialize(de)) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -6335,7 +6684,6 @@ def _set_result(self, value: CitationLocation) -> None: class CitationSourceContentText: """ The text content from the source document that is being cited. - """ value: str @@ -6385,7 +6733,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ Contains the actual text content from a source document that is being cited or referenced in the model's response. - """ @@ -6450,18 +6797,24 @@ class Citation: Contains information about a citation that references a specific source document. Citations provide traceability between the model's generated response and the source documents that informed that response. - """ title: str | None = None """ The title or identifier of the source document being cited. """ + + source: str | None = None + """ + The source from the original search result that provided the cited content. + """ + source_content: list[CitationSourceContent] | None = None """ The specific content from the source document that was referenced or cited in the generated response. """ + location: CitationLocation | None = None """ The precise location within the source document where the cited content can be @@ -6475,6 +6828,9 @@ def serialize_members(self, serializer: ShapeSerializer): if self.title is not None: serializer.write_string(_SCHEMA_CITATION.members["title"], self.title) + if self.source is not None: + serializer.write_string(_SCHEMA_CITATION.members["source"], self.source) + if self.source_content is not None: _serialize_citation_source_content_list( serializer, @@ -6499,13 +6855,18 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: kwargs["title"] = de.read_string(_SCHEMA_CITATION.members["title"]) case 1: + kwargs["source"] = de.read_string( + _SCHEMA_CITATION.members["source"] + ) + + case 2: kwargs["source_content"] = ( _deserialize_citation_source_content_list( de, _SCHEMA_CITATION.members["sourceContent"] ) ) - case 2: + case 3: kwargs["location"] = _CitationLocationDeserializer().deserialize(de) case _: @@ -6545,7 +6906,6 @@ class CitationGeneratedContentText: """ The text content that was generated by the model and is supported by the associated citation. - """ value: str @@ -6597,7 +6957,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ Contains the generated text content that corresponds to or is supported by a citation from a source document. - """ @@ -6663,13 +7022,13 @@ class CitationsContentBlock: information. This block type is returned when document citations are enabled, providing traceability between the generated content and the source documents that informed the response. - """ content: list[CitationGeneratedContent] | None = None """ The generated content that is supported by the associated citations. 
""" + citations: list[Citation] | None = None """ An array of citations that reference the source documents used to generate the @@ -6727,14 +7086,13 @@ class CitationsConfig: Configuration settings for enabling and controlling document citations in Converse API responses. When enabled, the model can include citation information that links generated content back to specific source documents. - """ enabled: bool """ - Specifies whether document citations should be included in the model's response. - When set to true, the model can generate citations that reference the source - documents used to inform the response. + Specifies whether citations from the selected document should be used in the + model's response. When set to true, the model can generate citations that + reference the source documents used to inform the response. """ def serialize(self, serializer: ShapeSerializer): @@ -6783,7 +7141,6 @@ class DocumentFormat(StrEnum): class DocumentContentBlockText: """ The text content of the document. - """ value: str @@ -6833,7 +7190,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ Contains the actual content of a document that can be processed by the model and potentially cited in the response. - """ @@ -6896,7 +7252,6 @@ def _read_value(d: ShapeDeserializer): class S3Location: """ A storage location in an Amazon S3 bucket. - """ uri: str @@ -6949,7 +7304,6 @@ class DocumentSourceBytes: """ The raw bytes for the document. If you use an Amazon Web Services SDK, you don't need to encode the bytes in base64. - """ value: bytes @@ -6973,7 +7327,6 @@ class DocumentSourceS3Location: The location of a document object in an Amazon S3 bucket. To see which models support S3 uploads, see `Supported models and features for Converse `_ . - """ value: S3Location @@ -6995,7 +7348,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class DocumentSourceText: """ The text content of the document source. - """ value: str @@ -7018,7 +7370,6 @@ class DocumentSourceContent: """ The structured content of the document source, which may include various content blocks such as text, images, or other document elements. - """ value: list[DocumentContentBlock] @@ -7073,7 +7424,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ Contains the content of a document. - """ @@ -7120,7 +7470,6 @@ def _set_result(self, value: DocumentSource) -> None: class DocumentBlock: """ A document to include in a message. - """ name: str @@ -7152,11 +7501,13 @@ class DocumentBlock: """ The format of a document, or its extension. """ + context: str | None = None """ Contextual information about how the document should be processed or interpreted by the model when generating citations. """ + citations: CitationsConfig | None = None """ Configuration settings that control how citations should be generated for this @@ -7227,7 +7578,6 @@ class GuardrailConverseImageFormat(StrEnum): class GuardrailConverseImageSourceBytes: """ The raw image bytes for the image. - """ value: bytes @@ -7278,7 +7628,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ The image source (image bytes) of the guardrail converse image source. - """ @@ -7320,7 +7669,6 @@ def _set_result(self, value: GuardrailConverseImageSource) -> None: class GuardrailConverseImageBlock: """ An image block that contains images that you want to assess with a guardrail. 
- """ format: str @@ -7409,8 +7757,8 @@ def _read_value(d: ShapeDeserializer): class GuardrailConverseTextBlock: """ A text block that contains text that you want to assess with a guardrail. For - more information, see ``GuardrailConverseContentBlock``. - + more information, see ``GuardrailConverseContentBlock ``_ + . """ text: str @@ -7473,7 +7821,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class GuardrailConverseContentBlockText: """ The text to guard. - """ value: GuardrailConverseTextBlock @@ -7495,7 +7842,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class GuardrailConverseContentBlockImage: """ Image within converse content block to be evaluated by the guardrail. - """ value: GuardrailConverseImageBlock @@ -7547,7 +7893,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: A content block for selective guarding with the `Converse `_ or `ConverseStream `_ API operations. - """ @@ -7600,7 +7945,6 @@ class ImageSourceBytes: """ The raw image bytes for the image. If you use an AWS SDK, you don't need to encode the image bytes in base64. - """ value: bytes @@ -7622,7 +7966,6 @@ class ImageSourceS3Location: The location of an image object in an Amazon S3 bucket. To see which models support S3 uploads, see `Supported models and features for Converse `_ . - """ value: S3Location @@ -7665,7 +8008,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ The source for an image. - """ @@ -7706,7 +8048,6 @@ def _set_result(self, value: ImageSource) -> None: class ImageBlock: """ Image content for a message. - """ format: str @@ -7755,7 +8096,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ReasoningTextBlock: """ Contains the reasoning that the model used to return the output. - """ text: str @@ -7811,7 +8151,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ReasoningContentBlockReasoningText: """ The reasoning that the model used to return the output. - """ value: ReasoningTextBlock @@ -7834,7 +8173,6 @@ class ReasoningContentBlockRedactedContent: """ The content in the reasoning that was encrypted by the model provider for safety reasons. The encryption doesn't affect the quality of responses. - """ value: bytes @@ -7890,7 +8228,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: respect to the content in the content block. Reasoning refers to a Chain of Thought (CoT) that the model generates to enhance the accuracy of its final response. - """ @@ -7927,6 +8264,157 @@ def _set_result(self, value: ReasoningContentBlock) -> None: self._result = value +@dataclass(kw_only=True) +class SearchResultContentBlock: + """ + A block within a search result that contains the content. 
+ """ + + text: str + """ + The actual text content + """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_SEARCH_RESULT_CONTENT_BLOCK, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_string( + _SCHEMA_SEARCH_RESULT_CONTENT_BLOCK.members["text"], self.text + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["text"] = de.read_string( + _SCHEMA_SEARCH_RESULT_CONTENT_BLOCK.members["text"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct( + _SCHEMA_SEARCH_RESULT_CONTENT_BLOCK, consumer=_consumer + ) + return kwargs + + +def _serialize_search_result_content_blocks( + serializer: ShapeSerializer, schema: Schema, value: list[SearchResultContentBlock] +) -> None: + member_schema = schema.members["member"] + with serializer.begin_list(schema, len(value)) as ls: + for e in value: + ls.write_struct(member_schema, e) + + +def _deserialize_search_result_content_blocks( + deserializer: ShapeDeserializer, schema: Schema +) -> list[SearchResultContentBlock]: + result: list[SearchResultContentBlock] = [] + + def _read_value(d: ShapeDeserializer): + if d.is_null(): + d.read_null() + + else: + result.append(SearchResultContentBlock.deserialize(d)) + + deserializer.read_list(schema, _read_value) + return result + + +@dataclass(kw_only=True) +class SearchResultBlock: + """ + A search result block that enables natural citations with proper source + attribution for retrieved content. + + .. note:: This field is only supported by Anthropic Claude Opus 4.1, Opus 4, + Sonnet 4.5, Sonnet 4, Sonnet 3.7, and 3.5 Haiku models. + """ + + source: str + """ + The source URL or identifier for the content. + """ + + title: str + """ + A descriptive title for the search result. + """ + + content: list[SearchResultContentBlock] + """ + An array of search result content block. 
+ """ + + citations: CitationsConfig | None = None + """ + Configuration setting for citations + """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_SEARCH_RESULT_BLOCK, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_string( + _SCHEMA_SEARCH_RESULT_BLOCK.members["source"], self.source + ) + serializer.write_string( + _SCHEMA_SEARCH_RESULT_BLOCK.members["title"], self.title + ) + _serialize_search_result_content_blocks( + serializer, _SCHEMA_SEARCH_RESULT_BLOCK.members["content"], self.content + ) + if self.citations is not None: + serializer.write_struct( + _SCHEMA_SEARCH_RESULT_BLOCK.members["citations"], self.citations + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["source"] = de.read_string( + _SCHEMA_SEARCH_RESULT_BLOCK.members["source"] + ) + + case 1: + kwargs["title"] = de.read_string( + _SCHEMA_SEARCH_RESULT_BLOCK.members["title"] + ) + + case 2: + kwargs["content"] = _deserialize_search_result_content_blocks( + de, _SCHEMA_SEARCH_RESULT_BLOCK.members["content"] + ) + + case 3: + kwargs["citations"] = CitationsConfig.deserialize(de) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_SEARCH_RESULT_BLOCK, consumer=_consumer) + return kwargs + + class VideoFormat(StrEnum): MKV = "mkv" MOV = "mov" @@ -7943,7 +8431,6 @@ class VideoFormat(StrEnum): class VideoSourceBytes: """ Video content encoded in base64. - """ value: bytes @@ -7965,7 +8452,6 @@ class VideoSourceS3Location: The location of a video object in an Amazon S3 bucket. To see which models support S3 uploads, see `Supported models and features for Converse `_ . - """ value: S3Location @@ -8010,7 +8496,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: A video source. You can upload a smaller video as a base64-encoded string as long as the encoded file is less than 25MB. You can also transfer videos up to 1GB in size from an S3 bucket. - """ @@ -8051,7 +8536,6 @@ def _set_result(self, value: VideoSource) -> None: class VideoBlock: """ A video block. - """ format: str @@ -8100,7 +8584,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ToolResultContentBlockJson: """ A tool result that is JSON format data. - """ value: Document @@ -8126,7 +8609,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ToolResultContentBlockText: """ A tool result that is text. - """ value: str @@ -8154,8 +8636,7 @@ class ToolResultContentBlockImage: A tool result that is an image. .. note:: - This field is only supported by Anthropic Claude 3 models. - + This field is only supported by Amazon Nova and Anthropic Claude 3 and 4 models. """ value: ImageBlock @@ -8177,7 +8658,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ToolResultContentBlockDocument: """ A tool result that is a document. - """ value: DocumentBlock @@ -8199,7 +8679,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ToolResultContentBlockVideo: """ A tool result that is video. 
- """ value: VideoBlock @@ -8217,6 +8696,27 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(value=VideoBlock.deserialize(deserializer)) +@dataclass +class ToolResultContentBlockSearchResult: + """ + A tool result that is a search result. + """ + + value: SearchResultBlock + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_TOOL_RESULT_CONTENT_BLOCK, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_struct( + _SCHEMA_TOOL_RESULT_CONTENT_BLOCK.members["searchResult"], self.value + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(value=SearchResultBlock.deserialize(deserializer)) + + @dataclass class ToolResultContentBlockUnknown: """Represents an unknown variant. @@ -8246,12 +8746,13 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: | ToolResultContentBlockImage | ToolResultContentBlockDocument | ToolResultContentBlockVideo + | ToolResultContentBlockSearchResult | ToolResultContentBlockUnknown ] """ -The tool result content block. - +The tool result content block. For more information, see `Call a tool with the Converse API `_ +in the Amazon Bedrock User Guide. """ @@ -8286,6 +8787,9 @@ def _consumer(self, schema: Schema, de: ShapeDeserializer) -> None: case 4: self._set_result(ToolResultContentBlockVideo.deserialize(de)) + case 5: + self._set_result(ToolResultContentBlockSearchResult.deserialize(de)) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -8331,8 +8835,8 @@ class ToolResultStatus(StrEnum): class ToolResultBlock: """ A tool result block that contains the results for a tool request that the model - previously made. - + previously made. For more information, see ``Call a tool with the Converse API ``_ + in the Amazon Bedrock User Guide. """ tool_use_id: str @@ -8350,7 +8854,12 @@ class ToolResultBlock: The status for the tool result content block. .. note:: - This field is only supported Anthropic Claude 3 models. + This field is only supported by Amazon Nova and Anthropic Claude 3 and 4 models. + """ + + type: str | None = None + """ + The type for the tool result content block. """ def serialize(self, serializer: ShapeSerializer): @@ -8368,6 +8877,11 @@ def serialize_members(self, serializer: ShapeSerializer): _SCHEMA_TOOL_RESULT_BLOCK.members["status"], self.status ) + if self.type is not None: + serializer.write_string( + _SCHEMA_TOOL_RESULT_BLOCK.members["type"], self.type + ) + @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(**cls.deserialize_kwargs(deserializer)) @@ -8393,6 +8907,11 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: _SCHEMA_TOOL_RESULT_BLOCK.members["status"] ) + case 3: + kwargs["type"] = de.read_string( + _SCHEMA_TOOL_RESULT_BLOCK.members["type"] + ) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -8400,13 +8919,17 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: return kwargs +class ToolUseType(StrEnum): + SERVER_TOOL_USE = "server_tool_use" + + @dataclass(kw_only=True) class ToolUseBlock: """ A tool use content block. Contains information about a tool that the model is requesting be run., The model uses the result from the tool to generate a - response. - + response. For more information, see ``Call a tool with the Converse API ``_ + in the Amazon Bedrock User Guide. """ tool_use_id: str @@ -8424,6 +8947,11 @@ class ToolUseBlock: The input to pass to the tool. 
""" + type: str | None = None + """ + The type for the tool request. + """ + def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_TOOL_USE_BLOCK, self) @@ -8433,6 +8961,8 @@ def serialize_members(self, serializer: ShapeSerializer): ) serializer.write_string(_SCHEMA_TOOL_USE_BLOCK.members["name"], self.name) serializer.write_document(_SCHEMA_TOOL_USE_BLOCK.members["input"], self.input) + if self.type is not None: + serializer.write_string(_SCHEMA_TOOL_USE_BLOCK.members["type"], self.type) @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: @@ -8459,6 +8989,11 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: _SCHEMA_TOOL_USE_BLOCK.members["input"] ) + case 3: + kwargs["type"] = de.read_string( + _SCHEMA_TOOL_USE_BLOCK.members["type"] + ) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -8470,7 +9005,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ContentBlockText: """ Text to include in the message. - """ value: str @@ -8495,7 +9029,6 @@ class ContentBlockImage: .. note:: This field is only supported by Anthropic Claude 3 models. - """ value: ImageBlock @@ -8515,7 +9048,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ContentBlockDocument: """ A document to include in the message. - """ value: DocumentBlock @@ -8535,7 +9067,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ContentBlockVideo: """ Video to include in the message. - """ value: VideoBlock @@ -8555,7 +9086,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ContentBlockToolUse: """ Information about a tool use request from a model. - """ value: ToolUseBlock @@ -8575,7 +9105,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ContentBlockToolResult: """ The result for a tool request that a model makes. - """ value: ToolResultBlock @@ -8600,7 +9129,6 @@ class ContentBlockGuardContent: For more information, see *Use a guardrail with the Converse API* in the *Amazon Bedrock User Guide*. - """ value: GuardrailConverseContentBlock @@ -8624,7 +9152,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ContentBlockCachePoint: """ CachePoint to include in the message. - """ value: CachePointBlock @@ -8646,7 +9173,6 @@ class ContentBlockReasoningContent: Contains content regarding the reasoning that is carried out by the model. Reasoning refers to a Chain of Thought (CoT) that the model generates to enhance the accuracy of its final response. - """ value: ReasoningContentBlock @@ -8669,7 +9195,6 @@ class ContentBlockCitationsContent: """ A content block that contains both generated text and associated citation information, providing traceability between the response and source documents. - """ value: CitationsContentBlock @@ -8687,6 +9212,27 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(value=CitationsContentBlock.deserialize(deserializer)) +@dataclass +class ContentBlockSearchResult: + """ + Search result to include in the message. 
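A sketch of returning the search_result from the previous sketch as the outcome of a tool the model requested, via the new ToolResultContentBlockSearchResult variant. The tool_use_id is a placeholder echoed from a prior ToolUseBlock, and the ContentBlockSearchResult variant defined just below can alternatively carry the same block directly in message content:

from aws_sdk_bedrock_runtime.models import (
    ContentBlockToolResult,
    ToolResultBlock,
    ToolResultContentBlockSearchResult,
)

tool_result = ContentBlockToolResult(
    value=ToolResultBlock(
        tool_use_id="tooluse-example-id",  # placeholder id from the model's ToolUseBlock
        content=[ToolResultContentBlockSearchResult(value=search_result)],
    )
)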
+ """ + + value: SearchResultBlock + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_CONTENT_BLOCK, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_struct( + _SCHEMA_CONTENT_BLOCK.members["searchResult"], self.value + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(value=SearchResultBlock.deserialize(deserializer)) + + @dataclass class ContentBlockUnknown: """Represents an unknown variant. @@ -8721,6 +9267,7 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: | ContentBlockCachePoint | ContentBlockReasoningContent | ContentBlockCitationsContent + | ContentBlockSearchResult | ContentBlockUnknown ] @@ -8729,7 +9276,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: the `Converse `_ or `ConverseStream `_ API operations. - """ @@ -8779,6 +9325,9 @@ def _consumer(self, schema: Schema, de: ShapeDeserializer) -> None: case 9: self._set_result(ContentBlockCitationsContent.deserialize(de)) + case 10: + self._set_result(ContentBlockSearchResult.deserialize(de)) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -8826,7 +9375,6 @@ class Message: A message input, or returned from, a call to ``Converse ``_ or ``ConverseStream ``_ . - """ role: str @@ -8918,7 +9466,6 @@ class PerformanceConfigLatency(StrEnum): class PerformanceConfiguration: """ Performance settings for a model. - """ latency: str = "standard" @@ -8960,7 +9507,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class PromptVariableValuesText: """ The text value that the variable maps to. - """ value: str @@ -9012,7 +9558,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: containing the values to fill in for them when running model invocation. For more information, see `How Prompt management works `_ . - """ @@ -9098,11 +9643,55 @@ def _read_value(k: str, d: ShapeDeserializer): return result +class ServiceTierType(StrEnum): + PRIORITY = "priority" + DEFAULT = "default" + FLEX = "flex" + + +@dataclass(kw_only=True) +class ServiceTier: + """ + Specifies the processing tier configuration used for serving the request. + """ + + type: str + """ + Specifies the processing tier type used for serving the request. + """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_SERVICE_TIER, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_string(_SCHEMA_SERVICE_TIER.members["type"], self.type) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["type"] = de.read_string( + _SCHEMA_SERVICE_TIER.members["type"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_SERVICE_TIER, consumer=_consumer) + return kwargs + + @dataclass class SystemContentBlockText: """ A system prompt for the model. - """ value: str @@ -9131,7 +9720,6 @@ class SystemContentBlockGuardContent: For more information, see *Use a guardrail with the Converse API* in the *Amazon Bedrock User Guide*. 
- """ value: GuardrailConverseContentBlock @@ -9155,7 +9743,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class SystemContentBlockCachePoint: """ CachePoint to include in the system prompt. - """ value: CachePointBlock @@ -9204,8 +9791,9 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: ] """ -A system content block. - +Contains configurations for instructions to provide the model for how to handle +input. To learn more, see `Using the Converse API `_ +. """ @@ -9274,8 +9862,8 @@ def _read_value(d: ShapeDeserializer): class AnyToolChoice: """ The model must request at least one tool (no text is generated). For example, - ``{"any" : {}}``. - + ``{"any" : {}}``. For more information, see ``Call a tool with the Converse API ``_ + in the Amazon Bedrock User Guide. """ def serialize(self, serializer: ShapeSerializer): @@ -9305,8 +9893,8 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class AutoToolChoice: """ The Model automatically decides if a tool should be called or whether to - generate text instead. For example, ``{"auto" : {}}``. - + generate text instead. For example, ``{"auto" : {}}``. For more information, see ``Call a tool with the Converse API ``_ + in the Amazon Bedrock User Guide """ def serialize(self, serializer: ShapeSerializer): @@ -9335,11 +9923,10 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: @dataclass(kw_only=True) class SpecificToolChoice: """ - The model must request a specific tool. For example, ``{"tool" : {"name" : "Your - tool name"}}``. + The model must request a specific tool. For example, ``{"tool" : {"name" : "Your tool name"}}``. For more information, see ``Call a tool with the Converse API ``_ + in the Amazon Bedrock User Guide .. note:: This field is only supported by Anthropic Claude 3 models. - """ name: str @@ -9380,7 +9967,6 @@ class ToolChoiceAuto: """ (Default). The Model automatically decides if a tool should be called or whether to generate text instead. - """ value: AutoToolChoice @@ -9400,7 +9986,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ToolChoiceAny: """ The model must request at least one tool (no text is generated). - """ value: AnyToolChoice @@ -9420,8 +10005,7 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ToolChoiceTool: """ The Model must request the specified tool. Only supported by Anthropic Claude 3 - models. - + and Amazon Nova models. """ value: SpecificToolChoice @@ -9463,10 +10047,8 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: ToolChoice = Union[ToolChoiceAuto | ToolChoiceAny | ToolChoiceTool | ToolChoiceUnknown] """ -Determines which tools the model should request in a call to ``Converse`` or -``ConverseStream``. ``ToolChoice`` is only supported by Anthropic Claude 3 -models and by Mistral AI Mistral Large. - +Determines which tools the model should request in a call to ``Converse`` or ``ConverseStream``. For more information, see `Call a tool with the Converse API `_ +in the Amazon Bedrock User Guide. """ @@ -9506,12 +10088,49 @@ def _set_result(self, value: ToolChoice) -> None: self._result = value +@dataclass(kw_only=True) +class SystemTool: + """ + Specifies a system-defined tool for the model to use. *System-defined tools* are + tools that are created and provided by the model provider. + """ + + name: str + """ + The name of the system-defined tool that you want to call. 
+ """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_SYSTEM_TOOL, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_string(_SCHEMA_SYSTEM_TOOL.members["name"], self.name) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["name"] = de.read_string(_SCHEMA_SYSTEM_TOOL.members["name"]) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_SYSTEM_TOOL, consumer=_consumer) + return kwargs + + @dataclass class ToolInputSchemaJson: """ The JSON schema for the tool. For more information, see `JSON Schema Reference `_ . - """ value: Document @@ -9555,8 +10174,8 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: ToolInputSchema = Union[ToolInputSchemaJson | ToolInputSchemaUnknown] """ -The schema for the tool. The top level schema type must be ``object``. - +The schema for the tool. The top level schema type must be ``object``. For more information, see `Call a tool with the Converse API `_ +in the Amazon Bedrock User Guide. """ @@ -9593,8 +10212,8 @@ def _set_result(self, value: ToolInputSchema) -> None: @dataclass(kw_only=True) class ToolSpecification: """ - The specification for the tool. - + The specification for the tool. For more information, see ``Call a tool with the Converse API ``_ + in the Amazon Bedrock User Guide. """ name: str @@ -9662,7 +10281,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ToolToolSpec: """ The specfication for the tool. - """ value: ToolSpecification @@ -9678,11 +10296,29 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(value=ToolSpecification.deserialize(deserializer)) +@dataclass +class ToolSystemTool: + """ + Specifies the system-defined tool that you want use. + """ + + value: SystemTool + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_TOOL, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_TOOL.members["systemTool"], self.value) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(value=SystemTool.deserialize(deserializer)) + + @dataclass class ToolCachePoint: """ CachePoint to include in the tool configuration. - """ value: CachePointBlock @@ -9721,13 +10357,12 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: raise NotImplementedError() -Tool = Union[ToolToolSpec | ToolCachePoint | ToolUnknown] +Tool = Union[ToolToolSpec | ToolSystemTool | ToolCachePoint | ToolUnknown] """ Information about a tool that you can use with the Converse API. For more -information, see `Tool use (function calling) `_ +information, see `Call a tool with the Converse API `_ in the Amazon Bedrock User Guide. - """ @@ -9751,6 +10386,9 @@ def _consumer(self, schema: Schema, de: ShapeDeserializer) -> None: self._set_result(ToolToolSpec.deserialize(de)) case 1: + self._set_result(ToolSystemTool.deserialize(de)) + + case 2: self._set_result(ToolCachePoint.deserialize(de)) case _: @@ -9793,7 +10431,6 @@ class ToolConfiguration: Configuration information for the tools that you pass to a model. 
For more information, see ``Tool use (function calling) ``_ in the Amazon Bedrock User Guide. - """ tools: list[Tool] @@ -9873,15 +10510,18 @@ class ConverseInput: The Converse API doesn't support `imported models `_ . """ + messages: list[Message] | None = None """ The messages that you want to send to the model. """ + system: list[SystemContentBlock] | None = None """ A prompt that provides instructions or context to the model about the task it should perform, or the persona it should adopt during the conversation. """ + inference_config: InferenceConfiguration | None = None """ Inference parameters to pass to the model. ``Converse`` and ``ConverseStream`` @@ -9889,6 +10529,7 @@ class ConverseInput: parameters that the model supports, use the ``additionalModelRequestFields`` request field. """ + tool_config: ToolConfiguration | None = None """ Configuration information for the tools that the model can use when generating a @@ -9897,6 +10538,7 @@ class ConverseInput: For information about models that support tool use, see `Supported models and model features `_ . """ + guardrail_config: GuardrailConfiguration | None = None """ Configuration information for a guardrail that you want to use in the request. @@ -9905,12 +10547,14 @@ class ConverseInput: include no ``guardContent`` blocks, the guardrail operates on all messages in the request body and in any included prompt resource. """ + additional_model_request_fields: Document | None = None """ Additional inference parameters that the model supports, beyond the base set of inference parameters that ``Converse`` and ``ConverseStream`` support in the ``inferenceConfig`` field. For more information, see `Model parameters `_ . """ + prompt_variables: dict[str, PromptVariableValues] | None = field( repr=False, default=None ) @@ -9920,6 +10564,7 @@ class ConverseInput: field is ignored if you don't specify a prompt resource in the ``modelId`` field. """ + additional_model_response_field_paths: list[str] | None = None """ Additional model parameters field paths to return in the response. ``Converse`` @@ -9937,15 +10582,22 @@ class ConverseInput: but the requested field is not in the model response, it is ignored by ``Converse``. """ + request_metadata: dict[str, str] | None = field(repr=False, default=None) """ Key-value pairs that you can use to filter invocation logs. """ + performance_config: PerformanceConfiguration | None = None """ Model performance settings for the request. """ + service_tier: ServiceTier | None = None + """ + Specifies the processing tier configuration used for serving the request. + """ + def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_CONVERSE_INPUT, self) @@ -10013,6 +10665,11 @@ def serialize_members(self, serializer: ShapeSerializer): self.performance_config, ) + if self.service_tier is not None: + serializer.write_struct( + _SCHEMA_CONVERSE_INPUT.members["serviceTier"], self.service_tier + ) + @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(**cls.deserialize_kwargs(deserializer)) @@ -10077,6 +10734,9 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: de ) + case 11: + kwargs["service_tier"] = ServiceTier.deserialize(de) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -10089,7 +10749,6 @@ class ConverseMetrics: """ Metrics for a call to ``Converse ``_ . 
- """ latency_ms: int @@ -10131,7 +10790,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ConverseOutputMessage: """ The message that the model generates. - """ value: Message @@ -10175,7 +10833,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ The output from a call to `Converse `_ . - """ @@ -10304,22 +10961,25 @@ def _read_value(k: str, d: ShapeDeserializer): @dataclass(kw_only=True) class GuardrailTraceAssessment: """ - A Top level guardrail trace object. For more information, see ``ConverseTrace``. - + A Top level guardrail trace object. For more information, see ``ConverseTrace ``_ + . """ model_output: list[str] | None = None """ The output from the model. """ + input_assessment: dict[str, GuardrailAssessment] | None = None """ The input assessment. """ + output_assessments: dict[str, list[GuardrailAssessment]] | None = None """ the output assessments. """ + action_reason: str | None = None """ Provides the reason for the action taken when harmful content is detected. @@ -10403,7 +11063,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class PromptRouterTrace: """ A prompt router trace. - """ invoked_model_id: str | None = None @@ -10446,15 +11105,15 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: @dataclass(kw_only=True) class ConverseTrace: """ - The trace object in a response from ``Converse ``_. - Currently, you can only trace guardrails. - + The trace object in a response from ``Converse ``_ + . """ guardrail: GuardrailTraceAssessment | None = None """ The guardrail trace object. """ + prompt_router: PromptRouterTrace | None = None """ The request's prompt router. @@ -10501,7 +11160,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class TokenUsage: """ The tokens used in a message API inference call. - """ input_tokens: int @@ -10523,6 +11181,7 @@ class TokenUsage: """ The number of input tokens read from the cache for the request. """ + cache_write_input_tokens: int | None = None """ The number of input tokens written to the cache for the request. @@ -10622,15 +11281,22 @@ class ConverseOperationOutput: """ Additional fields in the response that are unique to the model. """ + trace: ConverseTrace | None = None """ A trace object that contains information about the Guardrail behavior. """ + performance_config: PerformanceConfiguration | None = None """ Model performance settings for the request. """ + service_tier: ServiceTier | None = None + """ + Specifies the processing tier configuration used for serving the request. + """ + def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_CONVERSE_OPERATION_OUTPUT, self) @@ -10666,6 +11332,12 @@ def serialize_members(self, serializer: ShapeSerializer): self.performance_config, ) + if self.service_tier is not None: + serializer.write_struct( + _SCHEMA_CONVERSE_OPERATION_OUTPUT.members["serviceTier"], + self.service_tier, + ) + @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(**cls.deserialize_kwargs(deserializer)) @@ -10705,6 +11377,9 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: de ) + case 7: + kwargs["service_tier"] = ServiceTier.deserialize(de) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -10716,7 +11391,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ModelErrorException(ServiceError): """ The request failed due to an error while processing the model. 
- """ fault: Literal["client", "server"] | None = "client" @@ -10725,6 +11399,7 @@ class ModelErrorException(ServiceError): """ The original status code. """ + resource_name: str | None = None """ The resource name. @@ -10790,7 +11465,6 @@ class ModelNotReadyException(ServiceError): AWS SDK will automatically retry the operation up to 5 times. For information about configuring automatic retries, see ``Retry behavior ``_ in the *AWS SDKs and Tools* reference guide. - """ fault: Literal["client", "server"] | None = "client" @@ -10832,7 +11506,6 @@ class ModelTimeoutException(ServiceError): """ The request took too long to process. Processing time exceeded the model timeout length. - """ fault: Literal["client", "server"] | None = "client" @@ -10920,17 +11593,16 @@ class GuardrailStreamProcessingMode(StrEnum): @dataclass(kw_only=True) class GuardrailStreamConfiguration: """ - Configuration information for a guardrail that you use with the - ``ConverseStream`` action. - + Configuration information for a guardrail that you use with the ``ConverseStream ``_ + action. """ - guardrail_identifier: str + guardrail_identifier: str = "" """ The identifier for the guardrail. """ - guardrail_version: str + guardrail_version: str = "" """ The version of the guardrail. """ @@ -10939,6 +11611,7 @@ class GuardrailStreamConfiguration: """ The trace behavior for the guardrail. """ + stream_processing_mode: str = "sync" """ The processing mode. @@ -11042,15 +11715,18 @@ class ConverseStreamInput: The Converse API doesn't support `imported models `_ . """ + messages: list[Message] | None = None """ The messages that you want to send to the model. """ + system: list[SystemContentBlock] | None = None """ A prompt that provides instructions or context to the model about the task it should perform, or the persona it should adopt during the conversation. """ + inference_config: InferenceConfiguration | None = None """ Inference parameters to pass to the model. ``Converse`` and ``ConverseStream`` @@ -11058,6 +11734,7 @@ class ConverseStreamInput: parameters that the model supports, use the ``additionalModelRequestFields`` request field. """ + tool_config: ToolConfiguration | None = None """ Configuration information for the tools that the model can use when generating a @@ -11066,6 +11743,7 @@ class ConverseStreamInput: For information about models that support streaming tool use, see `Supported models and model features `_ . """ + guardrail_config: GuardrailStreamConfiguration | None = None """ Configuration information for a guardrail that you want to use in the request. @@ -11074,12 +11752,14 @@ class ConverseStreamInput: include no ``guardContent`` blocks, the guardrail operates on all messages in the request body and in any included prompt resource. """ + additional_model_request_fields: Document | None = None """ Additional inference parameters that the model supports, beyond the base set of inference parameters that ``Converse`` and ``ConverseStream`` support in the ``inferenceConfig`` field. For more information, see `Model parameters `_ . """ + prompt_variables: dict[str, PromptVariableValues] | None = field( repr=False, default=None ) @@ -11089,6 +11769,7 @@ class ConverseStreamInput: field is ignored if you don't specify a prompt resource in the ``modelId`` field. """ + additional_model_response_field_paths: list[str] | None = None """ Additional model parameters field paths to return in the response. 
``Converse`` @@ -11106,15 +11787,22 @@ class ConverseStreamInput: but the requested field is not in the model response, it is ignored by ``Converse``. """ + request_metadata: dict[str, str] | None = field(repr=False, default=None) """ Key-value pairs that you can use to filter invocation logs. """ + performance_config: PerformanceConfiguration | None = None """ Model performance settings for the request. """ + service_tier: ServiceTier | None = None + """ + Specifies the processing tier configuration used for serving the request. + """ + def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_CONVERSE_STREAM_INPUT, self) @@ -11188,6 +11876,11 @@ def serialize_members(self, serializer: ShapeSerializer): self.performance_config, ) + if self.service_tier is not None: + serializer.write_struct( + _SCHEMA_CONVERSE_STREAM_INPUT.members["serviceTier"], self.service_tier + ) + @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(**cls.deserialize_kwargs(deserializer)) @@ -11256,6 +11949,9 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: de ) + case 11: + kwargs["service_tier"] = ServiceTier.deserialize(de) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -11268,7 +11964,6 @@ class CitationSourceContentDelta: """ Contains incremental updates to the source content text during streaming responses, allowing clients to build up the cited content progressively. - """ text: str | None = None @@ -11341,18 +12036,24 @@ class CitationsDelta: Contains incremental updates to citation information during streaming responses. This allows clients to build up citation data progressively as the response is generated. - """ title: str | None = None """ The title or identifier of the source document being cited. """ + + source: str | None = None + """ + The source from the original search result that provided the cited content. + """ + source_content: list[CitationSourceContentDelta] | None = None """ The specific content from the source document that was referenced or cited in the generated response. """ + location: CitationLocation | None = None """ Specifies the precise location within a source document where cited content can @@ -11369,6 +12070,11 @@ def serialize_members(self, serializer: ShapeSerializer): _SCHEMA_CITATIONS_DELTA.members["title"], self.title ) + if self.source is not None: + serializer.write_string( + _SCHEMA_CITATIONS_DELTA.members["source"], self.source + ) + if self.source_content is not None: _serialize_citation_source_content_list_delta( serializer, @@ -11397,13 +12103,18 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: ) case 1: + kwargs["source"] = de.read_string( + _SCHEMA_CITATIONS_DELTA.members["source"] + ) + + case 2: kwargs["source_content"] = ( _deserialize_citation_source_content_list_delta( de, _SCHEMA_CITATIONS_DELTA.members["sourceContent"] ) ) - case 2: + case 3: kwargs["location"] = _CitationLocationDeserializer().deserialize(de) case _: @@ -11417,7 +12128,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ReasoningContentBlockDeltaText: """ The reasoning that the model used to return the output. - """ value: str @@ -11444,7 +12154,6 @@ class ReasoningContentBlockDeltaRedactedContent: """ The content in the reasoning that was encrypted by the model provider for safety reasons. The encryption doesn't affect the quality of responses. 
- """ value: bytes @@ -11472,7 +12181,6 @@ class ReasoningContentBlockDeltaSignature: A token that verifies that the reasoning text was generated by the model. If you pass a reasoning block back to the API in a multi-turn conversation, include the text and its signature unmodified. - """ value: str @@ -11529,7 +12237,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: respect to the content in the content block. Reasoning refers to a Chain of Thought (CoT) that the model generates to enhance the accuracy of its final response. - """ @@ -11573,11 +12280,122 @@ def _set_result(self, value: ReasoningContentBlockDelta) -> None: self._result = value +@dataclass +class ToolResultBlockDeltaText: + """ + The reasoning the model used to return the output. + """ + + value: str + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_TOOL_RESULT_BLOCK_DELTA, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_string( + _SCHEMA_TOOL_RESULT_BLOCK_DELTA.members["text"], self.value + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls( + value=deserializer.read_string( + _SCHEMA_TOOL_RESULT_BLOCK_DELTA.members["text"] + ) + ) + + +@dataclass +class ToolResultBlockDeltaUnknown: + """Represents an unknown variant. + + If you receive this value, you will need to update your library to receive the + parsed value. + + This value may not be deliberately sent. + """ + + tag: str + + def serialize(self, serializer: ShapeSerializer): + raise SerializationError("Unknown union variants may not be serialized.") + + def serialize_members(self, serializer: ShapeSerializer): + raise SerializationError("Unknown union variants may not be serialized.") + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + raise NotImplementedError() + + +ToolResultBlockDelta = Union[ToolResultBlockDeltaText | ToolResultBlockDeltaUnknown] + +""" +Contains incremental updates to tool results information during streaming +responses. This allows clients to build up tool results data progressively as +the response is generated. +""" + + +class _ToolResultBlockDeltaDeserializer: + _result: ToolResultBlockDelta | None = None + + def deserialize(self, deserializer: ShapeDeserializer) -> ToolResultBlockDelta: + self._result = None + deserializer.read_struct(_SCHEMA_TOOL_RESULT_BLOCK_DELTA, self._consumer) + + if self._result is None: + raise SerializationError( + "Unions must have exactly one value, but found none." + ) + + return self._result + + def _consumer(self, schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + self._set_result(ToolResultBlockDeltaText.deserialize(de)) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + def _set_result(self, value: ToolResultBlockDelta) -> None: + if self._result is not None: + raise SerializationError( + "Unions must have exactly one value, but found more than one." 
+ ) + self._result = value + + +def _serialize_tool_result_blocks_delta( + serializer: ShapeSerializer, schema: Schema, value: list[ToolResultBlockDelta] +) -> None: + member_schema = schema.members["member"] + with serializer.begin_list(schema, len(value)) as ls: + for e in value: + ls.write_struct(member_schema, e) + + +def _deserialize_tool_result_blocks_delta( + deserializer: ShapeDeserializer, schema: Schema +) -> list[ToolResultBlockDelta]: + result: list[ToolResultBlockDelta] = [] + + def _read_value(d: ShapeDeserializer): + if d.is_null(): + d.read_null() + + else: + result.append(_ToolResultBlockDeltaDeserializer().deserialize(d)) + + deserializer.read_list(schema, _read_value) + return result + + @dataclass(kw_only=True) class ToolUseBlockDelta: """ The delta for a tool use block. - """ input: str @@ -11619,7 +12437,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ContentBlockDeltaText: """ The content text. - """ value: str @@ -11641,22 +12458,46 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ContentBlockDeltaToolUse: """ Information about a tool that the model is requesting to use. + """ + value: ToolUseBlockDelta + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_CONTENT_BLOCK_DELTA, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_struct( + _SCHEMA_CONTENT_BLOCK_DELTA.members["toolUse"], self.value + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(value=ToolUseBlockDelta.deserialize(deserializer)) + + +@dataclass +class ContentBlockDeltaToolResult: + """ + An incremental update that contains the results from a tool call. """ - value: ToolUseBlockDelta + value: list[ToolResultBlockDelta] def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_CONTENT_BLOCK_DELTA, self) def serialize_members(self, serializer: ShapeSerializer): - serializer.write_struct( - _SCHEMA_CONTENT_BLOCK_DELTA.members["toolUse"], self.value + _serialize_tool_result_blocks_delta( + serializer, _SCHEMA_CONTENT_BLOCK_DELTA.members["toolResult"], self.value ) @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: - return cls(value=ToolUseBlockDelta.deserialize(deserializer)) + return cls( + value=_deserialize_tool_result_blocks_delta( + deserializer, _SCHEMA_CONTENT_BLOCK_DELTA.members["toolResult"] + ) + ) @dataclass @@ -11665,7 +12506,6 @@ class ContentBlockDeltaReasoningContent: Contains content regarding the reasoning that is carried out by the model. Reasoning refers to a Chain of Thought (CoT) that the model generates to enhance the accuracy of its final response. - """ value: ReasoningContentBlockDelta @@ -11690,7 +12530,6 @@ class ContentBlockDeltaCitation: """ Incremental citation information that is streamed as part of the response generation process. - """ value: CitationsDelta @@ -11734,6 +12573,7 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: ContentBlockDelta = Union[ ContentBlockDeltaText | ContentBlockDeltaToolUse + | ContentBlockDeltaToolResult | ContentBlockDeltaReasoningContent | ContentBlockDeltaCitation | ContentBlockDeltaUnknown @@ -11741,7 +12581,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ A block of content in a streaming response. 
- """ @@ -11768,9 +12607,12 @@ def _consumer(self, schema: Schema, de: ShapeDeserializer) -> None: self._set_result(ContentBlockDeltaToolUse.deserialize(de)) case 2: - self._set_result(ContentBlockDeltaReasoningContent.deserialize(de)) + self._set_result(ContentBlockDeltaToolResult.deserialize(de)) case 3: + self._set_result(ContentBlockDeltaReasoningContent.deserialize(de)) + + case 4: self._set_result(ContentBlockDeltaCitation.deserialize(de)) case _: @@ -11788,7 +12630,6 @@ def _set_result(self, value: ContentBlockDelta) -> None: class ContentBlockDeltaEvent: """ The content block delta event. - """ delta: ContentBlockDelta @@ -11839,10 +12680,81 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: @dataclass(kw_only=True) -class ToolUseBlockStart: +class ToolResultBlockStart: + """ + The start of a tool result block. For more information, see ``Call a tool with the Converse API ``_ + in the Amazon Bedrock User Guide. + """ + + tool_use_id: str + """ + The ID of the tool that was used to generate this tool result block. + """ + + type: str | None = None + """ + The type for the tool that was used to generate this tool result block. + """ + + status: str | None = None """ - The start of a tool use block. + The status of the tool result block. + """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_TOOL_RESULT_BLOCK_START, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_string( + _SCHEMA_TOOL_RESULT_BLOCK_START.members["toolUseId"], self.tool_use_id + ) + if self.type is not None: + serializer.write_string( + _SCHEMA_TOOL_RESULT_BLOCK_START.members["type"], self.type + ) + + if self.status is not None: + serializer.write_string( + _SCHEMA_TOOL_RESULT_BLOCK_START.members["status"], self.status + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["tool_use_id"] = de.read_string( + _SCHEMA_TOOL_RESULT_BLOCK_START.members["toolUseId"] + ) + + case 1: + kwargs["type"] = de.read_string( + _SCHEMA_TOOL_RESULT_BLOCK_START.members["type"] + ) + + case 2: + kwargs["status"] = de.read_string( + _SCHEMA_TOOL_RESULT_BLOCK_START.members["status"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_TOOL_RESULT_BLOCK_START, consumer=_consumer) + return kwargs + +@dataclass(kw_only=True) +class ToolUseBlockStart: + """ + The start of a tool use block. For more information, see ``Call a tool with the Converse API ``_ + in the Amazon Bedrock User Guide. """ tool_use_id: str @@ -11855,6 +12767,11 @@ class ToolUseBlockStart: The name of the tool that the model is requesting to use. """ + type: str | None = None + """ + The type for the tool request. 
+ """ + def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_TOOL_USE_BLOCK_START, self) @@ -11863,6 +12780,10 @@ def serialize_members(self, serializer: ShapeSerializer): _SCHEMA_TOOL_USE_BLOCK_START.members["toolUseId"], self.tool_use_id ) serializer.write_string(_SCHEMA_TOOL_USE_BLOCK_START.members["name"], self.name) + if self.type is not None: + serializer.write_string( + _SCHEMA_TOOL_USE_BLOCK_START.members["type"], self.type + ) @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: @@ -11884,6 +12805,11 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: _SCHEMA_TOOL_USE_BLOCK_START.members["name"] ) + case 2: + kwargs["type"] = de.read_string( + _SCHEMA_TOOL_USE_BLOCK_START.members["type"] + ) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -11895,7 +12821,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ContentBlockStartToolUse: """ Information about a tool that the model is requesting to use. - """ value: ToolUseBlockStart @@ -11913,6 +12838,27 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(value=ToolUseBlockStart.deserialize(deserializer)) +@dataclass +class ContentBlockStartToolResult: + """ + The + """ + + value: ToolResultBlockStart + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_CONTENT_BLOCK_START, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_struct( + _SCHEMA_CONTENT_BLOCK_START.members["toolResult"], self.value + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(value=ToolResultBlockStart.deserialize(deserializer)) + + @dataclass class ContentBlockStartUnknown: """Represents an unknown variant. @@ -11936,11 +12882,12 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: raise NotImplementedError() -ContentBlockStart = Union[ContentBlockStartToolUse | ContentBlockStartUnknown] +ContentBlockStart = Union[ + ContentBlockStartToolUse | ContentBlockStartToolResult | ContentBlockStartUnknown +] """ Content block start information. - """ @@ -11963,6 +12910,9 @@ def _consumer(self, schema: Schema, de: ShapeDeserializer) -> None: case 0: self._set_result(ContentBlockStartToolUse.deserialize(de)) + case 1: + self._set_result(ContentBlockStartToolResult.deserialize(de)) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -11978,7 +12928,6 @@ def _set_result(self, value: ContentBlockStart) -> None: class ContentBlockStartEvent: """ Content block start event. - """ start: ContentBlockStart @@ -12032,7 +12981,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ContentBlockStopEvent: """ A content block stop event. - """ content_block_index: int @@ -12075,7 +13023,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class MessageStartEvent: """ The start of a message. - """ role: str @@ -12115,7 +13062,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class MessageStopEvent: """ The stop event for a message. - """ stop_reason: str @@ -12174,7 +13120,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ConverseStreamMetrics: """ Metrics for the stream. - """ latency_ms: int @@ -12215,15 +13160,15 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: @dataclass(kw_only=True) class ConverseStreamTrace: """ - The trace object in a response from ``ConverseStream ``_. - Currently, you can only trace guardrails. 
- + The trace object in a response from ``ConverseStream ``_ + . """ guardrail: GuardrailTraceAssessment | None = None """ The guardrail trace object. """ + prompt_router: PromptRouterTrace | None = None """ The request's prompt router. @@ -12271,7 +13216,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ConverseStreamMetadataEvent: """ A conversation stream metadata event. - """ usage: TokenUsage @@ -12289,11 +13233,17 @@ class ConverseStreamMetadataEvent: The trace object in the response from `ConverseStream `_ that contains information about the guardrail behavior. """ + performance_config: PerformanceConfiguration | None = None """ Model performance configuration metadata for the conversation stream event. """ + service_tier: ServiceTier | None = None + """ + Specifies the processing tier configuration used for serving the request. + """ + def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_CONVERSE_STREAM_METADATA_EVENT, self) @@ -12315,6 +13265,12 @@ def serialize_members(self, serializer: ShapeSerializer): self.performance_config, ) + if self.service_tier is not None: + serializer.write_struct( + _SCHEMA_CONVERSE_STREAM_METADATA_EVENT.members["serviceTier"], + self.service_tier, + ) + @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(**cls.deserialize_kwargs(deserializer)) @@ -12339,6 +13295,9 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: de ) + case 4: + kwargs["service_tier"] = ServiceTier.deserialize(de) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -12352,7 +13311,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ModelStreamErrorException(ServiceError): """ An error occurred while streaming the response. Retry your request. - """ fault: Literal["client", "server"] | None = "client" @@ -12361,6 +13319,7 @@ class ModelStreamErrorException(ServiceError): """ The original status code. """ + original_message: str | None = None """ The original message. @@ -12427,7 +13386,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ConverseStreamOutputMessageStart: """ Message start information. - """ value: MessageStartEvent @@ -12449,7 +13407,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ConverseStreamOutputContentBlockStart: """ Start information for a content block. - """ value: ContentBlockStartEvent @@ -12471,7 +13428,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ConverseStreamOutputContentBlockDelta: """ The messages output content block delta. - """ value: ContentBlockDeltaEvent @@ -12493,7 +13449,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ConverseStreamOutputContentBlockStop: """ Stop information for a content block. - """ value: ContentBlockStopEvent @@ -12515,7 +13470,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ConverseStreamOutputMessageStop: """ Message stop information. - """ value: MessageStopEvent @@ -12537,7 +13491,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ConverseStreamOutputMetadata: """ Metadata for the converse output stream. - """ value: ConverseStreamMetadataEvent @@ -12559,7 +13512,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ConverseStreamOutputInternalServerException: """ An internal server error occurred. Retry your request. 
- """ value: InternalServerException @@ -12582,7 +13534,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ConverseStreamOutputModelStreamErrorException: """ A streaming error occurred. Retry your request. - """ value: ModelStreamErrorException @@ -12606,8 +13557,7 @@ class ConverseStreamOutputValidationException: """ The input fails to satisfy the constraints specified by *Amazon Bedrock*. For troubleshooting this error, see `ValidationError `_ - in the Amazon Bedrock User Guide - + in the Amazon Bedrock User Guide. """ value: ValidationException @@ -12630,8 +13580,7 @@ class ConverseStreamOutputThrottlingException: """ Your request was denied due to exceeding the account quotas for *Amazon Bedrock*. For troubleshooting this error, see `ThrottlingException `_ - in the Amazon Bedrock User Guide - + in the Amazon Bedrock User Guide. """ value: ThrottlingException @@ -12655,7 +13604,6 @@ class ConverseStreamOutputServiceUnavailableException: The service isn't currently available. For troubleshooting this error, see `ServiceUnavailable `_ in the Amazon Bedrock User Guide - """ value: ServiceUnavailableException @@ -12714,7 +13662,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ The messages output stream - """ @@ -12874,16 +13821,19 @@ class InvokeModelInput: For more information, see `Run inference `_ in the Bedrock User Guide. """ + content_type: str | None = None """ The MIME type of the input data in the request. You must specify ``application/json``. """ + accept: str | None = None """ The desired MIME type of the inference body in the response. The default value is ``application/json``. """ + model_id: str | None = None """ The unique identifier of the model to invoke to run inference. @@ -12913,11 +13863,13 @@ class InvokeModelInput: successful call to `CreateModelImportJob `_ or from the Imported models page in the Amazon Bedrock console. """ + trace: str | None = None """ Specifies whether to enable or disable the Bedrock trace. If enabled, you can see the full Bedrock trace. """ + guardrail_identifier: str | None = None """ The unique identifier of the guardrail that you want to use. If you don't @@ -12932,15 +13884,22 @@ class InvokeModelInput: * You provide a guardrail identifier, but ``guardrailVersion`` isn't specified. """ + guardrail_version: str | None = None """ The version number for the guardrail. The value can also be ``DRAFT``. """ + performance_config_latency: str = "standard" """ Model performance settings for the request. """ + service_tier: str | None = None + """ + Specifies the processing tier type used for serving the request. 
+ """ + def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_INVOKE_MODEL_INPUT, self) @@ -12986,6 +13945,11 @@ def serialize_members(self, serializer: ShapeSerializer): self.performance_config_latency, ) + if self.service_tier is not None: + serializer.write_string( + _SCHEMA_INVOKE_MODEL_INPUT.members["serviceTier"], self.service_tier + ) + @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(**cls.deserialize_kwargs(deserializer)) @@ -13036,6 +14000,11 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: _SCHEMA_INVOKE_MODEL_INPUT.members["performanceConfigLatency"] ) + case 8: + kwargs["service_tier"] = de.read_string( + _SCHEMA_INVOKE_MODEL_INPUT.members["serviceTier"] + ) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -13061,6 +14030,11 @@ class InvokeModelOutput: Model performance settings for the request. """ + service_tier: str | None = None + """ + Specifies the processing tier type used for serving the request. + """ + def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_INVOKE_MODEL_OUTPUT, self) @@ -13075,6 +14049,11 @@ def serialize_members(self, serializer: ShapeSerializer): self.performance_config_latency, ) + if self.service_tier is not None: + serializer.write_string( + _SCHEMA_INVOKE_MODEL_OUTPUT.members["serviceTier"], self.service_tier + ) + @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(**cls.deserialize_kwargs(deserializer)) @@ -13100,6 +14079,11 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: _SCHEMA_INVOKE_MODEL_OUTPUT.members["performanceConfigLatency"] ) + case 3: + kwargs["service_tier"] = de.read_string( + _SCHEMA_INVOKE_MODEL_OUTPUT.members["serviceTier"] + ) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -13158,7 +14142,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class BidirectionalInputPayloadPart: """ Payload content for the bidirectional input. The input is an audio stream. - """ bytes_: bytes | None = field(repr=False, default=None) @@ -13203,7 +14186,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class InvokeModelWithBidirectionalStreamInputChunk: """ The audio chunk that is used as input for the invocation step. - """ value: BidirectionalInputPayloadPart @@ -13255,7 +14237,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ Payload content, the speech chunk, for the bidirectional input of the invocation step. - """ @@ -13349,7 +14330,6 @@ class BidirectionalOutputPayloadPart: """ Output from the bidirectional stream. The output is speech and a text transcription. - """ bytes_: bytes | None = field(repr=False, default=None) @@ -13394,7 +14374,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class InvokeModelWithBidirectionalStreamOutputChunk: """ The speech chunk that was provided as output from the invocation step. - """ value: BidirectionalOutputPayloadPart @@ -13419,7 +14398,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class InvokeModelWithBidirectionalStreamOutputInternalServerException: """ The request encountered an unknown internal error. - """ value: InternalServerException @@ -13446,7 +14424,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class InvokeModelWithBidirectionalStreamOutputModelStreamErrorException: """ The request encountered an error with the model stream. 
- """ value: ModelStreamErrorException @@ -13474,7 +14451,6 @@ class InvokeModelWithBidirectionalStreamOutputValidationException: """ The input fails to satisfy the constraints specified by an Amazon Web Services service. - """ value: ValidationException @@ -13501,7 +14477,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class InvokeModelWithBidirectionalStreamOutputThrottlingException: """ The request was denied due to request throttling. - """ value: ThrottlingException @@ -13529,7 +14504,6 @@ class InvokeModelWithBidirectionalStreamOutputModelTimeoutException: """ The connection was closed because a request was not received within the timeout period. - """ value: ModelTimeoutException @@ -13556,7 +14530,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class InvokeModelWithBidirectionalStreamOutputServiceUnavailableException: """ The request has failed due to a temporary failure of the server. - """ value: ServiceUnavailableException @@ -13615,7 +14588,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ Output from the bidirectional stream that was used for model invocation. - """ @@ -13786,16 +14758,19 @@ class InvokeModelWithResponseStreamInput: For more information, see `Run inference `_ in the Bedrock User Guide. """ + content_type: str | None = None """ The MIME type of the input data in the request. You must specify ``application/json``. """ + accept: str | None = None """ The desired MIME type of the inference body in the response. The default value is ``application/json``. """ + model_id: str | None = None """ The unique identifier of the model to invoke to run inference. @@ -13825,11 +14800,13 @@ class InvokeModelWithResponseStreamInput: successful call to `CreateModelImportJob `_ or from the Imported models page in the Amazon Bedrock console. """ + trace: str | None = None """ Specifies whether to enable or disable the Bedrock trace. If enabled, you can see the full Bedrock trace. """ + guardrail_identifier: str | None = None """ The unique identifier of the guardrail that you want to use. If you don't @@ -13844,15 +14821,22 @@ class InvokeModelWithResponseStreamInput: * You provide a guardrail identifier, but ``guardrailVersion`` isn't specified. """ + guardrail_version: str | None = None """ The version number for the guardrail. The value can also be ``DRAFT``. """ + performance_config_latency: str = "standard" """ Model performance settings for the request. """ + service_tier: str | None = None + """ + Specifies the processing tier type used for serving the request. 
+ """ + def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_INVOKE_MODEL_WITH_RESPONSE_STREAM_INPUT, self) @@ -13911,6 +14895,12 @@ def serialize_members(self, serializer: ShapeSerializer): self.performance_config_latency, ) + if self.service_tier is not None: + serializer.write_string( + _SCHEMA_INVOKE_MODEL_WITH_RESPONSE_STREAM_INPUT.members["serviceTier"], + self.service_tier, + ) + @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(**cls.deserialize_kwargs(deserializer)) @@ -13973,6 +14963,13 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: ] ) + case 8: + kwargs["service_tier"] = de.read_string( + _SCHEMA_INVOKE_MODEL_WITH_RESPONSE_STREAM_INPUT.members[ + "serviceTier" + ] + ) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -13986,7 +14983,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class PayloadPart: """ Payload content included in the response. - """ bytes_: bytes | None = field(repr=False, default=None) @@ -14027,7 +15023,6 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: class ResponseStreamChunk: """ Content included in the response. - """ value: PayloadPart @@ -14047,7 +15042,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ResponseStreamInternalServerException: """ An internal server error occurred. Retry your request. - """ value: InternalServerException @@ -14069,7 +15063,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ResponseStreamModelStreamErrorException: """ An error occurred while streaming the response. Retry your request. - """ value: ModelStreamErrorException @@ -14091,7 +15084,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ResponseStreamValidationException: """ Input validation failed. Check your request parameters and retry the request. - """ value: ValidationException @@ -14115,7 +15107,6 @@ class ResponseStreamThrottlingException: Your request was throttled because of service-wide limitations. Resubmit your request later or in a different region. You can also purchase `Provisioned Throughput `_ to increase the rate or number of tokens you can process. - """ value: ThrottlingException @@ -14138,7 +15129,6 @@ class ResponseStreamModelTimeoutException: """ The request took too long to process. Processing time exceeded the model timeout length. - """ value: ModelTimeoutException @@ -14160,7 +15150,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: class ResponseStreamServiceUnavailableException: """ The service isn't available. Try again later. - """ value: ServiceUnavailableException @@ -14214,7 +15203,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ Definition of content in the response stream. - """ @@ -14282,6 +15270,11 @@ class InvokeModelWithResponseStreamOutput: Model performance settings for the request. """ + service_tier: str | None = None + """ + Specifies the processing tier type used for serving the request. 
+ """ + def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_INVOKE_MODEL_WITH_RESPONSE_STREAM_OUTPUT, self) @@ -14298,6 +15291,12 @@ def serialize_members(self, serializer: ShapeSerializer): self.performance_config_latency, ) + if self.service_tier is not None: + serializer.write_string( + _SCHEMA_INVOKE_MODEL_WITH_RESPONSE_STREAM_OUTPUT.members["serviceTier"], + self.service_tier, + ) + @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(**cls.deserialize_kwargs(deserializer)) @@ -14308,20 +15307,27 @@ def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: def _consumer(schema: Schema, de: ShapeDeserializer) -> None: match schema.expect_member_index(): - case 0: + case 1: kwargs["content_type"] = de.read_string( _SCHEMA_INVOKE_MODEL_WITH_RESPONSE_STREAM_OUTPUT.members[ "contentType" ] ) - case 1: + case 2: kwargs["performance_config_latency"] = de.read_string( _SCHEMA_INVOKE_MODEL_WITH_RESPONSE_STREAM_OUTPUT.members[ "performanceConfigLatency" ] ) + case 3: + kwargs["service_tier"] = de.read_string( + _SCHEMA_INVOKE_MODEL_WITH_RESPONSE_STREAM_OUTPUT.members[ + "serviceTier" + ] + ) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -14388,13 +15394,13 @@ class ConverseTokensRequest: This structure mirrors the input format for the ``Converse`` operation, allowing you to count tokens for conversation-based inference requests. - """ messages: list[Message] | None = None """ An array of messages to count tokens for. """ + system: list[SystemContentBlock] | None = None """ The system content blocks to count tokens for. System content provides @@ -14402,6 +15408,19 @@ class ConverseTokensRequest: token count will include any system content provided. """ + tool_config: ToolConfiguration | None = None + """ + The toolConfig of Converse input request to count tokens for. Configuration + information for the tools that the model can use when generating a response. + """ + + additional_model_request_fields: Document | None = None + """ + The additionalModelRequestFields of Converse input request to count tokens for. + Use this field when you want to pass additional parameters that the model + supports. + """ + def serialize(self, serializer: ShapeSerializer): serializer.write_struct(_SCHEMA_CONVERSE_TOKENS_REQUEST, self) @@ -14420,6 +15439,17 @@ def serialize_members(self, serializer: ShapeSerializer): self.system, ) + if self.tool_config is not None: + serializer.write_struct( + _SCHEMA_CONVERSE_TOKENS_REQUEST.members["toolConfig"], self.tool_config + ) + + if self.additional_model_request_fields is not None: + serializer.write_document( + _SCHEMA_CONVERSE_TOKENS_REQUEST.members["additionalModelRequestFields"], + self.additional_model_request_fields, + ) + @classmethod def deserialize(cls, deserializer: ShapeDeserializer) -> Self: return cls(**cls.deserialize_kwargs(deserializer)) @@ -14440,6 +15470,16 @@ def _consumer(schema: Schema, de: ShapeDeserializer) -> None: de, _SCHEMA_CONVERSE_TOKENS_REQUEST.members["system"] ) + case 2: + kwargs["tool_config"] = ToolConfiguration.deserialize(de) + + case 3: + kwargs["additional_model_request_fields"] = de.read_document( + _SCHEMA_CONVERSE_TOKENS_REQUEST.members[ + "additionalModelRequestFields" + ] + ) + case _: logger.debug("Unexpected member schema: %s", schema) @@ -14453,7 +15493,6 @@ class InvokeModelTokensRequest: The body of an ``InvokeModel`` API request for token counting. 
This structure mirrors the input format for the ``InvokeModel`` operation, allowing you to count tokens for raw text inference requests. - """ body: bytes = field(repr=False) @@ -14502,7 +15541,6 @@ class CountTokensInputInvokeModel: An ``InvokeModel`` request for which to count tokens. Use this field when you want to count tokens for a raw text input that would be sent to the ``InvokeModel`` operation. - """ value: InvokeModelTokensRequest @@ -14526,7 +15564,6 @@ class CountTokensInputConverse: A ``Converse`` request for which to count tokens. Use this field when you want to count tokens for a conversation-based input that would be sent to the ``Converse`` operation. - """ value: ConverseTokensRequest @@ -14574,7 +15611,6 @@ def deserialize(cls, deserializer: ShapeDeserializer) -> Self: """ The input value for token counting. The value should be either an ``InvokeModel`` or ``Converse`` request body. - """ @@ -14619,6 +15655,7 @@ class CountTokensOperationInput: Each model processes tokens differently, so the token count is specific to the model you specify. """ + input: CountTokensInput | None = None """ The input for which to count tokens. The structure of this parameter depends on diff --git a/clients/aws-sdk-bedrock-runtime/tests/test_protocol.py b/clients/aws-sdk-bedrock-runtime/tests/test_protocol.py index 8058e3a..f5599f9 100644 --- a/clients/aws-sdk-bedrock-runtime/tests/test_protocol.py +++ b/clients/aws-sdk-bedrock-runtime/tests/test_protocol.py @@ -19,6 +19,8 @@ def __init__(self, request: HTTPRequest): class RequestTestHTTPClient: """An asynchronous HTTP client solely for testing purposes.""" + TIMEOUT_EXCEPTIONS = () + def __init__(self, *, client_config: HTTPClientConfiguration | None = None): self._client_config = client_config @@ -35,6 +37,8 @@ async def send( class ResponseTestHTTPClient: """An asynchronous HTTP client solely for testing purposes.""" + TIMEOUT_EXCEPTIONS = () + def __init__( self, *, diff --git a/clients/aws-sdk-python/pyproject.toml b/clients/aws-sdk-python/pyproject.toml index be6a01d..09548ec 100644 --- a/clients/aws-sdk-python/pyproject.toml +++ b/clients/aws-sdk-python/pyproject.toml @@ -22,10 +22,12 @@ classifiers = [ dependencies = [] [project.optional-dependencies] -bedrock_runtime = ["aws_sdk_bedrock_runtime==0.1.1"] -transcribe_streaming = ["aws_sdk_transcribe_streaming==0.1.0"] +bedrock_runtime = ["aws_sdk_bedrock_runtime==0.2.0"] +sagemaker_runtime_http2 = ["aws_sdk_sagemaker_runtime_http2==0.1.0"] +transcribe_streaming = ["aws_sdk_transcribe_streaming==0.2.0"] all = [ "aws_sdk_python[bedrock_runtime]", + "aws_sdk_python[sagemaker_runtime_http2]", "aws_sdk_python[transcribe_streaming]", ] diff --git a/clients/aws-sdk-python/src/aws_sdk_python/__init__.py b/clients/aws-sdk-python/src/aws_sdk_python/__init__.py index b5a49b8..f326146 100644 --- a/clients/aws-sdk-python/src/aws_sdk_python/__init__.py +++ b/clients/aws-sdk-python/src/aws_sdk_python/__init__.py @@ -1,6 +1,6 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -__version__ = "0.1.2" +__version__ = "0.2.0" # TODO: Consider adding relative imports for services from the top level namespace? 
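Taken together, the model hunks above are the user-visible surface of the v0.2.0 Bedrock Runtime changes: a `Tool` may now be a provider-supplied `SystemTool` (via the new `ToolSystemTool` variant), streamed responses can carry tool results (`ContentBlockStartToolResult` / `ContentBlockDeltaToolResult`), `ConverseTokensRequest` accepts `toolConfig` and `additionalModelRequestFields`, and each Converse/InvokeModel input and output shape gains an optional service tier. The sketch below shows how a caller might wire the new system-tool union into a `Converse` request. It is illustrative only: the `BedrockRuntimeClient` import path, the async `converse(input=...)` call shape, and the `model_id`, `Message`, and `ContentBlockText` names are assumptions inferred from the generated-code patterns visible in this diff, not confirmed by it.

```python
# Minimal usage sketch (not part of this diff) for the SystemTool support added
# in aws_sdk_bedrock_runtime v0.2.0. Names marked "assumed" are not shown in the
# hunks above and may differ in the generated package.
import asyncio

from aws_sdk_bedrock_runtime.client import BedrockRuntimeClient  # assumed module path
from aws_sdk_bedrock_runtime.models import (
    ContentBlockText,  # assumed text variant of the ContentBlock union
    ConverseInput,
    Message,  # assumed: role + content members, defined earlier in models.py
    SystemTool,
    ToolConfiguration,
    ToolSystemTool,
)


async def main() -> None:
    # Region and credentials are assumed to resolve from the environment.
    client = BedrockRuntimeClient()

    # A system-defined tool is referenced by name only; the model provider owns
    # its implementation, so no input schema is attached (unlike ToolToolSpec).
    tool_config = ToolConfiguration(
        tools=[ToolSystemTool(value=SystemTool(name="web_search"))]  # hypothetical tool name
    )

    request = ConverseInput(
        model_id="anthropic.claude-3-7-sonnet-20250219-v1:0",  # hypothetical model ID
        messages=[
            Message(
                role="user",
                content=[ContentBlockText(value="Summarize today's Bedrock launches.")],
            )
        ],
        tool_config=tool_config,
        # service_tier=ServiceTier(...) could also be set here; the ServiceTier
        # members are defined earlier in models.py and are not visible in this hunk.
    )

    response = await client.converse(input=request)
    print(response)


if __name__ == "__main__":
    asyncio.run(main())
```

On the packaging side, the `aws_sdk_python` meta-package diff above pins `aws_sdk_bedrock_runtime==0.2.0` and adds a `sagemaker_runtime_http2` extra, so the `all` extra now pulls in all three service clients.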
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/CHANGELOG.md b/clients/aws-sdk-sagemaker-runtime-http2/CHANGELOG.md new file mode 100644 index 0000000..b0ce462 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/CHANGELOG.md @@ -0,0 +1,8 @@ +# Changelog + +## Unreleased + +## v0.1.0 + +### Features +* Initial client release with support for current Amazon SageMaker Runtime HTTP2 operations. diff --git a/clients/aws-sdk-sagemaker-runtime-http2/LICENSE b/clients/aws-sdk-sagemaker-runtime-http2/LICENSE new file mode 100644 index 0000000..67db858 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
diff --git a/clients/aws-sdk-sagemaker-runtime-http2/NOTICE b/clients/aws-sdk-sagemaker-runtime-http2/NOTICE new file mode 100644 index 0000000..616fc58 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/NOTICE @@ -0,0 +1 @@ +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. diff --git a/clients/aws-sdk-sagemaker-runtime-http2/README.md b/clients/aws-sdk-sagemaker-runtime-http2/README.md new file mode 100644 index 0000000..fa1cda1 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/README.md @@ -0,0 +1,14 @@ +## Amazon SageMaker Runtime HTTP2 Client + +The `aws_sdk_sagemaker_runtime_http2` client is still under active development. +Breaking changes may occur prior to the release of version +1.0.0. + + +### Documentation + +Documentation is available in the `/docs` directory of this package. +Pages can be built into portable HTML files for the time being. You can +follow the instructions in the docs [README.md](https://github.com/awslabs/aws-sdk-python/blob/main/clients/aws-sdk-sagemaker-runtime-http/docs/README.md). + +For high-level documentation, you can view the [`dev-guide`](https://github.com/awslabs/aws-sdk-python/tree/main/dev-guide) at the top level of this repo. diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/Makefile b/clients/aws-sdk-sagemaker-runtime-http2/docs/Makefile new file mode 100644 index 0000000..59458fa --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/Makefile @@ -0,0 +1,15 @@ +# Code generated by smithy-python-codegen DO NOT EDIT. + +SPHINXBUILD = sphinx-build +BUILDDIR = build +SERVICESDIR = source/reference/services +SPHINXOPTS = -j auto +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(SPHINXOPTS) . + +clean: + -rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/README.md b/clients/aws-sdk-sagemaker-runtime-http2/docs/README.md new file mode 100644 index 0000000..141b2c7 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/README.md @@ -0,0 +1,10 @@ +## Generating Documentation + +Sphinx is used for documentation. You can generate HTML locally with the +following: + +``` +$ uv pip install --group docs . +$ cd docs +$ make html +``` diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/client/index.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/client/index.rst new file mode 100644 index 0000000..371f32a --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/client/index.rst @@ -0,0 +1,11 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +Client +======= +.. toctree:: + :maxdepth: 1 + :titlesonly: + :glob: + + * diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/client/invoke_endpoint_with_bidirectional_stream.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/client/invoke_endpoint_with_bidirectional_stream.rst new file mode 100644 index 0000000..6f8143a --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/client/invoke_endpoint_with_bidirectional_stream.rst @@ -0,0 +1,25 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +invoke_endpoint_with_bidirectional_stream +========================================= + +.. automethod:: aws_sdk_sagemaker_runtime_http2.client.SageMakerRuntimeHTTP2Client.invoke_endpoint_with_bidirectional_stream + +.. toctree:: + :hidden: + :maxdepth: 2 + +================= +Input: +================= + +.. 
autoclass:: aws_sdk_sagemaker_runtime_http2.models.InvokeEndpointWithBidirectionalStreamInput + :members: + +================= +Output: +================= + +.. autoclass:: aws_sdk_sagemaker_runtime_http2.models.InvokeEndpointWithBidirectionalStreamOutput + :members: diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/conf.py b/clients/aws-sdk-sagemaker-runtime-http2/docs/conf.py new file mode 100644 index 0000000..0ff545c --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/conf.py @@ -0,0 +1,24 @@ +# Code generated by smithy-python-codegen DO NOT EDIT. + +import os +import sys + +sys.path.insert(0, os.path.abspath("..")) + +project = "Amazon SageMaker Runtime HTTP2" +author = "Amazon Web Services" +release = "0.1.0" + +extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode"] + +templates_path = ["_templates"] +exclude_patterns = [] + +autodoc_default_options = { + "exclude-members": "deserialize,deserialize_kwargs,serialize,serialize_members" +} + +html_theme = "pydata_sphinx_theme" +html_theme_options = {"logo": {"text": "Amazon SageMaker Runtime HTTP2"}} + +autodoc_typehints = "description" diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/index.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/index.rst new file mode 100644 index 0000000..bebb6b1 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/index.rst @@ -0,0 +1,12 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +Amazon SageMaker Runtime HTTP2 +============================== + +.. toctree:: + :maxdepth: 2 + :titlesonly: + :glob: + + */index diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/make.bat b/clients/aws-sdk-sagemaker-runtime-http2/docs/make.bat new file mode 100644 index 0000000..3245132 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/make.bat @@ -0,0 +1,35 @@ +REM Code generated by smithy-python-codegen DO NOT EDIT. + +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=build +set SERVICESDIR=source/reference/services +set SPHINXOPTS=-j auto +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . + +if "%1" == "" goto help + +if "%1" == "clean" ( + rmdir /S /Q %BUILDDIR% + goto end +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + echo. + echo "Build finished. The HTML pages are in %BUILDDIR%/html." + goto end +) + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/InputValidationError.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/InputValidationError.rst new file mode 100644 index 0000000..62049d4 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/InputValidationError.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +InputValidationError +==================== + +.. autoexception:: aws_sdk_sagemaker_runtime_http2.models.InputValidationError + :members: + :show-inheritance: diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/InternalServerError.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/InternalServerError.rst new file mode 100644 index 0000000..2056072 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/InternalServerError.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +InternalServerError +=================== + +.. 
autoexception:: aws_sdk_sagemaker_runtime_http2.models.InternalServerError + :members: + :show-inheritance: diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/InternalStreamFailure.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/InternalStreamFailure.rst new file mode 100644 index 0000000..a08da4b --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/InternalStreamFailure.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +InternalStreamFailure +===================== + +.. autoexception:: aws_sdk_sagemaker_runtime_http2.models.InternalStreamFailure + :members: + :show-inheritance: diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ModelError.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ModelError.rst new file mode 100644 index 0000000..af0a19b --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ModelError.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +ModelError +========== + +.. autoexception:: aws_sdk_sagemaker_runtime_http2.models.ModelError + :members: + :show-inheritance: diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ModelStreamError.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ModelStreamError.rst new file mode 100644 index 0000000..402ca42 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ModelStreamError.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +ModelStreamError +================ + +.. autoexception:: aws_sdk_sagemaker_runtime_http2.models.ModelStreamError + :members: + :show-inheritance: diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/RequestPayloadPart.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/RequestPayloadPart.rst new file mode 100644 index 0000000..eeeca1b --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/RequestPayloadPart.rst @@ -0,0 +1,8 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +RequestPayloadPart +================== + +.. autoclass:: aws_sdk_sagemaker_runtime_http2.models.RequestPayloadPart + :members: diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/RequestStreamEvent.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/RequestStreamEvent.rst new file mode 100644 index 0000000..f1050c7 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/RequestStreamEvent.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _RequestStreamEvent: + +RequestStreamEvent +================== + +.. autodata:: aws_sdk_sagemaker_runtime_http2.models.RequestStreamEvent diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/RequestStreamEventPayloadPart.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/RequestStreamEventPayloadPart.rst new file mode 100644 index 0000000..fc4834e --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/RequestStreamEventPayloadPart.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _RequestStreamEventPayloadPart: + +RequestStreamEventPayloadPart +============================= + +.. 
autoclass:: aws_sdk_sagemaker_runtime_http2.models.RequestStreamEventPayloadPart diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/RequestStreamEventUnknown.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/RequestStreamEventUnknown.rst new file mode 100644 index 0000000..769b832 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/RequestStreamEventUnknown.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _RequestStreamEventUnknown: + +RequestStreamEventUnknown +========================= + +.. autoclass:: aws_sdk_sagemaker_runtime_http2.models.RequestStreamEventUnknown diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponsePayloadPart.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponsePayloadPart.rst new file mode 100644 index 0000000..cc0e7a7 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponsePayloadPart.rst @@ -0,0 +1,8 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +ResponsePayloadPart +=================== + +.. autoclass:: aws_sdk_sagemaker_runtime_http2.models.ResponsePayloadPart + :members: diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEvent.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEvent.rst new file mode 100644 index 0000000..9089eef --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEvent.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _ResponseStreamEvent: + +ResponseStreamEvent +=================== + +.. autodata:: aws_sdk_sagemaker_runtime_http2.models.ResponseStreamEvent diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEventInternalStreamFailure.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEventInternalStreamFailure.rst new file mode 100644 index 0000000..c6047ed --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEventInternalStreamFailure.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _ResponseStreamEventInternalStreamFailure: + +ResponseStreamEventInternalStreamFailure +======================================== + +.. autoclass:: aws_sdk_sagemaker_runtime_http2.models.ResponseStreamEventInternalStreamFailure diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEventModelStreamError.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEventModelStreamError.rst new file mode 100644 index 0000000..df57dcb --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEventModelStreamError.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _ResponseStreamEventModelStreamError: + +ResponseStreamEventModelStreamError +=================================== + +.. autoclass:: aws_sdk_sagemaker_runtime_http2.models.ResponseStreamEventModelStreamError diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEventPayloadPart.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEventPayloadPart.rst new file mode 100644 index 0000000..fab46c7 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEventPayloadPart.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _ResponseStreamEventPayloadPart: + +ResponseStreamEventPayloadPart +============================== + +.. 
autoclass:: aws_sdk_sagemaker_runtime_http2.models.ResponseStreamEventPayloadPart diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEventUnknown.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEventUnknown.rst new file mode 100644 index 0000000..5a8f95a --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ResponseStreamEventUnknown.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +.. _ResponseStreamEventUnknown: + +ResponseStreamEventUnknown +========================== + +.. autoclass:: aws_sdk_sagemaker_runtime_http2.models.ResponseStreamEventUnknown diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ServiceUnavailableError.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ServiceUnavailableError.rst new file mode 100644 index 0000000..b245543 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/ServiceUnavailableError.rst @@ -0,0 +1,9 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +ServiceUnavailableError +======================= + +.. autoexception:: aws_sdk_sagemaker_runtime_http2.models.ServiceUnavailableError + :members: + :show-inheritance: diff --git a/clients/aws-sdk-sagemaker-runtime-http2/docs/models/index.rst b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/index.rst new file mode 100644 index 0000000..c403929 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/docs/models/index.rst @@ -0,0 +1,11 @@ +.. + Code generated by smithy-python-codegen DO NOT EDIT. + +Models +======= +.. toctree:: + :maxdepth: 1 + :titlesonly: + :glob: + + * diff --git a/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml b/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml new file mode 100644 index 0000000..8b9a41a --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/pyproject.toml @@ -0,0 +1,73 @@ +# Code generated by smithy-python-codegen DO NOT EDIT. 
+ + +[project] +name = "aws_sdk_sagemaker_runtime_http2" +version = "0.1.0" +description = "aws_sdk_sagemaker_runtime_http2 client" +readme = "README.md" +requires-python = ">=3.12" +keywords = ["smithy", "aws_sdk_sagemaker_runtime_http2"] +license = {text = "Apache-2.0"} +classifiers = [ + "Development Status :: 2 - Pre-Alpha", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Natural Language :: English", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14" +] + +dependencies = [ + "smithy_aws_core[eventstream, json]~=0.2.0", + "smithy_core~=0.2.0", + "smithy_http[awscrt]~=0.3.0" +] + +[dependency-groups] +test = [ + "pytest>=7.2.0,<8.0.0", + "pytest-asyncio>=0.20.3,<0.21.0" +] + +docs = [ + "pydata-sphinx-theme>=0.16.1", + "sphinx>=8.2.3" +] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.bdist] +exclude = [ + "tests", + "docs", +] + +[tool.pyright] +typeCheckingMode = "strict" +reportPrivateUsage = false +reportUnusedFunction = false +reportUnusedVariable = false +reportUnnecessaryComparison = false +reportUnusedClass = false +enableExperimentalFeatures = true + +[tool.ruff] +target-version = "py312" + +[tool.ruff.lint] +ignore = ["F841"] + +[tool.ruff.format] +skip-magic-trailing-comma = true + +[tool.pytest.ini_options] +python_classes = ["!Test"] +asyncio_mode = "auto" diff --git a/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/__init__.py b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/__init__.py new file mode 100644 index 0000000..e1ee049 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/__init__.py @@ -0,0 +1,3 @@ +# Code generated by smithy-python-codegen DO NOT EDIT. + +__version__: str = "0.1.0" diff --git a/clients/aws-sdk-bedrock-runtime/docs/client/__init__.py b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/_private/__init__.py similarity index 100% rename from clients/aws-sdk-bedrock-runtime/docs/client/__init__.py rename to clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/_private/__init__.py diff --git a/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/_private/schemas.py b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/_private/schemas.py new file mode 100644 index 0000000..6701d12 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/_private/schemas.py @@ -0,0 +1,3369 @@ +# Code generated by smithy-python-codegen DO NOT EDIT. 
+ +from types import MappingProxyType + +from smithy_core.prelude import INTEGER, STRING +from smithy_core.schemas import Schema +from smithy_core.shapes import ShapeID, ShapeType +from smithy_core.traits import Trait + + +INPUT_VALIDATION_ERROR = Schema.collection( + id=ShapeID("com.amazonaws.sagemakerruntimehttp2#InputValidationError"), + traits=[ + Trait.new(id=ShapeID("smithy.api#error"), value="client"), + Trait.new(id=ShapeID("smithy.api#httpError"), value=400), + ], + members={"Message": {"target": STRING}, "ErrorCode": {"target": STRING}}, +) + +INTERNAL_SERVER_ERROR = Schema.collection( + id=ShapeID("com.amazonaws.sagemakerruntimehttp2#InternalServerError"), + traits=[ + Trait.new(id=ShapeID("smithy.api#error"), value="server"), + Trait.new(id=ShapeID("smithy.api#httpError"), value=500), + ], + members={"Message": {"target": STRING}, "ErrorCode": {"target": STRING}}, +) + +INTERNAL_STREAM_FAILURE = Schema.collection( + id=ShapeID("com.amazonaws.sagemakerruntimehttp2#InternalStreamFailure"), + traits=[Trait.new(id=ShapeID("smithy.api#error"), value="server")], + members={"Message": {"target": STRING}}, +) + +SENSITIVE_BLOB = Schema( + id=ShapeID("com.amazonaws.sagemakerruntimehttp2#SensitiveBlob"), + shape_type=ShapeType.BLOB, + traits=[Trait.new(id=ShapeID("smithy.api#sensitive"))], +) + +REQUEST_PAYLOAD_PART = Schema.collection( + id=ShapeID("com.amazonaws.sagemakerruntimehttp2#RequestPayloadPart"), + members={ + "Bytes": { + "target": SENSITIVE_BLOB, + "traits": [Trait.new(id=ShapeID("smithy.api#eventPayload"))], + }, + "DataType": { + "target": STRING, + "traits": [ + Trait.new(id=ShapeID("smithy.api#eventHeader")), + Trait.new( + id=ShapeID("smithy.api#pattern"), value="^(UTF8)$|^(BINARY)$" + ), + ], + }, + "CompletionState": { + "target": STRING, + "traits": [ + Trait.new(id=ShapeID("smithy.api#eventHeader")), + Trait.new( + id=ShapeID("smithy.api#pattern"), value="^(PARTIAL)$|^(COMPLETE)$" + ), + ], + }, + "P": { + "target": STRING, + "traits": [Trait.new(id=ShapeID("smithy.api#eventHeader"))], + }, + }, +) + +REQUEST_STREAM_EVENT = Schema.collection( + id=ShapeID("com.amazonaws.sagemakerruntimehttp2#RequestStreamEvent"), + shape_type=ShapeType.UNION, + traits=[Trait.new(id=ShapeID("smithy.api#streaming"))], + members={"PayloadPart": {"target": REQUEST_PAYLOAD_PART}}, +) + +INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT = Schema.collection( + id=ShapeID( + "com.amazonaws.sagemakerruntimehttp2#InvokeEndpointWithBidirectionalStreamInput" + ), + traits=[Trait.new(id=ShapeID("smithy.api#input"))], + members={ + "EndpointName": { + "target": STRING, + "traits": [ + Trait.new(id=ShapeID("smithy.api#required")), + Trait.new(id=ShapeID("smithy.api#httpLabel")), + Trait.new( + id=ShapeID("smithy.api#length"), value=MappingProxyType({"max": 63}) + ), + Trait.new( + id=ShapeID("smithy.api#pattern"), + value="^[a-zA-Z0-9](-*[a-zA-Z0-9])*", + ), + ], + }, + "Body": { + "target": REQUEST_STREAM_EVENT, + "traits": [ + Trait.new(id=ShapeID("smithy.api#required")), + Trait.new(id=ShapeID("smithy.api#httpPayload")), + ], + }, + "TargetVariant": { + "target": STRING, + "traits": [ + Trait.new( + id=ShapeID("smithy.api#httpHeader"), + value="X-Amzn-SageMaker-Target-Variant", + ), + Trait.new( + id=ShapeID("smithy.api#length"), value=MappingProxyType({"max": 63}) + ), + Trait.new( + id=ShapeID("smithy.api#pattern"), + value="^[a-zA-Z0-9](-*[a-zA-Z0-9])*", + ), + ], + }, + "ModelInvocationPath": { + "target": STRING, + "traits": [ + Trait.new( + id=ShapeID("smithy.api#httpHeader"), + 
value="X-Amzn-SageMaker-Model-Invocation-Path", + ), + Trait.new( + id=ShapeID("smithy.api#length"), + value=MappingProxyType({"max": 100}), + ), + Trait.new( + id=ShapeID("smithy.api#pattern"), + value="^[A-Za-z0-9\\-._]+(?:/[A-Za-z0-9\\-._]+)*$", + ), + ], + }, + "ModelQueryString": { + "target": STRING, + "traits": [ + Trait.new( + id=ShapeID("smithy.api#httpHeader"), + value="X-Amzn-SageMaker-Model-Query-String", + ), + Trait.new( + id=ShapeID("smithy.api#length"), + value=MappingProxyType({"max": 2048}), + ), + Trait.new( + id=ShapeID("smithy.api#pattern"), + value="^[a-zA-Z0-9][A-Za-z0-9_-]*=(?:[A-Za-z0-9._~\\-]|%[0-9A-Fa-f]{2})+(?:&[a-zA-Z0-9][A-Za-z0-9_-]*=(?:[A-Za-z0-9._~\\-]|%[0-9A-Fa-f]{2})+)*$", + ), + ], + }, + }, +) + +MODEL_STREAM_ERROR = Schema.collection( + id=ShapeID("com.amazonaws.sagemakerruntimehttp2#ModelStreamError"), + traits=[Trait.new(id=ShapeID("smithy.api#error"), value="client")], + members={"Message": {"target": STRING}, "ErrorCode": {"target": STRING}}, +) + +RESPONSE_PAYLOAD_PART = Schema.collection( + id=ShapeID("com.amazonaws.sagemakerruntimehttp2#ResponsePayloadPart"), + members={ + "Bytes": { + "target": SENSITIVE_BLOB, + "traits": [Trait.new(id=ShapeID("smithy.api#eventPayload"))], + }, + "DataType": { + "target": STRING, + "traits": [ + Trait.new(id=ShapeID("smithy.api#eventHeader")), + Trait.new( + id=ShapeID("smithy.api#pattern"), value="^(UTF8)$|^(BINARY)$" + ), + ], + }, + "CompletionState": { + "target": STRING, + "traits": [ + Trait.new(id=ShapeID("smithy.api#eventHeader")), + Trait.new( + id=ShapeID("smithy.api#pattern"), value="^(PARTIAL)$|^(COMPLETE)$" + ), + ], + }, + "P": { + "target": STRING, + "traits": [Trait.new(id=ShapeID("smithy.api#eventHeader"))], + }, + }, +) + +RESPONSE_STREAM_EVENT = Schema.collection( + id=ShapeID("com.amazonaws.sagemakerruntimehttp2#ResponseStreamEvent"), + shape_type=ShapeType.UNION, + traits=[Trait.new(id=ShapeID("smithy.api#streaming"))], + members={ + "PayloadPart": {"target": RESPONSE_PAYLOAD_PART}, + "ModelStreamError": {"target": MODEL_STREAM_ERROR}, + "InternalStreamFailure": {"target": INTERNAL_STREAM_FAILURE}, + }, +) + +INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_OUTPUT = Schema.collection( + id=ShapeID( + "com.amazonaws.sagemakerruntimehttp2#InvokeEndpointWithBidirectionalStreamOutput" + ), + traits=[Trait.new(id=ShapeID("smithy.api#output"))], + members={ + "Body": { + "target": RESPONSE_STREAM_EVENT, + "traits": [ + Trait.new(id=ShapeID("smithy.api#required")), + Trait.new(id=ShapeID("smithy.api#httpPayload")), + ], + }, + "InvokedProductionVariant": { + "target": STRING, + "traits": [ + Trait.new( + id=ShapeID("smithy.api#httpHeader"), + value="X-Amzn-Invoked-Production-Variant", + ), + Trait.new( + id=ShapeID("smithy.api#length"), + value=MappingProxyType({"max": 1024}), + ), + Trait.new(id=ShapeID("smithy.api#pattern"), value="\\p{ASCII}*"), + ], + }, + }, +) + +MODEL_ERROR = Schema.collection( + id=ShapeID("com.amazonaws.sagemakerruntimehttp2#ModelError"), + traits=[ + Trait.new(id=ShapeID("smithy.api#error"), value="client"), + Trait.new(id=ShapeID("smithy.api#httpError"), value=424), + ], + members={ + "Message": {"target": STRING}, + "OriginalStatusCode": {"target": INTEGER}, + "OriginalMessage": {"target": STRING}, + "LogStreamArn": {"target": STRING}, + "ErrorCode": {"target": STRING}, + }, +) + +SERVICE_UNAVAILABLE_ERROR = Schema.collection( + id=ShapeID("com.amazonaws.sagemakerruntimehttp2#ServiceUnavailableError"), + traits=[ + Trait.new(id=ShapeID("smithy.api#error"), value="server"), + 
Trait.new(id=ShapeID("smithy.api#httpError"), value=503), + ], + members={"Message": {"target": STRING}, "ErrorCode": {"target": STRING}}, +) + +INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM = Schema( + id=ShapeID( + "com.amazonaws.sagemakerruntimehttp2#InvokeEndpointWithBidirectionalStream" + ), + shape_type=ShapeType.OPERATION, + traits=[ + Trait.new( + id=ShapeID("smithy.api#http"), + value=MappingProxyType( + { + "method": "POST", + "uri": "/endpoints/{EndpointName}/invocations-bidirectional-stream", + } + ), + ) + ], +) + +AMAZON_SAGE_MAKER_RUNTIME_HTTP2 = Schema( + id=ShapeID("com.amazonaws.sagemakerruntimehttp2#AmazonSageMakerRuntimeHttp2"), + shape_type=ShapeType.SERVICE, + traits=[ + Trait.new( + id=ShapeID("aws.auth#sigv4"), value=MappingProxyType({"name": "sagemaker"}) + ), + Trait.new( + id=ShapeID("smithy.api#title"), value="Amazon SageMaker Runtime HTTP2" + ), + Trait.new( + id=ShapeID("smithy.rules#endpointTests"), + value=MappingProxyType( + { + "testCases": ( + MappingProxyType( + { + "documentation": "For custom endpoint with region not set and fips disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + {"url": "https://example.com"} + ) + } + ), + "params": MappingProxyType( + { + "Endpoint": "https://example.com", + "UseFIPS": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For custom endpoint with fips enabled", + "expect": MappingProxyType( + { + "error": "Invalid Configuration: FIPS and custom endpoint are not supported" + } + ), + "params": MappingProxyType( + {"Endpoint": "https://example.com", "UseFIPS": True} + ), + } + ), + MappingProxyType( + { + "documentation": "For custom endpoint with fips disabled and dualstack enabled", + "expect": MappingProxyType( + { + "error": "Invalid Configuration: Dualstack and custom endpoint are not supported" + } + ), + "params": MappingProxyType( + { + "Endpoint": "https://example.com", + "UseFIPS": False, + "UseDualStack": True, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-east-1 with FIPS enabled and DualStack enabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.us-east-1.api.aws:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-east-1", + "UseFIPS": True, + "UseDualStack": True, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-east-1 with FIPS enabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.us-east-1.amazonaws.com:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-east-1", + "UseFIPS": True, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-east-1 with FIPS disabled and DualStack enabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.us-east-1.api.aws:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-east-1", + "UseFIPS": False, + "UseDualStack": True, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-east-1 with FIPS disabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.us-east-1.amazonaws.com:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-east-1", + "UseFIPS": False, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": 
"For region cn-northwest-1 with FIPS enabled and DualStack enabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.cn-northwest-1.api.amazonwebservices.com.cn:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "cn-northwest-1", + "UseFIPS": True, + "UseDualStack": True, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region cn-northwest-1 with FIPS enabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.cn-northwest-1.amazonaws.com.cn:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "cn-northwest-1", + "UseFIPS": True, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region cn-northwest-1 with FIPS disabled and DualStack enabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.cn-northwest-1.api.amazonwebservices.com.cn:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "cn-northwest-1", + "UseFIPS": False, + "UseDualStack": True, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region cn-northwest-1 with FIPS disabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.cn-northwest-1.amazonaws.com.cn:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "cn-northwest-1", + "UseFIPS": False, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region eusc-de-east-1 with FIPS enabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.eusc-de-east-1.amazonaws.eu:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "eusc-de-east-1", + "UseFIPS": True, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region eusc-de-east-1 with FIPS disabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.eusc-de-east-1.amazonaws.eu:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "eusc-de-east-1", + "UseFIPS": False, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-iso-east-1 with FIPS enabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.us-iso-east-1.c2s.ic.gov:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-iso-east-1", + "UseFIPS": True, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-iso-east-1 with FIPS disabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.us-iso-east-1.c2s.ic.gov:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-iso-east-1", + "UseFIPS": False, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-isob-east-1 with FIPS enabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.us-isob-east-1.sc2s.sgov.gov:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-isob-east-1", + "UseFIPS": True, + 
"UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-isob-east-1 with FIPS disabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.us-isob-east-1.sc2s.sgov.gov:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-isob-east-1", + "UseFIPS": False, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region eu-isoe-west-1 with FIPS enabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker-fips.eu-isoe-west-1.cloud.adc-e.uk" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "eu-isoe-west-1", + "UseFIPS": True, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region eu-isoe-west-1 with FIPS disabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.eu-isoe-west-1.cloud.adc-e.uk" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "eu-isoe-west-1", + "UseFIPS": False, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-isof-south-1 with FIPS enabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.us-isof-south-1.csp.hci.ic.gov:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-isof-south-1", + "UseFIPS": True, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-isof-south-1 with FIPS disabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.us-isof-south-1.csp.hci.ic.gov:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-isof-south-1", + "UseFIPS": False, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-gov-west-1 with FIPS enabled and DualStack enabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.us-gov-west-1.api.aws:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-gov-west-1", + "UseFIPS": True, + "UseDualStack": True, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-gov-west-1 with FIPS enabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.us-gov-west-1.amazonaws.com:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-gov-west-1", + "UseFIPS": True, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-gov-west-1 with FIPS disabled and DualStack enabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.us-gov-west-1.api.aws:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-gov-west-1", + "UseFIPS": False, + "UseDualStack": True, + } + ), + } + ), + MappingProxyType( + { + "documentation": "For region us-gov-west-1 with FIPS disabled and DualStack disabled", + "expect": MappingProxyType( + { + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.us-gov-west-1.amazonaws.com:8443" + } + ) + } + ), + "params": MappingProxyType( + { + "Region": "us-gov-west-1", + 
"UseFIPS": False, + "UseDualStack": False, + } + ), + } + ), + MappingProxyType( + { + "documentation": "Missing region", + "expect": MappingProxyType( + {"error": "Invalid Configuration: Missing Region"} + ), + } + ), + ), + "version": "1.0", + } + ), + ), + Trait.new( + id=ShapeID("smithy.rules#endpointRuleSet"), + value=MappingProxyType( + { + "version": "1.0", + "parameters": MappingProxyType( + { + "UseDualStack": MappingProxyType( + { + "builtIn": "AWS::UseDualStack", + "required": True, + "default": False, + "documentation": "When true, use the dual-stack endpoint. If the configured endpoint does not support dual-stack, dispatching the request MAY return an error.", + "type": "boolean", + } + ), + "UseFIPS": MappingProxyType( + { + "builtIn": "AWS::UseFIPS", + "required": True, + "default": False, + "documentation": "When true, send this request to the FIPS-compliant regional endpoint. If the configured endpoint does not have a FIPS compliant endpoint, dispatching the request will return an error.", + "type": "boolean", + } + ), + "Endpoint": MappingProxyType( + { + "builtIn": "SDK::Endpoint", + "required": False, + "documentation": "Override the endpoint used to send this request", + "type": "string", + } + ), + "Region": MappingProxyType( + { + "builtIn": "AWS::Region", + "required": False, + "documentation": "The AWS region used to dispatch the request.", + "type": "string", + } + ), + } + ), + "rules": ( + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "isSet", + "argv": ( + MappingProxyType({"ref": "Endpoint"}), + ), + } + ), + ), + "rules": ( + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + {"ref": "UseFIPS"} + ), + True, + ), + } + ), + ), + "error": "Invalid Configuration: FIPS and custom endpoint are not supported", + "type": "error", + } + ), + MappingProxyType( + { + "conditions": (), + "rules": ( + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "error": "Invalid Configuration: Dualstack and custom endpoint are not supported", + "type": "error", + } + ), + MappingProxyType( + { + "conditions": (), + "endpoint": MappingProxyType( + { + "url": MappingProxyType( + {"ref": "Endpoint"} + ), + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + ), + "type": "tree", + } + ), + ), + "type": "tree", + } + ), + MappingProxyType( + { + "conditions": (), + "rules": ( + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "isSet", + "argv": ( + MappingProxyType( + {"ref": "Region"} + ), + ), + } + ), + ), + "rules": ( + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "aws.partition", + "argv": ( + MappingProxyType( + { + "ref": "Region" + } + ), + ), + "assign": "PartitionResult", + } + ), + ), + "rules": ( + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + 
), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-cn", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + 
"fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-cn", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-cn", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-cn", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-us-gov", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-us-gov", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + 
"ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-us-gov", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-us-gov", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-iso", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-iso", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + 
MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-iso", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-iso", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-iso-b", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-iso-b", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-iso-b", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + 
MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-iso-b", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-iso-f", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-iso-f", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-iso-f", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), 
+ "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-iso-f", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-eusc", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-eusc", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-eusc", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "stringEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "name", + ), + } + ), + "aws-eusc", + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( 
+ MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "endpoint": MappingProxyType( + { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "rules": ( + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + True, + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "supportsFIPS", + ), + } + ), + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + True, + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "supportsDualStack", + ), + } + ), + ), + } + ), + ), + "rules": ( + MappingProxyType( + { + "conditions": (), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + ), + "type": "tree", + } + ), + MappingProxyType( + { + "conditions": (), + "error": "FIPS and DualStack are enabled, but this partition does not support one or both", + "type": "error", + } + ), + ), + "type": "tree", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + True, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + False, + ), + } + ), + ), + "rules": ( + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + "supportsFIPS", + ), + } + ), + True, + ), + } + ), + ), + "rules": ( + MappingProxyType( + { + "conditions": (), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker-fips.{Region}.{PartitionResult#dnsSuffix}", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + ), + "type": "tree", + } + ), + MappingProxyType( + { + "conditions": (), + "error": "FIPS is enabled but this partition does not support FIPS", + "type": "error", + } + ), + ), + "type": "tree", + } + ), + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseFIPS" + } + ), + False, + ), + } + ), + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + MappingProxyType( + { + "ref": "UseDualStack" + } + ), + True, + ), + } + ), + ), + "rules": ( + MappingProxyType( + { + "conditions": ( + MappingProxyType( + { + "fn": "booleanEquals", + "argv": ( + True, + MappingProxyType( + { + "fn": "getAttr", + "argv": ( + MappingProxyType( + { + "ref": "PartitionResult" + } + ), + 
"supportsDualStack", + ), + } + ), + ), + } + ), + ), + "rules": ( + MappingProxyType( + { + "conditions": (), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + ), + "type": "tree", + } + ), + MappingProxyType( + { + "conditions": (), + "error": "DualStack is enabled but this partition does not support DualStack", + "type": "error", + } + ), + ), + "type": "tree", + } + ), + MappingProxyType( + { + "conditions": (), + "endpoint": MappingProxyType( + { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}", + "properties": MappingProxyType( + {} + ), + "headers": MappingProxyType( + {} + ), + } + ), + "type": "endpoint", + } + ), + ), + "type": "tree", + } + ), + ), + "type": "tree", + } + ), + MappingProxyType( + { + "conditions": (), + "error": "Invalid Configuration: Missing Region", + "type": "error", + } + ), + ), + "type": "tree", + } + ), + ), + } + ), + ), + Trait.new( + id=ShapeID("aws.api#service"), + value=MappingProxyType( + { + "sdkId": "SageMaker Runtime HTTP2", + "arnNamespace": "sagemaker", + "cloudFormationName": "SageMakerRuntime", + "endpointPrefix": "runtime.sagemaker", + } + ), + ), + Trait.new( + id=ShapeID("aws.endpoints#standardRegionalEndpoints"), + value=MappingProxyType( + { + "partitionSpecialCases": MappingProxyType( + { + "aws": ( + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": False, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": False, + "dualStack": True, + } + ), + MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": True, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": True, + "dualStack": True, + } + ), + ), + "aws-cn": ( + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": False, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": False, + "dualStack": True, + } + ), + MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": True, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": True, + "dualStack": True, + } + ), + ), + "aws-us-gov": ( + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": False, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": False, + "dualStack": True, + } + ), + MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": True, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": True, + "dualStack": True, + } + ), + ), + "aws-iso": ( + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": False, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": False, + "dualStack": True, + } + ), + 
MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": True, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": True, + "dualStack": True, + } + ), + ), + "aws-iso-b": ( + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": False, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": False, + "dualStack": True, + } + ), + MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": True, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": True, + "dualStack": True, + } + ), + ), + "aws-iso-f": ( + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": False, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": False, + "dualStack": True, + } + ), + MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": True, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": True, + "dualStack": True, + } + ), + ), + "aws-eusc": ( + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": False, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": False, + "dualStack": True, + } + ), + MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": True, + "dualStack": False, + } + ), + MappingProxyType( + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": True, + "dualStack": True, + } + ), + ), + } + ) + } + ), + ), + Trait.new( + id=ShapeID("aws.protocols#restJson1"), + value=MappingProxyType({"http": ("h2",), "eventStreamHttp": ("h2",)}), + ), + ], +) diff --git a/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/auth.py b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/auth.py new file mode 100644 index 0000000..fcc4043 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/auth.py @@ -0,0 +1,29 @@ +# Code generated by smithy-python-codegen DO NOT EDIT. 
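The partitionSpecialCases table above maps each (fips, dualStack) combination to an endpoint template such as https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443. As a rough illustration of how that data drives endpoint selection (this is not the SDK's resolver; the generated config wires up StandardRegionalEndpointsResolver from smithy_aws_core, and the suffix values below are just the usual ones for the aws partition):

# Illustrative sketch only: selects a template from a partitionSpecialCases-style
# table and substitutes the placeholders. The real work is done by
# smithy_aws_core.endpoints.standard_regional.StandardRegionalEndpointsResolver.
AWS_SPECIAL_CASES = [
    {"endpoint": "https://{service}.{region}.{dnsSuffix}:8443", "fips": False, "dualStack": False},
    {"endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", "fips": False, "dualStack": True},
    {"endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", "fips": True, "dualStack": False},
    {"endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", "fips": True, "dualStack": True},
]


def pick_endpoint(region: str, *, fips: bool = False, dual_stack: bool = False) -> str:
    for case in AWS_SPECIAL_CASES:
        if case["fips"] == fips and case["dualStack"] == dual_stack:
            return case["endpoint"].format(
                service="runtime.sagemaker",
                region=region,
                dnsSuffix="amazonaws.com",     # assumed: standard aws-partition suffix
                dualStackDnsSuffix="api.aws",  # assumed: aws-partition dual-stack suffix
            )
    raise ValueError("no matching endpoint template")


# pick_endpoint("us-east-1", fips=True)
# -> "https://runtime-fips.sagemaker.us-east-1.amazonaws.com:8443"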
+ +from typing import Any + +from smithy_core.auth import AuthOption, AuthParams +from smithy_core.interfaces.auth import AuthOption as AuthOptionProtocol +from smithy_core.shapes import ShapeID + + +class HTTPAuthSchemeResolver: + def resolve_auth_scheme( + self, auth_parameters: AuthParams[Any, Any] + ) -> list[AuthOptionProtocol]: + auth_options: list[AuthOptionProtocol] = [] + + if (option := _generate_sigv4_option(auth_parameters)) is not None: + auth_options.append(option) + + return auth_options + + +def _generate_sigv4_option( + auth_params: AuthParams[Any, Any], +) -> AuthOptionProtocol | None: + return AuthOption( + scheme_id=ShapeID("aws.auth#sigv4"), + identity_properties={}, # type: ignore + signer_properties={}, # type: ignore + ) diff --git a/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/client.py b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/client.py new file mode 100644 index 0000000..2829a88 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/client.py @@ -0,0 +1,145 @@ +# Code generated by smithy-python-codegen DO NOT EDIT. + +from copy import deepcopy +import logging + +from smithy_core.aio.client import ClientCall, RequestPipeline +from smithy_core.aio.eventstream import DuplexEventStream +from smithy_core.exceptions import ExpectationNotMetError +from smithy_core.interceptors import InterceptorChain +from smithy_core.interfaces.retries import RetryStrategy +from smithy_core.retries import RetryStrategyOptions, RetryStrategyResolver +from smithy_core.types import TypedProperties +from smithy_http.plugins import user_agent_plugin + +from .config import Config, Plugin +from .models import ( + INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM, + InvokeEndpointWithBidirectionalStreamInput, + InvokeEndpointWithBidirectionalStreamOutput, + RequestStreamEvent, + ResponseStreamEvent, + _ResponseStreamEventDeserializer, +) +from .user_agent import aws_user_agent_plugin + + +logger = logging.getLogger(__name__) + + +class SageMakerRuntimeHTTP2Client: + """ + The Amazon SageMaker AI runtime HTTP/2 API. + + :param config: Optional configuration for the client. Here you can set things like the + endpoint for HTTP services or auth credentials. + + :param plugins: A list of callables that modify the configuration dynamically. These + can be used to set defaults, for example. + """ + + def __init__( + self, config: Config | None = None, plugins: list[Plugin] | None = None + ): + self._config = config or Config() + + client_plugins: list[Plugin] = [aws_user_agent_plugin, user_agent_plugin] + if plugins: + client_plugins.extend(plugins) + + for plugin in client_plugins: + plugin(self._config) + + self._retry_strategy_resolver = RetryStrategyResolver() + + async def invoke_endpoint_with_bidirectional_stream( + self, + input: InvokeEndpointWithBidirectionalStreamInput, + plugins: list[Plugin] | None = None, + ) -> DuplexEventStream[ + RequestStreamEvent, + ResponseStreamEvent, + InvokeEndpointWithBidirectionalStreamOutput, + ]: + """ + Invokes a model endpoint with bidirectional streaming capabilities. This + operation establishes a persistent connection that allows you to send multiple + requests and receive streaming responses from the model in real-time. + + Bidirectional streaming is useful for interactive applications such as chatbots, + real-time translation, or any scenario where you need to maintain a + conversation-like interaction with the model. 
The connection remains open, + allowing you to send additional input and receive responses without establishing + a new connection for each request. + + For an overview of Amazon SageMaker AI, see `How It Works `_ + . + + Amazon SageMaker AI strips all POST headers except those supported by the API. + Amazon SageMaker AI might add additional headers. You should not rely on the + behavior of headers outside those enumerated in the request syntax. + + Calls to ``InvokeEndpointWithBidirectionalStream`` are authenticated by using Amazon Web Services Signature Version 4. For information, see `Authenticating Requests (Amazon Web Services Signature Version 4) `_ + in the *Amazon S3 API Reference*. + + The bidirectional stream maintains the connection until either the client closes + it or the model indicates completion. Each request and response in the stream is + sent as an event with optional headers for data type and completion state. + + .. note:: + Endpoints are scoped to an individual account, and are not public. The URL does + not contain the account ID, but Amazon SageMaker AI determines the account ID + from the authentication token that is supplied by the caller. + + :param input: The operation's input. + + :param plugins: A list of callables that modify the configuration dynamically. + Changes made by these plugins only apply for the duration of the operation + execution and will not affect any other operation invocations. + """ + operation_plugins: list[Plugin] = [] + if plugins: + operation_plugins.extend(plugins) + config = deepcopy(self._config) + for plugin in operation_plugins: + plugin(config) + if config.protocol is None or config.transport is None: + raise ExpectationNotMetError( + "protocol and transport MUST be set on the config to make calls." + ) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) + call = ClientCall( + input=input, + operation=INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM, + context=TypedProperties({"config": config}), + interceptor=InterceptorChain(config.interceptors), + auth_scheme_resolver=config.auth_scheme_resolver, + supported_auth_schemes=config.auth_schemes, + endpoint_resolver=config.endpoint_resolver, + retry_strategy=retry_strategy, + ) + + return await pipeline.duplex_stream( + call, + RequestStreamEvent, + ResponseStreamEvent, + _ResponseStreamEventDeserializer().deserialize, + ) diff --git a/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/config.py b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/config.py new file mode 100644 index 0000000..81a3692 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/config.py @@ -0,0 +1,184 @@ +# Code generated by smithy-python-codegen DO NOT EDIT. 
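The client above hands bidirectional traffic back to the caller as a DuplexEventStream. A minimal usage sketch follows, with the caveat that the endpoint name is a placeholder and that the stream-object members used below (input_stream, await_output, receive) are assumptions about smithy_core's DuplexEventStream rather than anything defined in this diff:

# Sketch only. Credential/region discovery, error handling, and the exact
# DuplexEventStream surface (input_stream / await_output / receive) should be
# verified against smithy_core before relying on this.
import asyncio

from aws_sdk_sagemaker_runtime_http2.client import SageMakerRuntimeHTTP2Client
from aws_sdk_sagemaker_runtime_http2.config import Config
from aws_sdk_sagemaker_runtime_http2.models import (
    InvokeEndpointWithBidirectionalStreamInput,
    RequestPayloadPart,
    RequestStreamEventPayloadPart,
)


async def main() -> None:
    client = SageMakerRuntimeHTTP2Client(config=Config(region="us-east-1"))
    stream = await client.invoke_endpoint_with_bidirectional_stream(
        InvokeEndpointWithBidirectionalStreamInput(endpoint_name="my-endpoint")  # placeholder name
    )
    # Send one UTF-8 payload part and mark the request side complete.
    await stream.input_stream.send(
        RequestStreamEventPayloadPart(
            RequestPayloadPart(bytes_=b"hello", data_type="UTF8", completion_state="COMPLETE")
        )
    )
    # Assumed helper: returns the initial operation output plus an event receiver.
    output, events = await stream.await_output()
    print(output.invoked_production_variant)
    print(await events.receive())  # assumed receiver API; yields one ResponseStreamEvent


asyncio.run(main())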
+ +from dataclasses import dataclass +from typing import Any, Callable, TypeAlias, Union + +from smithy_aws_core.aio.protocols import RestJsonClientProtocol +from smithy_aws_core.auth import SigV4AuthScheme +from smithy_aws_core.endpoints.standard_regional import ( + StandardRegionalEndpointsResolver as _RegionalResolver, +) +from smithy_aws_core.identity import AWSCredentialsIdentity, AWSIdentityProperties +from smithy_core.aio.interfaces import ( + ClientProtocol, + ClientTransport, + EndpointResolver as _EndpointResolver, +) +from smithy_core.aio.interfaces.auth import AuthScheme +from smithy_core.aio.interfaces.identity import IdentityResolver +from smithy_core.interceptors import Interceptor +from smithy_core.interfaces import URI +from smithy_core.interfaces.retries import RetryStrategy +from smithy_core.retries import RetryStrategyOptions +from smithy_core.shapes import ShapeID +from smithy_http.aio.crt import AWSCRTHTTPClient +from smithy_http.interfaces import HTTPRequestConfiguration + +from ._private.schemas import ( + AMAZON_SAGE_MAKER_RUNTIME_HTTP2 as _SCHEMA_AMAZON_SAGE_MAKER_RUNTIME_HTTP2, +) +from .auth import HTTPAuthSchemeResolver +from .models import ( + InvokeEndpointWithBidirectionalStreamInput, + InvokeEndpointWithBidirectionalStreamOutput, +) + + +_ServiceInterceptor = Union[ + Interceptor[ + InvokeEndpointWithBidirectionalStreamInput, + InvokeEndpointWithBidirectionalStreamOutput, + Any, + Any, + ] +] + + +@dataclass(init=False) +class Config: + """Configuration for SageMaker Runtime HTTP2.""" + + auth_scheme_resolver: HTTPAuthSchemeResolver + auth_schemes: dict[ShapeID, AuthScheme[Any, Any, Any, Any]] + aws_access_key_id: str | None + aws_credentials_identity_resolver: ( + IdentityResolver[AWSCredentialsIdentity, AWSIdentityProperties] | None + ) + aws_secret_access_key: str | None + aws_session_token: str | None + endpoint_resolver: _EndpointResolver + endpoint_uri: str | URI | None + http_request_config: HTTPRequestConfiguration | None + interceptors: list[_ServiceInterceptor] + protocol: ClientProtocol[Any, Any] | None + region: str | None + retry_strategy: RetryStrategy | RetryStrategyOptions | None + sdk_ua_app_id: str | None + transport: ClientTransport[Any, Any] | None + user_agent_extra: str | None + + def __init__( + self, + *, + auth_scheme_resolver: HTTPAuthSchemeResolver | None = None, + auth_schemes: dict[ShapeID, AuthScheme[Any, Any, Any, Any]] | None = None, + aws_access_key_id: str | None = None, + aws_credentials_identity_resolver: IdentityResolver[ + AWSCredentialsIdentity, AWSIdentityProperties + ] + | None = None, + aws_secret_access_key: str | None = None, + aws_session_token: str | None = None, + endpoint_resolver: _EndpointResolver | None = None, + endpoint_uri: str | URI | None = None, + http_request_config: HTTPRequestConfiguration | None = None, + interceptors: list[_ServiceInterceptor] | None = None, + protocol: ClientProtocol[Any, Any] | None = None, + region: str | None = None, + retry_strategy: RetryStrategy | RetryStrategyOptions | None = None, + sdk_ua_app_id: str | None = None, + transport: ClientTransport[Any, Any] | None = None, + user_agent_extra: str | None = None, + ): + """ + Constructor. + + :param auth_scheme_resolver: + An auth scheme resolver that determines the auth scheme for each operation. + + :param auth_schemes: + A map of auth scheme ids to auth schemes. + + :param aws_access_key_id: + The identifier for a secret access key. + + :param aws_credentials_identity_resolver: + Resolves AWS Credentials. 
Required for operations that use Sigv4 Auth. + + :param aws_secret_access_key: + A secret access key that can be used to sign requests. + + :param aws_session_token: + An access key ID that identifies temporary security credentials. + + :param endpoint_resolver: + The endpoint resolver used to resolve the final endpoint per-operation based on + the configuration. + + :param endpoint_uri: + A static URI to route requests to. + + :param http_request_config: + Configuration for individual HTTP requests. + + :param interceptors: + The list of interceptors, which are hooks that are called during the execution + of a request. + + :param protocol: + The protocol to serialize and deserialize requests with. + + :param region: + The AWS region to connect to. The configured region is used to determine the + service endpoint. + + :param retry_strategy: + The retry strategy or options for configuring retry behavior. Can be either a + configured RetryStrategy or RetryStrategyOptions to create one. + + :param sdk_ua_app_id: + A unique and opaque application ID that is appended to the User-Agent header. + + :param transport: + The transport to use to send requests (e.g. an HTTP client). + + :param user_agent_extra: + Additional suffix to be added to the User-Agent header. + """ + self.auth_scheme_resolver = auth_scheme_resolver or HTTPAuthSchemeResolver() + self.auth_schemes = auth_schemes or { + ShapeID("aws.auth#sigv4"): SigV4AuthScheme(service="sagemaker") + } + self.aws_access_key_id = aws_access_key_id + self.aws_credentials_identity_resolver = aws_credentials_identity_resolver + self.aws_secret_access_key = aws_secret_access_key + self.aws_session_token = aws_session_token + self.endpoint_resolver = endpoint_resolver or _RegionalResolver( + endpoint_prefix="runtime.sagemaker" + ) + self.endpoint_uri = endpoint_uri + self.http_request_config = http_request_config + self.interceptors = interceptors or [] + self.protocol = protocol or RestJsonClientProtocol( + _SCHEMA_AMAZON_SAGE_MAKER_RUNTIME_HTTP2 + ) + self.region = region + self.retry_strategy = retry_strategy + self.sdk_ua_app_id = sdk_ua_app_id + self.transport = transport or AWSCRTHTTPClient() + self.user_agent_extra = user_agent_extra + + def set_auth_scheme(self, scheme: AuthScheme[Any, Any, Any, Any]) -> None: + """Sets the implementation of an auth scheme. + + Using this method ensures the correct key is used. + + :param scheme: The auth scheme to add. + """ + self.auth_schemes[scheme.scheme_id] = scheme + + +# +# A callable that allows customizing the config object on each request. +# +Plugin: TypeAlias = Callable[[Config], None] diff --git a/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/models.py b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/models.py new file mode 100644 index 0000000..02539c6 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/models.py @@ -0,0 +1,970 @@ +# Code generated by smithy-python-codegen DO NOT EDIT. 
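Because Plugin is just Callable[[Config], None] and the client deep-copies its Config for each operation, behaviour can be customized without subclassing anything. A small illustrative plugin (the attribute values are arbitrary examples) can be passed at construction time or per call:

# Illustrative plugin: any callable that accepts Config and returns None qualifies.
from aws_sdk_sagemaker_runtime_http2.config import Config


def tag_user_agent(config: Config) -> None:
    # Append an application-specific suffix and app id to the outgoing User-Agent header.
    config.user_agent_extra = "example-app/1.0"
    config.sdk_ua_app_id = "example-app"


# client = SageMakerRuntimeHTTP2Client(plugins=[tag_user_agent])
# ...or scoped to a single operation:
# await client.invoke_endpoint_with_bidirectional_stream(input, plugins=[tag_user_agent])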
+ +from dataclasses import dataclass, field +import logging +from typing import Any, Literal, Self, Union + +from smithy_core.deserializers import ShapeDeserializer +from smithy_core.documents import TypeRegistry +from smithy_core.exceptions import ModeledError, SerializationError +from smithy_core.schemas import APIOperation, Schema +from smithy_core.serializers import ShapeSerializer +from smithy_core.shapes import ShapeID + +from ._private.schemas import ( + INPUT_VALIDATION_ERROR as _SCHEMA_INPUT_VALIDATION_ERROR, + INTERNAL_SERVER_ERROR as _SCHEMA_INTERNAL_SERVER_ERROR, + INTERNAL_STREAM_FAILURE as _SCHEMA_INTERNAL_STREAM_FAILURE, + INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM as _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM, + INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT as _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT, + INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_OUTPUT as _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_OUTPUT, + MODEL_ERROR as _SCHEMA_MODEL_ERROR, + MODEL_STREAM_ERROR as _SCHEMA_MODEL_STREAM_ERROR, + REQUEST_PAYLOAD_PART as _SCHEMA_REQUEST_PAYLOAD_PART, + REQUEST_STREAM_EVENT as _SCHEMA_REQUEST_STREAM_EVENT, + RESPONSE_PAYLOAD_PART as _SCHEMA_RESPONSE_PAYLOAD_PART, + RESPONSE_STREAM_EVENT as _SCHEMA_RESPONSE_STREAM_EVENT, + SERVICE_UNAVAILABLE_ERROR as _SCHEMA_SERVICE_UNAVAILABLE_ERROR, +) + + +logger = logging.getLogger(__name__) + + +class ServiceError(ModeledError): + """Base error for all errors in the service. + + Some exceptions do not extend from this class, including + synthetic, implicit, and shared exception types. + """ + + +@dataclass(kw_only=True) +class InputValidationError(ServiceError): + """ + The input fails to satisfy the constraints specified by an AWS service. + """ + + fault: Literal["client", "server"] | None = "client" + + error_code: str | None = None + """ + Error code. + """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_INPUT_VALIDATION_ERROR, self) + + def serialize_members(self, serializer: ShapeSerializer): + if self.message is not None: + serializer.write_string( + _SCHEMA_INPUT_VALIDATION_ERROR.members["Message"], self.message + ) + + if self.error_code is not None: + serializer.write_string( + _SCHEMA_INPUT_VALIDATION_ERROR.members["ErrorCode"], self.error_code + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["message"] = de.read_string( + _SCHEMA_INPUT_VALIDATION_ERROR.members["Message"] + ) + + case 1: + kwargs["error_code"] = de.read_string( + _SCHEMA_INPUT_VALIDATION_ERROR.members["ErrorCode"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_INPUT_VALIDATION_ERROR, consumer=_consumer) + return kwargs + + +@dataclass(kw_only=True) +class InternalServerError(ServiceError): + """ + The request processing has failed because of an unknown error, exception or + failure. + """ + + fault: Literal["client", "server"] | None = "server" + + error_code: str | None = None + """ + Error code. 
+ """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_INTERNAL_SERVER_ERROR, self) + + def serialize_members(self, serializer: ShapeSerializer): + if self.message is not None: + serializer.write_string( + _SCHEMA_INTERNAL_SERVER_ERROR.members["Message"], self.message + ) + + if self.error_code is not None: + serializer.write_string( + _SCHEMA_INTERNAL_SERVER_ERROR.members["ErrorCode"], self.error_code + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["message"] = de.read_string( + _SCHEMA_INTERNAL_SERVER_ERROR.members["Message"] + ) + + case 1: + kwargs["error_code"] = de.read_string( + _SCHEMA_INTERNAL_SERVER_ERROR.members["ErrorCode"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_INTERNAL_SERVER_ERROR, consumer=_consumer) + return kwargs + + +@dataclass(kw_only=True) +class InternalStreamFailure(ServiceError): + """ + Internal stream failure that occurs during streaming. + """ + + fault: Literal["client", "server"] | None = "server" + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_INTERNAL_STREAM_FAILURE, self) + + def serialize_members(self, serializer: ShapeSerializer): + if self.message is not None: + serializer.write_string( + _SCHEMA_INTERNAL_STREAM_FAILURE.members["Message"], self.message + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["message"] = de.read_string( + _SCHEMA_INTERNAL_STREAM_FAILURE.members["Message"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_INTERNAL_STREAM_FAILURE, consumer=_consumer) + return kwargs + + +@dataclass(kw_only=True) +class RequestPayloadPart: + """ + Request payload part structure. + """ + + bytes_: bytes | None = field(repr=False, default=None) + """ + The payload bytes. + """ + + data_type: str | None = None + """ + Data type header. Can be one of these possible values: "UTF8", "BINARY". + """ + + completion_state: str | None = None + """ + Completion state header. Can be one of these possible values: "PARTIAL", + "COMPLETE". + """ + + p: str | None = None + """ + Padding string for alignment. 
+ """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_REQUEST_PAYLOAD_PART, self) + + def serialize_members(self, serializer: ShapeSerializer): + if self.bytes_ is not None: + serializer.write_blob( + _SCHEMA_REQUEST_PAYLOAD_PART.members["Bytes"], self.bytes_ + ) + + if self.data_type is not None: + serializer.write_string( + _SCHEMA_REQUEST_PAYLOAD_PART.members["DataType"], self.data_type + ) + + if self.completion_state is not None: + serializer.write_string( + _SCHEMA_REQUEST_PAYLOAD_PART.members["CompletionState"], + self.completion_state, + ) + + if self.p is not None: + serializer.write_string(_SCHEMA_REQUEST_PAYLOAD_PART.members["P"], self.p) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["bytes_"] = de.read_blob( + _SCHEMA_REQUEST_PAYLOAD_PART.members["Bytes"] + ) + + case 1: + kwargs["data_type"] = de.read_string( + _SCHEMA_REQUEST_PAYLOAD_PART.members["DataType"] + ) + + case 2: + kwargs["completion_state"] = de.read_string( + _SCHEMA_REQUEST_PAYLOAD_PART.members["CompletionState"] + ) + + case 3: + kwargs["p"] = de.read_string( + _SCHEMA_REQUEST_PAYLOAD_PART.members["P"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_REQUEST_PAYLOAD_PART, consumer=_consumer) + return kwargs + + +@dataclass +class RequestStreamEventPayloadPart: + """ + Payload part event. + """ + + value: RequestPayloadPart + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_REQUEST_STREAM_EVENT, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_struct( + _SCHEMA_REQUEST_STREAM_EVENT.members["PayloadPart"], self.value + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(value=RequestPayloadPart.deserialize(deserializer)) + + +@dataclass +class RequestStreamEventUnknown: + """Represents an unknown variant. + + If you receive this value, you will need to update your library to receive the + parsed value. + + This value may not be deliberately sent. + """ + + tag: str + + def serialize(self, serializer: ShapeSerializer): + raise SerializationError("Unknown union variants may not be serialized.") + + def serialize_members(self, serializer: ShapeSerializer): + raise SerializationError("Unknown union variants may not be serialized.") + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + raise NotImplementedError() + + +RequestStreamEvent = Union[RequestStreamEventPayloadPart | RequestStreamEventUnknown] + +""" +Request stream event union. +""" + + +class _RequestStreamEventDeserializer: + _result: RequestStreamEvent | None = None + + def deserialize(self, deserializer: ShapeDeserializer) -> RequestStreamEvent: + self._result = None + deserializer.read_struct(_SCHEMA_REQUEST_STREAM_EVENT, self._consumer) + + if self._result is None: + raise SerializationError( + "Unions must have exactly one value, but found none." 
+ ) + + return self._result + + def _consumer(self, schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + self._set_result(RequestStreamEventPayloadPart.deserialize(de)) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + def _set_result(self, value: RequestStreamEvent) -> None: + if self._result is not None: + raise SerializationError( + "Unions must have exactly one value, but found more than one." + ) + self._result = value + + +@dataclass(kw_only=True) +class InvokeEndpointWithBidirectionalStreamInput: + endpoint_name: str | None = None + """ + The name of the endpoint to invoke. + """ + + target_variant: str | None = None + """ + Target variant for the request. + """ + + model_invocation_path: str | None = None + """ + Model invocation path. + """ + + model_query_string: str | None = None + """ + Model query string. + """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT, self + ) + + def serialize_members(self, serializer: ShapeSerializer): + if self.endpoint_name is not None: + serializer.write_string( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT.members[ + "EndpointName" + ], + self.endpoint_name, + ) + + if self.target_variant is not None: + serializer.write_string( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT.members[ + "TargetVariant" + ], + self.target_variant, + ) + + if self.model_invocation_path is not None: + serializer.write_string( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT.members[ + "ModelInvocationPath" + ], + self.model_invocation_path, + ) + + if self.model_query_string is not None: + serializer.write_string( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT.members[ + "ModelQueryString" + ], + self.model_query_string, + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["endpoint_name"] = de.read_string( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT.members[ + "EndpointName" + ] + ) + + case 2: + kwargs["target_variant"] = de.read_string( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT.members[ + "TargetVariant" + ] + ) + + case 3: + kwargs["model_invocation_path"] = de.read_string( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT.members[ + "ModelInvocationPath" + ] + ) + + case 4: + kwargs["model_query_string"] = de.read_string( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT.members[ + "ModelQueryString" + ] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT, consumer=_consumer + ) + return kwargs + + +@dataclass(kw_only=True) +class ModelStreamError(ServiceError): + """ + Model stream error that occurs during streaming. + """ + + fault: Literal["client", "server"] | None = "client" + + error_code: str | None = None + """ + Error code. 
+ """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_MODEL_STREAM_ERROR, self) + + def serialize_members(self, serializer: ShapeSerializer): + if self.message is not None: + serializer.write_string( + _SCHEMA_MODEL_STREAM_ERROR.members["Message"], self.message + ) + + if self.error_code is not None: + serializer.write_string( + _SCHEMA_MODEL_STREAM_ERROR.members["ErrorCode"], self.error_code + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["message"] = de.read_string( + _SCHEMA_MODEL_STREAM_ERROR.members["Message"] + ) + + case 1: + kwargs["error_code"] = de.read_string( + _SCHEMA_MODEL_STREAM_ERROR.members["ErrorCode"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_MODEL_STREAM_ERROR, consumer=_consumer) + return kwargs + + +@dataclass(kw_only=True) +class ResponsePayloadPart: + """ + Response payload part structure. + """ + + bytes_: bytes | None = field(repr=False, default=None) + """ + The payload bytes. + """ + + data_type: str | None = None + """ + Data type header. Can be one of these possible values: "UTF8", "BINARY". + """ + + completion_state: str | None = None + """ + Completion state header. Can be one of these possible values: "PARTIAL", + "COMPLETE". + """ + + p: str | None = None + """ + Padding string for alignment. + """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_RESPONSE_PAYLOAD_PART, self) + + def serialize_members(self, serializer: ShapeSerializer): + if self.bytes_ is not None: + serializer.write_blob( + _SCHEMA_RESPONSE_PAYLOAD_PART.members["Bytes"], self.bytes_ + ) + + if self.data_type is not None: + serializer.write_string( + _SCHEMA_RESPONSE_PAYLOAD_PART.members["DataType"], self.data_type + ) + + if self.completion_state is not None: + serializer.write_string( + _SCHEMA_RESPONSE_PAYLOAD_PART.members["CompletionState"], + self.completion_state, + ) + + if self.p is not None: + serializer.write_string(_SCHEMA_RESPONSE_PAYLOAD_PART.members["P"], self.p) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["bytes_"] = de.read_blob( + _SCHEMA_RESPONSE_PAYLOAD_PART.members["Bytes"] + ) + + case 1: + kwargs["data_type"] = de.read_string( + _SCHEMA_RESPONSE_PAYLOAD_PART.members["DataType"] + ) + + case 2: + kwargs["completion_state"] = de.read_string( + _SCHEMA_RESPONSE_PAYLOAD_PART.members["CompletionState"] + ) + + case 3: + kwargs["p"] = de.read_string( + _SCHEMA_RESPONSE_PAYLOAD_PART.members["P"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_RESPONSE_PAYLOAD_PART, consumer=_consumer) + return kwargs + + +@dataclass +class ResponseStreamEventPayloadPart: + """ + Payload part event. 
+ """ + + value: ResponsePayloadPart + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_RESPONSE_STREAM_EVENT, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_struct( + _SCHEMA_RESPONSE_STREAM_EVENT.members["PayloadPart"], self.value + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(value=ResponsePayloadPart.deserialize(deserializer)) + + +@dataclass +class ResponseStreamEventModelStreamError: + """ + Model stream error event. + """ + + value: ModelStreamError + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_RESPONSE_STREAM_EVENT, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_struct( + _SCHEMA_RESPONSE_STREAM_EVENT.members["ModelStreamError"], self.value + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(value=ModelStreamError.deserialize(deserializer)) + + +@dataclass +class ResponseStreamEventInternalStreamFailure: + """ + Internal stream failure event. + """ + + value: InternalStreamFailure + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_RESPONSE_STREAM_EVENT, self) + + def serialize_members(self, serializer: ShapeSerializer): + serializer.write_struct( + _SCHEMA_RESPONSE_STREAM_EVENT.members["InternalStreamFailure"], self.value + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(value=InternalStreamFailure.deserialize(deserializer)) + + +@dataclass +class ResponseStreamEventUnknown: + """Represents an unknown variant. + + If you receive this value, you will need to update your library to receive the + parsed value. + + This value may not be deliberately sent. + """ + + tag: str + + def serialize(self, serializer: ShapeSerializer): + raise SerializationError("Unknown union variants may not be serialized.") + + def serialize_members(self, serializer: ShapeSerializer): + raise SerializationError("Unknown union variants may not be serialized.") + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + raise NotImplementedError() + + +ResponseStreamEvent = Union[ + ResponseStreamEventPayloadPart + | ResponseStreamEventModelStreamError + | ResponseStreamEventInternalStreamFailure + | ResponseStreamEventUnknown +] + +""" +Response stream event union. +""" + + +class _ResponseStreamEventDeserializer: + _result: ResponseStreamEvent | None = None + + def deserialize(self, deserializer: ShapeDeserializer) -> ResponseStreamEvent: + self._result = None + deserializer.read_struct(_SCHEMA_RESPONSE_STREAM_EVENT, self._consumer) + + if self._result is None: + raise SerializationError( + "Unions must have exactly one value, but found none." + ) + + return self._result + + def _consumer(self, schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + self._set_result(ResponseStreamEventPayloadPart.deserialize(de)) + + case 1: + self._set_result(ResponseStreamEventModelStreamError.deserialize(de)) + + case 2: + self._set_result( + ResponseStreamEventInternalStreamFailure.deserialize(de) + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + def _set_result(self, value: ResponseStreamEvent) -> None: + if self._result is not None: + raise SerializationError( + "Unions must have exactly one value, but found more than one." 
+ ) + self._result = value + + +@dataclass(kw_only=True) +class InvokeEndpointWithBidirectionalStreamOutput: + invoked_production_variant: str | None = None + """ + The invoked production variant. + """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_OUTPUT, self + ) + + def serialize_members(self, serializer: ShapeSerializer): + if self.invoked_production_variant is not None: + serializer.write_string( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_OUTPUT.members[ + "InvokedProductionVariant" + ], + self.invoked_production_variant, + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 1: + kwargs["invoked_production_variant"] = de.read_string( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_OUTPUT.members[ + "InvokedProductionVariant" + ] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct( + _SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_OUTPUT, consumer=_consumer + ) + return kwargs + + +@dataclass(kw_only=True) +class ModelError(ServiceError): + """ + An error occurred while processing the model. + """ + + fault: Literal["client", "server"] | None = "client" + + original_status_code: int | None = None + """ + HTTP status code returned by model. + """ + + original_message: str | None = None + """ + Original error message from the model. + """ + + log_stream_arn: str | None = None + """ + CloudWatch log stream ARN. + """ + + error_code: str | None = None + """ + Error code. 
+ """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_MODEL_ERROR, self) + + def serialize_members(self, serializer: ShapeSerializer): + if self.message is not None: + serializer.write_string( + _SCHEMA_MODEL_ERROR.members["Message"], self.message + ) + + if self.original_status_code is not None: + serializer.write_integer( + _SCHEMA_MODEL_ERROR.members["OriginalStatusCode"], + self.original_status_code, + ) + + if self.original_message is not None: + serializer.write_string( + _SCHEMA_MODEL_ERROR.members["OriginalMessage"], self.original_message + ) + + if self.log_stream_arn is not None: + serializer.write_string( + _SCHEMA_MODEL_ERROR.members["LogStreamArn"], self.log_stream_arn + ) + + if self.error_code is not None: + serializer.write_string( + _SCHEMA_MODEL_ERROR.members["ErrorCode"], self.error_code + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["message"] = de.read_string( + _SCHEMA_MODEL_ERROR.members["Message"] + ) + + case 1: + kwargs["original_status_code"] = de.read_integer( + _SCHEMA_MODEL_ERROR.members["OriginalStatusCode"] + ) + + case 2: + kwargs["original_message"] = de.read_string( + _SCHEMA_MODEL_ERROR.members["OriginalMessage"] + ) + + case 3: + kwargs["log_stream_arn"] = de.read_string( + _SCHEMA_MODEL_ERROR.members["LogStreamArn"] + ) + + case 4: + kwargs["error_code"] = de.read_string( + _SCHEMA_MODEL_ERROR.members["ErrorCode"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_MODEL_ERROR, consumer=_consumer) + return kwargs + + +@dataclass(kw_only=True) +class ServiceUnavailableError(ServiceError): + """ + The request has failed due to a temporary failure of the server. + """ + + fault: Literal["client", "server"] | None = "server" + + error_code: str | None = None + """ + Error code. 
+ """ + + def serialize(self, serializer: ShapeSerializer): + serializer.write_struct(_SCHEMA_SERVICE_UNAVAILABLE_ERROR, self) + + def serialize_members(self, serializer: ShapeSerializer): + if self.message is not None: + serializer.write_string( + _SCHEMA_SERVICE_UNAVAILABLE_ERROR.members["Message"], self.message + ) + + if self.error_code is not None: + serializer.write_string( + _SCHEMA_SERVICE_UNAVAILABLE_ERROR.members["ErrorCode"], self.error_code + ) + + @classmethod + def deserialize(cls, deserializer: ShapeDeserializer) -> Self: + return cls(**cls.deserialize_kwargs(deserializer)) + + @classmethod + def deserialize_kwargs(cls, deserializer: ShapeDeserializer) -> dict[str, Any]: + kwargs: dict[str, Any] = {} + + def _consumer(schema: Schema, de: ShapeDeserializer) -> None: + match schema.expect_member_index(): + case 0: + kwargs["message"] = de.read_string( + _SCHEMA_SERVICE_UNAVAILABLE_ERROR.members["Message"] + ) + + case 1: + kwargs["error_code"] = de.read_string( + _SCHEMA_SERVICE_UNAVAILABLE_ERROR.members["ErrorCode"] + ) + + case _: + logger.debug("Unexpected member schema: %s", schema) + + deserializer.read_struct(_SCHEMA_SERVICE_UNAVAILABLE_ERROR, consumer=_consumer) + return kwargs + + +INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM = APIOperation( + input=InvokeEndpointWithBidirectionalStreamInput, + output=InvokeEndpointWithBidirectionalStreamOutput, + schema=_SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM, + input_schema=_SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_INPUT, + output_schema=_SCHEMA_INVOKE_ENDPOINT_WITH_BIDIRECTIONAL_STREAM_OUTPUT, + error_registry=TypeRegistry( + { + ShapeID( + "com.amazonaws.sagemakerruntimehttp2#InputValidationError" + ): InputValidationError, + ShapeID( + "com.amazonaws.sagemakerruntimehttp2#InternalServerError" + ): InternalServerError, + ShapeID( + "com.amazonaws.sagemakerruntimehttp2#InternalStreamFailure" + ): InternalStreamFailure, + ShapeID("com.amazonaws.sagemakerruntimehttp2#ModelError"): ModelError, + ShapeID( + "com.amazonaws.sagemakerruntimehttp2#ModelStreamError" + ): ModelStreamError, + ShapeID( + "com.amazonaws.sagemakerruntimehttp2#ServiceUnavailableError" + ): ServiceUnavailableError, + } + ), + effective_auth_schemes=[ShapeID("aws.auth#sigv4")], +) diff --git a/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/user_agent.py b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/user_agent.py new file mode 100644 index 0000000..643af39 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/src/aws_sdk_sagemaker_runtime_http2/user_agent.py @@ -0,0 +1,17 @@ +# Code generated by smithy-python-codegen DO NOT EDIT. + +from smithy_aws_core.interceptors.user_agent import UserAgentInterceptor + +from . 
import __version__ +from .config import Config + + +def aws_user_agent_plugin(config: Config): + config.interceptors.append( + UserAgentInterceptor( + ua_suffix=config.user_agent_extra, + ua_app_id=config.sdk_ua_app_id, + sdk_version=__version__, + service_id="SageMaker_Runtime_HTTP2", + ) + ) diff --git a/clients/aws-sdk-bedrock-runtime/docs/models/__init__.py b/clients/aws-sdk-sagemaker-runtime-http2/tests/__init__.py similarity index 100% rename from clients/aws-sdk-bedrock-runtime/docs/models/__init__.py rename to clients/aws-sdk-sagemaker-runtime-http2/tests/__init__.py diff --git a/clients/aws-sdk-sagemaker-runtime-http2/tests/test_protocol.py b/clients/aws-sdk-sagemaker-runtime-http2/tests/test_protocol.py new file mode 100644 index 0000000..f8f66e0 --- /dev/null +++ b/clients/aws-sdk-sagemaker-runtime-http2/tests/test_protocol.py @@ -0,0 +1,64 @@ +# Code generated by smithy-python-codegen DO NOT EDIT. + +from smithy_core.aio.utils import async_list +from smithy_http import tuples_to_fields +from smithy_http.aio import HTTPResponse as _HTTPResponse +from smithy_http.aio.interfaces import HTTPRequest, HTTPResponse +from smithy_http.interfaces import HTTPClientConfiguration, HTTPRequestConfiguration + +from aws_sdk_sagemaker_runtime_http2.models import ServiceError + + +class TestHttpServiceError(ServiceError): + """A test error that subclasses the service-error for protocol tests.""" + + def __init__(self, request: HTTPRequest): + self.request = request + + +class RequestTestHTTPClient: + """An asynchronous HTTP client solely for testing purposes.""" + + TIMEOUT_EXCEPTIONS = () + + def __init__(self, *, client_config: HTTPClientConfiguration | None = None): + self._client_config = client_config + + async def send( + self, + request: HTTPRequest, + *, + request_config: HTTPRequestConfiguration | None = None, + ) -> HTTPResponse: + # Raise the exception with the request object to bypass actual request handling + raise TestHttpServiceError(request) + + +class ResponseTestHTTPClient: + """An asynchronous HTTP client solely for testing purposes.""" + + TIMEOUT_EXCEPTIONS = () + + def __init__( + self, + *, + client_config: HTTPClientConfiguration | None = None, + status: int = 200, + headers: list[tuple[str, str]] | None = None, + body: bytes = b"", + ): + self._client_config = client_config + self.status = status + self.fields = tuples_to_fields(headers or []) + self.body = body + + async def send( + self, + request: HTTPRequest, + *, + request_config: HTTPRequestConfiguration | None = None, + ) -> _HTTPResponse: + # Pre-construct the response from the request and return it + return _HTTPResponse( + status=self.status, fields=self.fields, body=async_list([self.body]) + ) diff --git a/clients/aws-sdk-transcribe-streaming/CHANGELOG.md b/clients/aws-sdk-transcribe-streaming/CHANGELOG.md index 44e0818..e2ec005 100644 --- a/clients/aws-sdk-transcribe-streaming/CHANGELOG.md +++ b/clients/aws-sdk-transcribe-streaming/CHANGELOG.md @@ -2,6 +2,19 @@ ## Unreleased +## v0.2.0 + +### API Changes +* This release adds support for additional locales in AWS transcribe streaming. + +### Enhancements +* Add Standard Retry Mode + +### Dependencies +* **Updated**: `smithy_aws_core[eventstream, json]` from `~=0.1.0` to `~=0.2.0`. +* **Updated**: `smithy_core` from `~=0.1.0` to `~=0.2.0`. +* **Updated**: `smithy_http[awscrt]~=0.3.0` from `~=0.2.0` to `~=0.3.0`. 
+ ## v0.1.0 ### Features diff --git a/clients/aws-sdk-transcribe-streaming/docs/conf.py b/clients/aws-sdk-transcribe-streaming/docs/conf.py index c6b9c11..e746b4c 100644 --- a/clients/aws-sdk-transcribe-streaming/docs/conf.py +++ b/clients/aws-sdk-transcribe-streaming/docs/conf.py @@ -7,7 +7,7 @@ project = "Amazon Transcribe Streaming Service" author = "Amazon Web Services" -release = "0.0.1" +release = "0.2.0" extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode"] diff --git a/clients/aws-sdk-transcribe-streaming/pyproject.toml b/clients/aws-sdk-transcribe-streaming/pyproject.toml index 116e150..17e258a 100644 --- a/clients/aws-sdk-transcribe-streaming/pyproject.toml +++ b/clients/aws-sdk-transcribe-streaming/pyproject.toml @@ -3,7 +3,7 @@ [project] name = "aws_sdk_transcribe_streaming" -version = "0.1.0" +version = "0.2.0" description = "aws_sdk_transcribe_streaming client" readme = "README.md" requires-python = ">=3.12" @@ -24,9 +24,9 @@ classifiers = [ ] dependencies = [ - "smithy_aws_core[eventstream, json]~=0.1.0", - "smithy_core~=0.1.0", - "smithy_http[awscrt]~=0.2.0" + "smithy_aws_core[eventstream, json]~=0.2.0", + "smithy_core~=0.2.0", + "smithy_http[awscrt]~=0.3.0" ] [dependency-groups] diff --git a/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/__init__.py b/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/__init__.py index e1ee049..c8417da 100644 --- a/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/__init__.py +++ b/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/__init__.py @@ -1,3 +1,3 @@ # Code generated by smithy-python-codegen DO NOT EDIT. -__version__: str = "0.1.0" +__version__: str = "0.2.0" diff --git a/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/_private/schemas.py b/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/_private/schemas.py index 0cfb2aa..57841b8 100644 --- a/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/_private/schemas.py +++ b/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/_private/schemas.py @@ -1241,6 +1241,186 @@ "target": UNIT, "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="zu-ZA")], }, + "AM_ET": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="am-ET")], + }, + "BE_BY": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="be-BY")], + }, + "BG_BG": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="bg-BG")], + }, + "BN_IN": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="bn-IN")], + }, + "BS_BA": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="bs-BA")], + }, + "CKB_IQ": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="ckb-IQ")], + }, + "CKB_IR": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="ckb-IR")], + }, + "CY_WL": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="cy-WL")], + }, + "ES_MX": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="es-MX")], + }, + "ET_ET": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="et-ET")], + }, + "FA_AF": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="fa-AF")], + }, + "GU_IN": { + "target": UNIT, + "traits": 
[Trait.new(id=ShapeID("smithy.api#enumValue"), value="gu-IN")], + }, + "HT_HT": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="ht-HT")], + }, + "HU_HU": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="hu-HU")], + }, + "HY_AM": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="hy-AM")], + }, + "IS_IS": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="is-IS")], + }, + "JV_ID": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="jv-ID")], + }, + "KA_GE": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="ka-GE")], + }, + "KAB_DZ": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="kab-DZ")], + }, + "KK_KZ": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="kk-KZ")], + }, + "KM_KH": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="km-KH")], + }, + "KN_IN": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="kn-IN")], + }, + "LG_IN": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="lg-IN")], + }, + "LT_LT": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="lt-LT")], + }, + "MK_MK": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="mk-MK")], + }, + "ML_IN": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="ml-IN")], + }, + "MR_IN": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="mr-IN")], + }, + "MY_MM": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="my-MM")], + }, + "NE_NP": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="ne-NP")], + }, + "OR_IN": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="or-IN")], + }, + "PA_IN": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="pa-IN")], + }, + "PS_AF": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="ps-AF")], + }, + "SI_LK": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="si-LK")], + }, + "SL_SI": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="sl-SI")], + }, + "SQ_AL": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="sq-AL")], + }, + "SU_ID": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="su-ID")], + }, + "SW_BI": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="sw-BI")], + }, + "SW_KE": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="sw-KE")], + }, + "SW_RW": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="sw-RW")], + }, + "SW_TZ": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="sw-TZ")], + }, + "SW_UG": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="sw-UG")], + }, + "TA_IN": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="ta-IN")], + }, + "TE_IN": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="te-IN")], + }, + "TR_TR": 
{ + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="tr-TR")], + }, + "UZ_UZ": { + "target": UNIT, + "traits": [Trait.new(id=ShapeID("smithy.api#enumValue"), value="uz-UZ")], + }, }, ) diff --git a/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/client.py b/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/client.py index 9c56080..d8155f9 100644 --- a/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/client.py +++ b/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/client.py @@ -7,6 +7,8 @@ from smithy_core.aio.eventstream import DuplexEventStream from smithy_core.exceptions import ExpectationNotMetError from smithy_core.interceptors import InterceptorChain +from smithy_core.interfaces.retries import RetryStrategy +from smithy_core.retries import RetryStrategyOptions, RetryStrategyResolver from smithy_core.types import TypedProperties from smithy_http.plugins import user_agent_plugin @@ -75,6 +77,8 @@ def __init__( for plugin in client_plugins: plugin(self._config) + self._retry_strategy_resolver = RetryStrategyResolver() + async def get_medical_scribe_stream( self, input: GetMedicalScribeStreamInput, plugins: list[Plugin] | None = None ) -> GetMedicalScribeStreamOutput: @@ -100,6 +104,24 @@ async def get_medical_scribe_stream( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." ) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -109,7 +131,7 @@ async def get_medical_scribe_stream( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline(call) @@ -156,6 +178,24 @@ async def start_call_analytics_stream_transcription( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." 
) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -165,7 +205,7 @@ async def start_call_analytics_stream_transcription( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline.duplex_stream( @@ -236,6 +276,24 @@ async def start_medical_scribe_stream( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." ) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -245,7 +303,7 @@ async def start_medical_scribe_stream( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline.duplex_stream( @@ -297,6 +355,24 @@ async def start_medical_stream_transcription( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." 
) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -306,7 +382,7 @@ async def start_medical_stream_transcription( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline.duplex_stream( @@ -353,6 +429,24 @@ async def start_stream_transcription( raise ExpectationNotMetError( "protocol and transport MUST be set on the config to make calls." ) + + # Resolve retry strategy from config + if isinstance(config.retry_strategy, RetryStrategy): + retry_strategy = config.retry_strategy + elif isinstance(config.retry_strategy, RetryStrategyOptions): + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=config.retry_strategy + ) + elif config.retry_strategy is None: + retry_strategy = await self._retry_strategy_resolver.resolve_retry_strategy( + options=RetryStrategyOptions() + ) + else: + raise TypeError( + f"retry_strategy must be RetryStrategy, RetryStrategyOptions, or None, " + f"got {type(config.retry_strategy).__name__}" + ) + pipeline = RequestPipeline(protocol=config.protocol, transport=config.transport) call = ClientCall( input=input, @@ -362,7 +456,7 @@ async def start_stream_transcription( auth_scheme_resolver=config.auth_scheme_resolver, supported_auth_schemes=config.auth_schemes, endpoint_resolver=config.endpoint_resolver, - retry_strategy=config.retry_strategy, + retry_strategy=retry_strategy, ) return await pipeline.duplex_stream( diff --git a/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/config.py b/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/config.py index 2685f7b..054c513 100644 --- a/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/config.py +++ b/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/config.py @@ -19,7 +19,7 @@ from smithy_core.interceptors import Interceptor from smithy_core.interfaces import URI from smithy_core.interfaces.retries import RetryStrategy -from smithy_core.retries import SimpleRetryStrategy +from smithy_core.retries import RetryStrategyOptions from smithy_core.shapes import ShapeID from smithy_http.aio.crt import AWSCRTHTTPClient from smithy_http.interfaces import HTTPRequestConfiguration @@ -81,7 +81,7 @@ class Config: interceptors: list[_ServiceInterceptor] protocol: ClientProtocol[Any, Any] | None region: str | None - retry_strategy: RetryStrategy + retry_strategy: RetryStrategy | RetryStrategyOptions | None sdk_ua_app_id: str | None transport: ClientTransport[Any, Any] | None user_agent_extra: str | None @@ -104,7 +104,7 @@ def __init__( interceptors: list[_ServiceInterceptor] | None = None, protocol: ClientProtocol[Any, Any] | None = None, region: str | None = None, - 
retry_strategy: RetryStrategy | None = None, + retry_strategy: RetryStrategy | RetryStrategyOptions | None = None, sdk_ua_app_id: str | None = None, transport: ClientTransport[Any, Any] | None = None, user_agent_extra: str | None = None, @@ -152,7 +152,8 @@ def __init__( service endpoint. :param retry_strategy: - The retry strategy for issuing retry tokens and computing retry delays. + The retry strategy or options for configuring retry behavior. Can be either a + configured RetryStrategy or RetryStrategyOptions to create one. :param sdk_ua_app_id: A unique and opaque application ID that is appended to the User-Agent header. @@ -179,7 +180,7 @@ def __init__( self.interceptors = interceptors or [] self.protocol = protocol or RestJsonClientProtocol(_SCHEMA_TRANSCRIBE) self.region = region - self.retry_strategy = retry_strategy or SimpleRetryStrategy() + self.retry_strategy = retry_strategy self.sdk_ua_app_id = sdk_ua_app_id self.transport = transport or AWSCRTHTTPClient() self.user_agent_extra = user_agent_extra diff --git a/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/models.py b/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/models.py index 446f210..be1d07f 100644 --- a/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/models.py +++ b/clients/aws-sdk-transcribe-streaming/src/aws_sdk_transcribe_streaming/models.py @@ -3622,6 +3622,51 @@ class LanguageCode(StrEnum): UK_UA = "uk-UA" VI_VN = "vi-VN" ZU_ZA = "zu-ZA" + AM_ET = "am-ET" + BE_BY = "be-BY" + BG_BG = "bg-BG" + BN_IN = "bn-IN" + BS_BA = "bs-BA" + CKB_IQ = "ckb-IQ" + CKB_IR = "ckb-IR" + CY_WL = "cy-WL" + ES_MX = "es-MX" + ET_ET = "et-ET" + FA_AF = "fa-AF" + GU_IN = "gu-IN" + HT_HT = "ht-HT" + HU_HU = "hu-HU" + HY_AM = "hy-AM" + IS_IS = "is-IS" + JV_ID = "jv-ID" + KA_GE = "ka-GE" + KAB_DZ = "kab-DZ" + KK_KZ = "kk-KZ" + KM_KH = "km-KH" + KN_IN = "kn-IN" + LG_IN = "lg-IN" + LT_LT = "lt-LT" + MK_MK = "mk-MK" + ML_IN = "ml-IN" + MR_IN = "mr-IN" + MY_MM = "my-MM" + NE_NP = "ne-NP" + OR_IN = "or-IN" + PA_IN = "pa-IN" + PS_AF = "ps-AF" + SI_LK = "si-LK" + SL_SI = "sl-SI" + SQ_AL = "sq-AL" + SU_ID = "su-ID" + SW_BI = "sw-BI" + SW_KE = "sw-KE" + SW_RW = "sw-RW" + SW_TZ = "sw-TZ" + SW_UG = "sw-UG" + TA_IN = "ta-IN" + TE_IN = "te-IN" + TR_TR = "tr-TR" + UZ_UZ = "uz-UZ" @dataclass(kw_only=True) diff --git a/clients/aws-sdk-transcribe-streaming/tests/test_protocol.py b/clients/aws-sdk-transcribe-streaming/tests/test_protocol.py index 8d3d501..484ba2c 100644 --- a/clients/aws-sdk-transcribe-streaming/tests/test_protocol.py +++ b/clients/aws-sdk-transcribe-streaming/tests/test_protocol.py @@ -19,6 +19,8 @@ def __init__(self, request: HTTPRequest): class RequestTestHTTPClient: """An asynchronous HTTP client solely for testing purposes.""" + TIMEOUT_EXCEPTIONS = () + def __init__(self, *, client_config: HTTPClientConfiguration | None = None): self._client_config = client_config @@ -35,6 +37,8 @@ async def send( class ResponseTestHTTPClient: """An asynchronous HTTP client solely for testing purposes.""" + TIMEOUT_EXCEPTIONS = () + def __init__( self, *, diff --git a/codegen/aws-models/bedrock-runtime.json b/codegen/aws-models/bedrock-runtime.json index 5036109..c19a306 100644 --- a/codegen/aws-models/bedrock-runtime.json +++ b/codegen/aws-models/bedrock-runtime.json @@ -87,27 +87,27 @@ "builtIn": "AWS::Region", "required": false, "documentation": "The AWS region used to dispatch the request.", - "type": "String" + "type": "string" }, "UseDualStack": { "builtIn": "AWS::UseDualStack", 
"required": true, "default": false, "documentation": "When true, use the dual-stack endpoint. If the configured endpoint does not support dual-stack, dispatching the request MAY return an error.", - "type": "Boolean" + "type": "boolean" }, "UseFIPS": { "builtIn": "AWS::UseFIPS", "required": true, "default": false, "documentation": "When true, send this request to the FIPS-compliant regional endpoint. If the configured endpoint does not have a FIPS compliant endpoint, dispatching the request will return an error.", - "type": "Boolean" + "type": "boolean" }, "Endpoint": { "builtIn": "SDK::Endpoint", "required": false, "documentation": "Override the endpoint used to send this request", - "type": "String" + "type": "string" } }, "rules": [ @@ -706,7 +706,45 @@ "type": "structure", "members": {}, "traits": { - "smithy.api#documentation": "

The model must request at least one tool (no text is generated). For example, {\"any\" : {}}.

" + "smithy.api#documentation": "

The model must request at least one tool (no text is generated). For example, {\"any\" : {}}. For more information, see Call a tool with the Converse API in the Amazon Bedrock User Guide.

" + } + }, + "com.amazonaws.bedrockruntime#AppliedGuardrailDetails": { + "type": "structure", + "members": { + "guardrailId": { + "target": "com.amazonaws.bedrockruntime#GuardrailId", + "traits": { + "smithy.api#documentation": "

The unique ID of the guardrail that was applied.

" + } + }, + "guardrailVersion": { + "target": "com.amazonaws.bedrockruntime#GuardrailVersion", + "traits": { + "smithy.api#documentation": "

The version of the guardrail that was applied.

" + } + }, + "guardrailArn": { + "target": "com.amazonaws.bedrockruntime#GuardrailArn", + "traits": { + "smithy.api#documentation": "

The ARN of the guardrail that was applied.

" + } + }, + "guardrailOrigin": { + "target": "com.amazonaws.bedrockruntime#GuardrailOriginList", + "traits": { + "smithy.api#documentation": "

The origin of how the guardrail was applied. This can be either requested at the API level or enforced at the account or organization level as a default guardrail.

" + } + }, + "guardrailOwnership": { + "target": "com.amazonaws.bedrockruntime#GuardrailOwnership", + "traits": { + "smithy.api#documentation": "

The ownership type of the guardrail, indicating whether it is owned by the requesting account or is a cross-account guardrail shared from another AWS account.

" + } + } + }, + "traits": { + "smithy.api#documentation": "

Details about the specific guardrail that was applied during this assessment, including its identifier, version, ARN, origin, and ownership information.

" } }, "com.amazonaws.bedrockruntime#ApplyGuardrail": { @@ -1034,7 +1072,7 @@ "type": "structure", "members": {}, "traits": { - "smithy.api#documentation": "

The Model automatically decides if a tool should be called or whether to generate text instead. For example, {\"auto\" : {}}.

" + "smithy.api#documentation": "

The Model automatically decides if a tool should be called or whether to generate text instead. For example, {\"auto\" : {}}. For more information, see Call a tool with the Converse API in the Amazon Bedrock User Guide

" } }, "com.amazonaws.bedrockruntime#AutomatedReasoningRuleIdentifier": { @@ -1120,6 +1158,12 @@ "smithy.api#documentation": "

The title or identifier of the source document being cited.

" } }, + "source": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

The source from the original search result that provided the cited content.

" + } + }, "sourceContent": { "target": "com.amazonaws.bedrockruntime#CitationSourceContentList", "traits": { @@ -1160,6 +1204,12 @@ "com.amazonaws.bedrockruntime#CitationLocation": { "type": "union", "members": { + "web": { + "target": "com.amazonaws.bedrockruntime#WebLocation", + "traits": { + "smithy.api#documentation": "

The web URL that was cited for this reference.

" + } + }, "documentChar": { "target": "com.amazonaws.bedrockruntime#DocumentCharLocation", "traits": { @@ -1177,6 +1227,12 @@ "traits": { "smithy.api#documentation": "

The chunk-level location within the document where the cited content is found, typically used for documents that have been segmented into logical chunks.

" } + }, + "searchResultLocation": { + "target": "com.amazonaws.bedrockruntime#SearchResultLocation", + "traits": { + "smithy.api#documentation": "

The search result location where the cited content is found, including the search result index and block positions within the content array.

" + } } }, "traits": { @@ -1235,7 +1291,7 @@ "enabled": { "target": "smithy.api#Boolean", "traits": { - "smithy.api#documentation": "

Specifies whether document citations should be included in the model's response. When set to true, the model can generate citations that reference the source documents used to inform the response.

", + "smithy.api#documentation": "

Specifies whether citations from the selected document should be used in the model's response. When set to true, the model can generate citations that reference the source documents used to inform the response.

", "smithy.api#required": {} } } @@ -1273,6 +1329,12 @@ "smithy.api#documentation": "

The title or identifier of the source document being cited.

" } }, + "source": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

The source from the original search result that provided the cited content.

" + } + }, "sourceContent": { "target": "com.amazonaws.bedrockruntime#CitationSourceContentListDelta", "traits": { @@ -1345,7 +1407,7 @@ "guardContent": { "target": "com.amazonaws.bedrockruntime#GuardrailConverseContentBlock", "traits": { - "smithy.api#documentation": "

Contains the content to assess with the guardrail. If you don't specify guardContent in a call to the Converse API, the guardrail (if passed in the Converse API) assesses the entire message.

For more information, see Use a guardrail with the Converse API in the Amazon Bedrock User Guide.

" + "smithy.api#documentation": "

Contains the content to assess with the guardrail. If you don't specify guardContent in a call to the Converse API, the guardrail (if passed in the Converse API) assesses the entire message.

For more information, see Use a guardrail with the Converse API in the Amazon Bedrock User Guide.

" } }, "cachePoint": { @@ -1365,6 +1427,12 @@ "traits": { "smithy.api#documentation": "

A content block that contains both generated text and associated citation information, providing traceability between the response and source documents.

" } + }, + "searchResult": { + "target": "com.amazonaws.bedrockruntime#SearchResultBlock", + "traits": { + "smithy.api#documentation": "

Search result to include in the message.

" + } } }, "traits": { @@ -1386,6 +1454,12 @@ "smithy.api#documentation": "

Information about a tool that the model is requesting to use.

" } }, + "toolResult": { + "target": "com.amazonaws.bedrockruntime#ToolResultBlocksDelta", + "traits": { + "smithy.api#documentation": "

An incremental update that contains the results from a tool call.

" + } + }, "reasoningContent": { "target": "com.amazonaws.bedrockruntime#ReasoningContentBlockDelta", "traits": { @@ -1433,6 +1507,12 @@ "traits": { "smithy.api#documentation": "

Information about a tool that the model is requesting to use.

" } + }, + "toolResult": { + "target": "com.amazonaws.bedrockruntime#ToolResultBlockStart", + "traits": { + "smithy.api#documentation": "

The

" + } } }, "traits": { @@ -1657,6 +1737,12 @@ "traits": { "smithy.api#documentation": "

Model performance settings for the request.

" } + }, + "serviceTier": { + "target": "com.amazonaws.bedrockruntime#ServiceTier", + "traits": { + "smithy.api#documentation": "

Specifies the processing tier configuration used for serving the request.

" + } } }, "traits": { @@ -1711,6 +1797,12 @@ "traits": { "smithy.api#documentation": "

Model performance settings for the request.

" } + }, + "serviceTier": { + "target": "com.amazonaws.bedrockruntime#ServiceTier", + "traits": { + "smithy.api#documentation": "

Specifies the processing tier configuration used for serving the request.

" + } } }, "traits": { @@ -1791,6 +1883,12 @@ "traits": { "smithy.api#documentation": "

Model performance configuration metadata for the conversation stream event.

" } + }, + "serviceTier": { + "target": "com.amazonaws.bedrockruntime#ServiceTier", + "traits": { + "smithy.api#documentation": "

Specifies the processing tier configuration used for serving the request.

" + } } }, "traits": { @@ -1866,13 +1964,13 @@ "validationException": { "target": "com.amazonaws.bedrockruntime#ValidationException", "traits": { - "smithy.api#documentation": "

The input fails to satisfy the constraints specified by Amazon Bedrock. For troubleshooting this error, see ValidationError in the Amazon Bedrock User Guide

" + "smithy.api#documentation": "

The input fails to satisfy the constraints specified by Amazon Bedrock. For troubleshooting this error, see ValidationError in the Amazon Bedrock User Guide.

" } }, "throttlingException": { "target": "com.amazonaws.bedrockruntime#ThrottlingException", "traits": { - "smithy.api#documentation": "

Your request was denied due to exceeding the account quotas for Amazon Bedrock. For troubleshooting this error, see ThrottlingException in the Amazon Bedrock User Guide

" + "smithy.api#documentation": "

Your request was denied due to exceeding the account quotas for Amazon Bedrock. For troubleshooting this error, see ThrottlingException in the Amazon Bedrock User Guide.

" } }, "serviceUnavailableException": { @@ -1960,6 +2058,12 @@ "traits": { "smithy.api#documentation": "

Model performance settings for the request.

" } + }, + "serviceTier": { + "target": "com.amazonaws.bedrockruntime#ServiceTier", + "traits": { + "smithy.api#documentation": "

Specifies the processing tier configuration used for serving the request.

" + } } }, "traits": { @@ -1998,7 +2102,7 @@ } }, "traits": { - "smithy.api#documentation": "

The trace object in a response from ConverseStream. Currently, you can only trace guardrails.

" + "smithy.api#documentation": "

The trace object in a response from ConverseStream.

" } }, "com.amazonaws.bedrockruntime#ConverseTokensRequest": { @@ -2015,6 +2119,18 @@ "traits": { "smithy.api#documentation": "

The system content blocks to count tokens for. System content provides instructions or context to the model about how it should behave or respond. The token count will include any system content provided.

" } + }, + "toolConfig": { + "target": "com.amazonaws.bedrockruntime#ToolConfiguration", + "traits": { + "smithy.api#documentation": "

The toolConfig of Converse input request to count tokens for. Configuration information for the tools that the model can use when generating a response.

" + } + }, + "additionalModelRequestFields": { + "target": "smithy.api#Document", + "traits": { + "smithy.api#documentation": "

The additionalModelRequestFields of Converse input request to count tokens for. Use this field when you want to pass additional parameters that the model supports.

" + } } }, "traits": { @@ -2038,7 +2154,7 @@ } }, "traits": { - "smithy.api#documentation": "

The trace object in a response from Converse. Currently, you can only trace guardrails.

" + "smithy.api#documentation": "

The trace object in a response from Converse.

" } }, "com.amazonaws.bedrockruntime#CountTokens": { @@ -2545,6 +2661,15 @@ } } }, + "com.amazonaws.bedrockruntime#GuardrailArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "max": 2048 + }, + "smithy.api#pattern": "^(arn:aws(-[^:]+)?:bedrock:[a-z0-9-]{1,20}:[0-9]{12}:guardrail/[a-z0-9]+)$" + } + }, "com.amazonaws.bedrockruntime#GuardrailAssessment": { "type": "structure", "members": { @@ -2589,6 +2714,9 @@ "traits": { "smithy.api#documentation": "

The invocation metrics for the guardrail assessment.

" } + }, + "appliedGuardrailDetails": { + "target": "com.amazonaws.bedrockruntime#AppliedGuardrailDetails" } }, "traits": { @@ -3096,15 +3224,17 @@ "guardrailIdentifier": { "target": "com.amazonaws.bedrockruntime#GuardrailIdentifier", "traits": { - "smithy.api#documentation": "

The identifier for the guardrail.

", - "smithy.api#required": {} + "smithy.api#addedDefault": {}, + "smithy.api#default": "", + "smithy.api#documentation": "

The identifier for the guardrail.

" } }, "guardrailVersion": { "target": "com.amazonaws.bedrockruntime#GuardrailVersion", "traits": { - "smithy.api#documentation": "

The version of the guardrail.

", - "smithy.api#required": {} + "smithy.api#addedDefault": {}, + "smithy.api#default": "", + "smithy.api#documentation": "

The version of the guardrail.

" } }, "trace": { @@ -3607,7 +3737,7 @@ } }, "traits": { - "smithy.api#documentation": "

A text block that contains text that you want to assess with a guardrail. For more information, see GuardrailConverseContentBlock.

" + "smithy.api#documentation": "

A text block that contains text that you want to assess with a guardrail. For more information, see GuardrailConverseContentBlock.

" } }, "com.amazonaws.bedrockruntime#GuardrailCoverage": { @@ -3664,13 +3794,22 @@ "target": "com.amazonaws.bedrockruntime#GuardrailCustomWord" } }, + "com.amazonaws.bedrockruntime#GuardrailId": { + "type": "string", + "traits": { + "smithy.api#length": { + "max": 2048 + }, + "smithy.api#pattern": "^([a-z0-9]+)$" + } + }, "com.amazonaws.bedrockruntime#GuardrailIdentifier": { "type": "string", "traits": { "smithy.api#length": { "max": 2048 }, - "smithy.api#pattern": "^(([a-z0-9]+)|(arn:aws(-[^:]+)?:bedrock:[a-z0-9-]{1,20}:[0-9]{12}:guardrail/[a-z0-9]+))$" + "smithy.api#pattern": "^(|([a-z0-9]+)|(arn:aws(-[^:]+)?:bedrock:[a-z0-9-]{1,20}:[0-9]{12}:guardrail/[a-z0-9]+))$" } }, "com.amazonaws.bedrockruntime#GuardrailImageBlock": { @@ -3829,6 +3968,35 @@ } } }, + "com.amazonaws.bedrockruntime#GuardrailOrigin": { + "type": "enum", + "members": { + "REQUEST": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "REQUEST" + } + }, + "ACCOUNT_ENFORCED": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "ACCOUNT_ENFORCED" + } + }, + "ORGANIZATION_ENFORCED": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "ORGANIZATION_ENFORCED" + } + } + } + }, + "com.amazonaws.bedrockruntime#GuardrailOriginList": { + "type": "list", + "member": { + "target": "com.amazonaws.bedrockruntime#GuardrailOrigin" + } + }, "com.amazonaws.bedrockruntime#GuardrailOutputContent": { "type": "structure", "members": { @@ -3869,6 +4037,23 @@ "com.amazonaws.bedrockruntime#GuardrailOutputText": { "type": "string" }, + "com.amazonaws.bedrockruntime#GuardrailOwnership": { + "type": "enum", + "members": { + "SELF": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "SELF" + } + }, + "CROSS_ACCOUNT": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "CROSS_ACCOUNT" + } + } + } + }, "com.amazonaws.bedrockruntime#GuardrailPiiEntityFilter": { "type": "structure", "members": { @@ -4199,7 +4384,7 @@ } }, "traits": { - "smithy.api#documentation": "

The assessment for aPersonally Identifiable Information (PII) policy.

" + "smithy.api#documentation": "

The assessment for a Personally Identifiable Information (PII) policy.

" } }, "com.amazonaws.bedrockruntime#GuardrailSensitiveInformationPolicyFreeUnitsProcessed": { @@ -4214,15 +4399,17 @@ "guardrailIdentifier": { "target": "com.amazonaws.bedrockruntime#GuardrailIdentifier", "traits": { - "smithy.api#documentation": "

The identifier for the guardrail.

", - "smithy.api#required": {} + "smithy.api#addedDefault": {}, + "smithy.api#default": "", + "smithy.api#documentation": "

The identifier for the guardrail.

" } }, "guardrailVersion": { "target": "com.amazonaws.bedrockruntime#GuardrailVersion", "traits": { - "smithy.api#documentation": "

The version of the guardrail.

", - "smithy.api#required": {} + "smithy.api#addedDefault": {}, + "smithy.api#default": "", + "smithy.api#documentation": "

The version of the guardrail.

" } }, "trace": { @@ -4241,7 +4428,7 @@ } }, "traits": { - "smithy.api#documentation": "

Configuration information for a guardrail that you use with the ConverseStream action.

" + "smithy.api#documentation": "

Configuration information for a guardrail that you use with the ConverseStream action.

" } }, "com.amazonaws.bedrockruntime#GuardrailStreamProcessingMode": { @@ -4441,7 +4628,7 @@ } }, "traits": { - "smithy.api#documentation": "

A Top level guardrail trace object. For more information, see ConverseTrace.

" + "smithy.api#documentation": "

A Top level guardrail trace object. For more information, see ConverseTrace.

" } }, "com.amazonaws.bedrockruntime#GuardrailUsage": { @@ -4515,7 +4702,7 @@ "com.amazonaws.bedrockruntime#GuardrailVersion": { "type": "string", "traits": { - "smithy.api#pattern": "^(([1-9][0-9]{0,7})|(DRAFT))$" + "smithy.api#pattern": "^(|([1-9][0-9]{0,7})|(DRAFT))$" } }, "com.amazonaws.bedrockruntime#GuardrailWordPolicyAction": { @@ -4848,6 +5035,13 @@ "smithy.api#documentation": "

Model performance settings for the request.

", "smithy.api#httpHeader": "X-Amzn-Bedrock-PerformanceConfig-Latency" } + }, + "serviceTier": { + "target": "com.amazonaws.bedrockruntime#ServiceTierType", + "traits": { + "smithy.api#documentation": "

Specifies the processing tier type used for serving the request.

", + "smithy.api#httpHeader": "X-Amzn-Bedrock-Service-Tier" + } } }, "traits": { @@ -4879,6 +5073,13 @@ "smithy.api#documentation": "

Model performance settings for the request.

", "smithy.api#httpHeader": "X-Amzn-Bedrock-PerformanceConfig-Latency" } + }, + "serviceTier": { + "target": "com.amazonaws.bedrockruntime#ServiceTierType", + "traits": { + "smithy.api#documentation": "

Specifies the processing tier type used for serving the request.

", + "smithy.api#httpHeader": "X-Amzn-Bedrock-Service-Tier" + } } }, "traits": { @@ -5170,6 +5371,13 @@ "smithy.api#documentation": "

Model performance settings for the request.

", "smithy.api#httpHeader": "X-Amzn-Bedrock-PerformanceConfig-Latency" } + }, + "serviceTier": { + "target": "com.amazonaws.bedrockruntime#ServiceTierType", + "traits": { + "smithy.api#documentation": "

Specifies the processing tier type used for serving the request.

", + "smithy.api#httpHeader": "X-Amzn-Bedrock-Service-Tier" + } } }, "traits": { @@ -5201,6 +5409,13 @@ "smithy.api#documentation": "

Model performance settings for the request.

", "smithy.api#httpHeader": "X-Amzn-Bedrock-PerformanceConfig-Latency" } + }, + "serviceTier": { + "target": "com.amazonaws.bedrockruntime#ServiceTierType", + "traits": { + "smithy.api#documentation": "

Specifies the processing tier type used for serving the request.

", + "smithy.api#httpHeader": "X-Amzn-Bedrock-Service-Tier" + } } }, "traits": { @@ -5834,6 +6049,97 @@ "smithy.api#pattern": "^s3://[a-z0-9][\\.\\-a-z0-9]{1,61}[a-z0-9](/.*)?$" } }, + "com.amazonaws.bedrockruntime#SearchResultBlock": { + "type": "structure", + "members": { + "source": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

The source URL or identifier for the content.

", + "smithy.api#required": {} + } + }, + "title": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

A descriptive title for the search result.

", + "smithy.api#required": {} + } + }, + "content": { + "target": "com.amazonaws.bedrockruntime#SearchResultContentBlocks", + "traits": { + "smithy.api#documentation": "

An array of search result content block.

", + "smithy.api#required": {} + } + }, + "citations": { + "target": "com.amazonaws.bedrockruntime#CitationsConfig", + "traits": { + "smithy.api#documentation": "

Configuration setting for citations

" + } + } + }, + "traits": { + "smithy.api#documentation": "

A search result block that enables natural citations with proper source attribution for retrieved content.

This field is only supported by Anthropic Claude Opus 4.1, Opus 4, Sonnet 4.5, Sonnet 4, Sonnet 3.7, and 3.5 Haiku models.

" + } + }, + "com.amazonaws.bedrockruntime#SearchResultContentBlock": { + "type": "structure", + "members": { + "text": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

The actual text content

", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

A block within a search result that contains the content.

" + } + }, + "com.amazonaws.bedrockruntime#SearchResultContentBlocks": { + "type": "list", + "member": { + "target": "com.amazonaws.bedrockruntime#SearchResultContentBlock" + } + }, + "com.amazonaws.bedrockruntime#SearchResultLocation": { + "type": "structure", + "members": { + "searchResultIndex": { + "target": "smithy.api#Integer", + "traits": { + "smithy.api#documentation": "

The index of the search result content block where the cited content is found.

", + "smithy.api#range": { + "min": 0 + } + } + }, + "start": { + "target": "smithy.api#Integer", + "traits": { + "smithy.api#documentation": "

The starting position in the content array where the cited content begins.

", + "smithy.api#range": { + "min": 0 + } + } + }, + "end": { + "target": "smithy.api#Integer", + "traits": { + "smithy.api#documentation": "

The ending position in the content array where the cited content ends.

", + "smithy.api#range": { + "min": 0 + } + } + } + }, + "traits": { + "smithy.api#documentation": "

Specifies a search result location within the content array, providing positioning information for cited content using search result index and block positions.

" + } + }, "com.amazonaws.bedrockruntime#ServiceQuotaExceededException": { "type": "structure", "members": { @@ -5847,6 +6153,44 @@ "smithy.api#httpError": 400 } }, + "com.amazonaws.bedrockruntime#ServiceTier": { + "type": "structure", + "members": { + "type": { + "target": "com.amazonaws.bedrockruntime#ServiceTierType", + "traits": { + "smithy.api#documentation": "

Specifies the processing tier type used for serving the request.

", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

Specifies the processing tier configuration used for serving the request.

" + } + }, + "com.amazonaws.bedrockruntime#ServiceTierType": { + "type": "enum", + "members": { + "PRIORITY": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "priority" + } + }, + "DEFAULT": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "default" + } + }, + "FLEX": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "flex" + } + } + } + }, "com.amazonaws.bedrockruntime#ServiceUnavailableException": { "type": "structure", "members": { @@ -5900,7 +6244,7 @@ } }, "traits": { - "smithy.api#documentation": "

The model must request a specific tool. For example, {\"tool\" : {\"name\" : \"Your tool name\"}}.

This field is only supported by Anthropic Claude 3 models.

" + "smithy.api#documentation": "

The model must request a specific tool. For example, {\"tool\" : {\"name\" : \"Your tool name\"}}. For more information, see Call a tool with the Converse API in the Amazon Bedrock User Guide

This field is only supported by Anthropic Claude 3 models.

" } }, "com.amazonaws.bedrockruntime#StartAsyncInvoke": { @@ -6083,7 +6427,7 @@ } }, "traits": { - "smithy.api#documentation": "

A system content block.

" + "smithy.api#documentation": "

Contains configurations for instructions to provide the model for how to handle input. To learn more, see Using the Converse API.

" } }, "com.amazonaws.bedrockruntime#SystemContentBlocks": { @@ -6092,6 +6436,21 @@ "target": "com.amazonaws.bedrockruntime#SystemContentBlock" } }, + "com.amazonaws.bedrockruntime#SystemTool": { + "type": "structure", + "members": { + "name": { + "target": "com.amazonaws.bedrockruntime#ToolName", + "traits": { + "smithy.api#documentation": "

The name of the system-defined tool that you want to call.

", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

Specifies a system-defined tool for the model to use. System-defined tools are tools that are created and provided by the model provider.

" + } + }, "com.amazonaws.bedrockruntime#Tag": { "type": "structure", "members": { @@ -6241,7 +6600,13 @@ "toolSpec": { "target": "com.amazonaws.bedrockruntime#ToolSpecification", "traits": { - "smithy.api#documentation": "

The specfication for the tool.

" + "smithy.api#documentation": "

The specfication for the tool.

" + } + }, + "systemTool": { + "target": "com.amazonaws.bedrockruntime#SystemTool", + "traits": { + "smithy.api#documentation": "

Specifies the system-defined tool that you want use.

" } }, "cachePoint": { @@ -6252,7 +6617,7 @@ } }, "traits": { - "smithy.api#documentation": "

Information about a tool that you can use with the Converse API. For more information, see Tool use (function calling) in the Amazon Bedrock User Guide.

" + "smithy.api#documentation": "

Information about a tool that you can use with the Converse API. For more information, see Call a tool with the Converse API in the Amazon Bedrock User Guide.

" } }, "com.amazonaws.bedrockruntime#ToolChoice": { @@ -6273,12 +6638,12 @@ "tool": { "target": "com.amazonaws.bedrockruntime#SpecificToolChoice", "traits": { - "smithy.api#documentation": "

The Model must request the specified tool. Only supported by Anthropic Claude 3 models.

" + "smithy.api#documentation": "

The Model must request the specified tool. Only supported by Anthropic Claude 3 and Amazon Nova models.

" } } }, "traits": { - "smithy.api#documentation": "

Determines which tools the model should request in a call to Converse or ConverseStream. ToolChoice is only supported by Anthropic Claude 3 models and by Mistral AI Mistral Large.

" + "smithy.api#documentation": "

Determines which tools the model should request in a call to Converse or ConverseStream. For more information, see Call a tool with the Converse API in the Amazon Bedrock User Guide.

" } }, "com.amazonaws.bedrockruntime#ToolConfiguration": { @@ -6287,7 +6652,7 @@ "tools": { "target": "com.amazonaws.bedrockruntime#Tools", "traits": { - "smithy.api#documentation": "

An array of tools that you want to pass to a model.

", + "smithy.api#documentation": "

An array of tools that you want to pass to a model.

", "smithy.api#length": { "min": 1 }, @@ -6297,7 +6662,7 @@ "toolChoice": { "target": "com.amazonaws.bedrockruntime#ToolChoice", "traits": { - "smithy.api#documentation": "

If supported by model, forces the model to request a tool.

" + "smithy.api#documentation": "

If supported by model, forces the model to request a tool.

" } } }, @@ -6316,7 +6681,7 @@ } }, "traits": { - "smithy.api#documentation": "

The schema for the tool. The top level schema type must be object.

" + "smithy.api#documentation": "

The schema for the tool. The top level schema type must be object. For more information, see Call a tool with the Converse API in the Amazon Bedrock User Guide.

" } }, "com.amazonaws.bedrockruntime#ToolName": { @@ -6335,7 +6700,7 @@ "toolUseId": { "target": "com.amazonaws.bedrockruntime#ToolUseId", "traits": { - "smithy.api#documentation": "

The ID of the tool request that this is the result for.

", + "smithy.api#documentation": "

The ID of the tool request that this is the result for.

", "smithy.api#required": {} } }, @@ -6349,12 +6714,65 @@ "status": { "target": "com.amazonaws.bedrockruntime#ToolResultStatus", "traits": { - "smithy.api#documentation": "

The status for the tool result content block.

This field is only supported Anthropic Claude 3 models.

" + "smithy.api#documentation": "

The status for the tool result content block.

This field is only supported by Amazon Nova and Anthropic Claude 3 and 4 models.

" + } + }, + "type": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

The type for the tool result content block.

" + } + } + }, + "traits": { + "smithy.api#documentation": "

A tool result block that contains the results for a tool request that the model previously made. For more information, see Call a tool with the Converse API in the Amazon Bedrock User Guide.

" + } + }, + "com.amazonaws.bedrockruntime#ToolResultBlockDelta": { + "type": "union", + "members": { + "text": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

The reasoning the model used to return the output.

" + } + } + }, + "traits": { + "smithy.api#documentation": "

Contains incremental updates to tool results information during streaming responses. This allows clients to build up tool results data progressively as the response is generated.

" + } + }, + "com.amazonaws.bedrockruntime#ToolResultBlockStart": { + "type": "structure", + "members": { + "toolUseId": { + "target": "com.amazonaws.bedrockruntime#ToolUseId", + "traits": { + "smithy.api#documentation": "

The ID of the tool that was used to generate this tool result block.

", + "smithy.api#required": {} + } + }, + "type": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

The type for the tool that was used to generate this tool result block.

" + } + }, + "status": { + "target": "com.amazonaws.bedrockruntime#ToolResultStatus", + "traits": { + "smithy.api#documentation": "

The status of the tool result block.

" } } }, "traits": { - "smithy.api#documentation": "

A tool result block that contains the results for a tool request that the model previously made.

" + "smithy.api#documentation": "

The start of a tool result block. For more information, see Call a tool with the Converse API in the Amazon Bedrock User Guide.

" + } + }, + "com.amazonaws.bedrockruntime#ToolResultBlocksDelta": { + "type": "list", + "member": { + "target": "com.amazonaws.bedrockruntime#ToolResultBlockDelta" } }, "com.amazonaws.bedrockruntime#ToolResultContentBlock": { @@ -6363,19 +6781,19 @@ "json": { "target": "smithy.api#Document", "traits": { - "smithy.api#documentation": "

A tool result that is JSON format data.

" + "smithy.api#documentation": "

A tool result that is JSON format data.

" } }, "text": { "target": "smithy.api#String", "traits": { - "smithy.api#documentation": "

A tool result that is text.

" + "smithy.api#documentation": "

A tool result that is text.

" } }, "image": { "target": "com.amazonaws.bedrockruntime#ImageBlock", "traits": { - "smithy.api#documentation": "

A tool result that is an image.

This field is only supported by Anthropic Claude 3 models.

" + "smithy.api#documentation": "

A tool result that is an image.

This field is only supported by Amazon Nova and Anthropic Claude 3 and 4 models.

" } }, "document": { @@ -6389,10 +6807,16 @@ "traits": { "smithy.api#documentation": "

A tool result that is video.

" } + }, + "searchResult": { + "target": "com.amazonaws.bedrockruntime#SearchResultBlock", + "traits": { + "smithy.api#documentation": "

A tool result that is a search result.

" + } } }, "traits": { - "smithy.api#documentation": "

The tool result content block.

" + "smithy.api#documentation": "

The tool result content block. For more information, see Call a tool with the Converse API in the Amazon Bedrock User Guide.

" } }, "com.amazonaws.bedrockruntime#ToolResultContentBlocks": { @@ -6443,7 +6867,7 @@ } }, "traits": { - "smithy.api#documentation": "

The specification for the tool.

" + "smithy.api#documentation": "

The specification for the tool. For more information, see Call a tool with the Converse API in the Amazon Bedrock User Guide.

" } }, "com.amazonaws.bedrockruntime#ToolUseBlock": { @@ -6469,10 +6893,16 @@ "smithy.api#documentation": "

The input to pass to the tool.

", "smithy.api#required": {} } + }, + "type": { + "target": "com.amazonaws.bedrockruntime#ToolUseType", + "traits": { + "smithy.api#documentation": "

The type for the tool request.

" + } } }, "traits": { - "smithy.api#documentation": "

A tool use content block. Contains information about a tool that the model is requesting be run., The model uses the result from the tool to generate a response.

" + "smithy.api#documentation": "

A tool use content block. Contains information about a tool that the model is requesting be run., The model uses the result from the tool to generate a response. For more information, see Call a tool with the Converse API in the Amazon Bedrock User Guide.

" } }, "com.amazonaws.bedrockruntime#ToolUseBlockDelta": { @@ -6506,10 +6936,16 @@ "smithy.api#documentation": "

The name of the tool that the model is requesting to use.

", "smithy.api#required": {} } + }, + "type": { + "target": "com.amazonaws.bedrockruntime#ToolUseType", + "traits": { + "smithy.api#documentation": "

The type for the tool request.

" + } } }, "traits": { - "smithy.api#documentation": "

The start of a tool use block.

" + "smithy.api#documentation": "

The start of a tool use block. For more information, see Call a tool with the Converse API in the Amazon Bedrock User Guide.

" } }, "com.amazonaws.bedrockruntime#ToolUseId": { @@ -6522,6 +6958,17 @@ "smithy.api#pattern": "^[a-zA-Z0-9_-]+$" } }, + "com.amazonaws.bedrockruntime#ToolUseType": { + "type": "enum", + "members": { + "SERVER_TOOL_USE": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "server_tool_use" + } + } + } + }, "com.amazonaws.bedrockruntime#Tools": { "type": "list", "member": { @@ -6667,6 +7114,26 @@ "traits": { "smithy.api#documentation": "

A video source. You can upload a smaller video as a base64-encoded string as long as the encoded file is less than 25MB. You can also transfer videos up to 1GB in size from an S3 bucket.

" } + }, + "com.amazonaws.bedrockruntime#WebLocation": { + "type": "structure", + "members": { + "url": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

The URL that was cited when performing a web search.

" + } + }, + "domain": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

The domain that was cited when performing a web search.

" + } + } + }, + "traits": { + "smithy.api#documentation": "

Provides the URL and domain information for the website that was cited when performing a web search.

" + } } } } \ No newline at end of file diff --git a/codegen/aws-models/sagemaker-runtime-http2.json b/codegen/aws-models/sagemaker-runtime-http2.json new file mode 100644 index 0000000..28baf1f --- /dev/null +++ b/codegen/aws-models/sagemaker-runtime-http2.json @@ -0,0 +1,2461 @@ +{ + "smithy": "2.0", + "shapes": { + "com.amazonaws.sagemakerruntimehttp2#AmazonSageMakerRuntimeHttp2": { + "type": "service", + "version": "2025-10-01", + "operations": [ + { + "target": "com.amazonaws.sagemakerruntimehttp2#InvokeEndpointWithBidirectionalStream" + } + ], + "traits": { + "aws.api#service": { + "sdkId": "SageMaker Runtime HTTP2", + "arnNamespace": "sagemaker", + "cloudFormationName": "SageMakerRuntime", + "endpointPrefix": "runtime.sagemaker" + }, + "aws.auth#sigv4": { + "name": "sagemaker" + }, + "aws.endpoints#standardRegionalEndpoints": { + "partitionSpecialCases": { + "aws": [ + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": false, + "dualStack": false + }, + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": false, + "dualStack": true + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": true, + "dualStack": false + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": true, + "dualStack": true + } + ], + "aws-cn": [ + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": false, + "dualStack": false + }, + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": false, + "dualStack": true + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": true, + "dualStack": false + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": true, + "dualStack": true + } + ], + "aws-us-gov": [ + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": false, + "dualStack": false + }, + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": false, + "dualStack": true + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": true, + "dualStack": false + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": true, + "dualStack": true + } + ], + "aws-iso": [ + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": false, + "dualStack": false + }, + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": false, + "dualStack": true + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": true, + "dualStack": false + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": true, + "dualStack": true + } + ], + "aws-iso-b": [ + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": false, + "dualStack": false + }, + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": false, + "dualStack": true + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": true, + "dualStack": false + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": true, + "dualStack": true + } + ], + "aws-iso-f": [ + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": false, + "dualStack": false + }, + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": false, + 
"dualStack": true + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": true, + "dualStack": false + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": true, + "dualStack": true + } + ], + "aws-eusc": [ + { + "endpoint": "https://{service}.{region}.{dnsSuffix}:8443", + "fips": false, + "dualStack": false + }, + { + "endpoint": "https://{service}.{region}.{dualStackDnsSuffix}:8443", + "fips": false, + "dualStack": true + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dnsSuffix}:8443", + "fips": true, + "dualStack": false + }, + { + "endpoint": "https://runtime-fips.sagemaker.{region}.{dualStackDnsSuffix}:8443", + "fips": true, + "dualStack": true + } + ] + } + }, + "aws.protocols#restJson1": { + "http": [ + "h2" + ], + "eventStreamHttp": [ + "h2" + ] + }, + "smithy.api#documentation": "

The Amazon SageMaker AI runtime HTTP/2 API.

", + "smithy.api#title": "Amazon SageMaker Runtime HTTP2", + "smithy.rules#endpointRuleSet": { + "version": "1.0", + "parameters": { + "UseDualStack": { + "builtIn": "AWS::UseDualStack", + "required": true, + "default": false, + "documentation": "When true, use the dual-stack endpoint. If the configured endpoint does not support dual-stack, dispatching the request MAY return an error.", + "type": "boolean" + }, + "UseFIPS": { + "builtIn": "AWS::UseFIPS", + "required": true, + "default": false, + "documentation": "When true, send this request to the FIPS-compliant regional endpoint. If the configured endpoint does not have a FIPS compliant endpoint, dispatching the request will return an error.", + "type": "boolean" + }, + "Endpoint": { + "builtIn": "SDK::Endpoint", + "required": false, + "documentation": "Override the endpoint used to send this request", + "type": "string" + }, + "Region": { + "builtIn": "AWS::Region", + "required": false, + "documentation": "The AWS region used to dispatch the request.", + "type": "string" + } + }, + "rules": [ + { + "conditions": [ + { + "fn": "isSet", + "argv": [ + { + "ref": "Endpoint" + } + ] + } + ], + "rules": [ + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + } + ], + "error": "Invalid Configuration: FIPS and custom endpoint are not supported", + "type": "error" + }, + { + "conditions": [], + "rules": [ + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "error": "Invalid Configuration: Dualstack and custom endpoint are not supported", + "type": "error" + }, + { + "conditions": [], + "endpoint": { + "url": { + "ref": "Endpoint" + }, + "properties": {}, + "headers": {} + }, + "type": "endpoint" + } + ], + "type": "tree" + } + ], + "type": "tree" + }, + { + "conditions": [], + "rules": [ + { + "conditions": [ + { + "fn": "isSet", + "argv": [ + { + "ref": "Region" + } + ] + } + ], + "rules": [ + { + "conditions": [ + { + "fn": "aws.partition", + "argv": [ + { + "ref": "Region" + } + ], + "assign": "PartitionResult" + } + ], + "rules": [ + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": 
"https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-cn" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-cn" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-cn" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-cn" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-us-gov" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-us-gov" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": 
"booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-us-gov" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-us-gov" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-iso" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-iso" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-iso" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-iso" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + 
"aws-iso-b" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-iso-b" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-iso-b" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-iso-b" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-iso-f" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-iso-f" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-iso-f" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": 
"stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-iso-f" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-eusc" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-eusc" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-eusc" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "endpoint": { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + }, + "aws-eusc" + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "endpoint": { + "url": "https://runtime-fips.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}:8443", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "rules": [ + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + true, + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "supportsFIPS" + ] + } + ] + }, + { + "fn": "booleanEquals", + "argv": [ + true, + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "supportsDualStack" + ] + } + ] + } + ], + "rules": [ + { + "conditions": [], + "endpoint": { + "url": "https://runtime.sagemaker-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + } + ], + "type": "tree" + }, + { + "conditions": [], + "error": "FIPS and DualStack are enabled, but this partition does not support one or both", + "type": "error" + } + ], + "type": "tree" + }, + { + 
"conditions": [ + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + true + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + false + ] + } + ], + "rules": [ + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "supportsFIPS" + ] + }, + true + ] + } + ], + "rules": [ + { + "conditions": [], + "endpoint": { + "url": "https://runtime.sagemaker-fips.{Region}.{PartitionResult#dnsSuffix}", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + } + ], + "type": "tree" + }, + { + "conditions": [], + "error": "FIPS is enabled but this partition does not support FIPS", + "type": "error" + } + ], + "type": "tree" + }, + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseFIPS" + }, + false + ] + }, + { + "fn": "booleanEquals", + "argv": [ + { + "ref": "UseDualStack" + }, + true + ] + } + ], + "rules": [ + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + true, + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "supportsDualStack" + ] + } + ] + } + ], + "rules": [ + { + "conditions": [], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dualStackDnsSuffix}", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + } + ], + "type": "tree" + }, + { + "conditions": [], + "error": "DualStack is enabled but this partition does not support DualStack", + "type": "error" + } + ], + "type": "tree" + }, + { + "conditions": [], + "endpoint": { + "url": "https://runtime.sagemaker.{Region}.{PartitionResult#dnsSuffix}", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + } + ], + "type": "tree" + } + ], + "type": "tree" + }, + { + "conditions": [], + "error": "Invalid Configuration: Missing Region", + "type": "error" + } + ], + "type": "tree" + } + ] + }, + "smithy.rules#endpointTests": { + "testCases": [ + { + "documentation": "For custom endpoint with region not set and fips disabled", + "expect": { + "endpoint": { + "url": "https://example.com" + } + }, + "params": { + "Endpoint": "https://example.com", + "UseFIPS": false + } + }, + { + "documentation": "For custom endpoint with fips enabled", + "expect": { + "error": "Invalid Configuration: FIPS and custom endpoint are not supported" + }, + "params": { + "Endpoint": "https://example.com", + "UseFIPS": true + } + }, + { + "documentation": "For custom endpoint with fips disabled and dualstack enabled", + "expect": { + "error": "Invalid Configuration: Dualstack and custom endpoint are not supported" + }, + "params": { + "Endpoint": "https://example.com", + "UseFIPS": false, + "UseDualStack": true + } + }, + { + "documentation": "For region us-east-1 with FIPS enabled and DualStack enabled", + "expect": { + "endpoint": { + "url": "https://runtime-fips.sagemaker.us-east-1.api.aws:8443" + } + }, + "params": { + "Region": "us-east-1", + "UseFIPS": true, + "UseDualStack": true + } + }, + { + "documentation": "For region us-east-1 with FIPS enabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime-fips.sagemaker.us-east-1.amazonaws.com:8443" + } + }, + "params": { + "Region": "us-east-1", + "UseFIPS": true, + "UseDualStack": false + } + }, + { + "documentation": "For region us-east-1 with FIPS disabled and DualStack enabled", + "expect": { + "endpoint": { + "url": "https://runtime.sagemaker.us-east-1.api.aws:8443" + } + }, + "params": { + "Region": "us-east-1", + "UseFIPS": false, + 
"UseDualStack": true + } + }, + { + "documentation": "For region us-east-1 with FIPS disabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime.sagemaker.us-east-1.amazonaws.com:8443" + } + }, + "params": { + "Region": "us-east-1", + "UseFIPS": false, + "UseDualStack": false + } + }, + { + "documentation": "For region cn-northwest-1 with FIPS enabled and DualStack enabled", + "expect": { + "endpoint": { + "url": "https://runtime-fips.sagemaker.cn-northwest-1.api.amazonwebservices.com.cn:8443" + } + }, + "params": { + "Region": "cn-northwest-1", + "UseFIPS": true, + "UseDualStack": true + } + }, + { + "documentation": "For region cn-northwest-1 with FIPS enabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime-fips.sagemaker.cn-northwest-1.amazonaws.com.cn:8443" + } + }, + "params": { + "Region": "cn-northwest-1", + "UseFIPS": true, + "UseDualStack": false + } + }, + { + "documentation": "For region cn-northwest-1 with FIPS disabled and DualStack enabled", + "expect": { + "endpoint": { + "url": "https://runtime.sagemaker.cn-northwest-1.api.amazonwebservices.com.cn:8443" + } + }, + "params": { + "Region": "cn-northwest-1", + "UseFIPS": false, + "UseDualStack": true + } + }, + { + "documentation": "For region cn-northwest-1 with FIPS disabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime.sagemaker.cn-northwest-1.amazonaws.com.cn:8443" + } + }, + "params": { + "Region": "cn-northwest-1", + "UseFIPS": false, + "UseDualStack": false + } + }, + { + "documentation": "For region eusc-de-east-1 with FIPS enabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime-fips.sagemaker.eusc-de-east-1.amazonaws.eu:8443" + } + }, + "params": { + "Region": "eusc-de-east-1", + "UseFIPS": true, + "UseDualStack": false + } + }, + { + "documentation": "For region eusc-de-east-1 with FIPS disabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime.sagemaker.eusc-de-east-1.amazonaws.eu:8443" + } + }, + "params": { + "Region": "eusc-de-east-1", + "UseFIPS": false, + "UseDualStack": false + } + }, + { + "documentation": "For region us-iso-east-1 with FIPS enabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime-fips.sagemaker.us-iso-east-1.c2s.ic.gov:8443" + } + }, + "params": { + "Region": "us-iso-east-1", + "UseFIPS": true, + "UseDualStack": false + } + }, + { + "documentation": "For region us-iso-east-1 with FIPS disabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime.sagemaker.us-iso-east-1.c2s.ic.gov:8443" + } + }, + "params": { + "Region": "us-iso-east-1", + "UseFIPS": false, + "UseDualStack": false + } + }, + { + "documentation": "For region us-isob-east-1 with FIPS enabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime-fips.sagemaker.us-isob-east-1.sc2s.sgov.gov:8443" + } + }, + "params": { + "Region": "us-isob-east-1", + "UseFIPS": true, + "UseDualStack": false + } + }, + { + "documentation": "For region us-isob-east-1 with FIPS disabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime.sagemaker.us-isob-east-1.sc2s.sgov.gov:8443" + } + }, + "params": { + "Region": "us-isob-east-1", + "UseFIPS": false, + "UseDualStack": false + } + }, + { + "documentation": "For region eu-isoe-west-1 with FIPS enabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime.sagemaker-fips.eu-isoe-west-1.cloud.adc-e.uk" 
+ } + }, + "params": { + "Region": "eu-isoe-west-1", + "UseFIPS": true, + "UseDualStack": false + } + }, + { + "documentation": "For region eu-isoe-west-1 with FIPS disabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime.sagemaker.eu-isoe-west-1.cloud.adc-e.uk" + } + }, + "params": { + "Region": "eu-isoe-west-1", + "UseFIPS": false, + "UseDualStack": false + } + }, + { + "documentation": "For region us-isof-south-1 with FIPS enabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime-fips.sagemaker.us-isof-south-1.csp.hci.ic.gov:8443" + } + }, + "params": { + "Region": "us-isof-south-1", + "UseFIPS": true, + "UseDualStack": false + } + }, + { + "documentation": "For region us-isof-south-1 with FIPS disabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime.sagemaker.us-isof-south-1.csp.hci.ic.gov:8443" + } + }, + "params": { + "Region": "us-isof-south-1", + "UseFIPS": false, + "UseDualStack": false + } + }, + { + "documentation": "For region us-gov-west-1 with FIPS enabled and DualStack enabled", + "expect": { + "endpoint": { + "url": "https://runtime-fips.sagemaker.us-gov-west-1.api.aws:8443" + } + }, + "params": { + "Region": "us-gov-west-1", + "UseFIPS": true, + "UseDualStack": true + } + }, + { + "documentation": "For region us-gov-west-1 with FIPS enabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime-fips.sagemaker.us-gov-west-1.amazonaws.com:8443" + } + }, + "params": { + "Region": "us-gov-west-1", + "UseFIPS": true, + "UseDualStack": false + } + }, + { + "documentation": "For region us-gov-west-1 with FIPS disabled and DualStack enabled", + "expect": { + "endpoint": { + "url": "https://runtime.sagemaker.us-gov-west-1.api.aws:8443" + } + }, + "params": { + "Region": "us-gov-west-1", + "UseFIPS": false, + "UseDualStack": true + } + }, + { + "documentation": "For region us-gov-west-1 with FIPS disabled and DualStack disabled", + "expect": { + "endpoint": { + "url": "https://runtime.sagemaker.us-gov-west-1.amazonaws.com:8443" + } + }, + "params": { + "Region": "us-gov-west-1", + "UseFIPS": false, + "UseDualStack": false + } + }, + { + "documentation": "Missing region", + "expect": { + "error": "Invalid Configuration: Missing Region" + } + } + ], + "version": "1.0" + } + } + }, + "com.amazonaws.sagemakerruntimehttp2#InputValidationError": { + "type": "structure", + "members": { + "Message": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Error message.

" + } + }, + "ErrorCode": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Error code.

" + } + } + }, + "traits": { + "smithy.api#documentation": "

The input fails to satisfy the constraints specified by an AWS service.

", + "smithy.api#error": "client", + "smithy.api#httpError": 400 + } + }, + "com.amazonaws.sagemakerruntimehttp2#InternalServerError": { + "type": "structure", + "members": { + "Message": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Error message.

" + } + }, + "ErrorCode": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Error code.

" + } + } + }, + "traits": { + "smithy.api#documentation": "

The request processing has failed because of an unknown error, exception or failure.

", + "smithy.api#error": "server", + "smithy.api#httpError": 500 + } + }, + "com.amazonaws.sagemakerruntimehttp2#InternalStreamFailure": { + "type": "structure", + "members": { + "Message": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Error message.

" + } + } + }, + "traits": { + "smithy.api#documentation": "

Internal stream failure that occurs during streaming.

", + "smithy.api#error": "server" + } + }, + "com.amazonaws.sagemakerruntimehttp2#InvokeEndpointWithBidirectionalStream": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemakerruntimehttp2#InvokeEndpointWithBidirectionalStreamInput" + }, + "output": { + "target": "com.amazonaws.sagemakerruntimehttp2#InvokeEndpointWithBidirectionalStreamOutput" + }, + "errors": [ + { + "target": "com.amazonaws.sagemakerruntimehttp2#InputValidationError" + }, + { + "target": "com.amazonaws.sagemakerruntimehttp2#InternalServerError" + }, + { + "target": "com.amazonaws.sagemakerruntimehttp2#InternalStreamFailure" + }, + { + "target": "com.amazonaws.sagemakerruntimehttp2#ModelError" + }, + { + "target": "com.amazonaws.sagemakerruntimehttp2#ModelStreamError" + }, + { + "target": "com.amazonaws.sagemakerruntimehttp2#ServiceUnavailableError" + } + ], + "traits": { + "smithy.api#documentation": "

Invokes a model endpoint with bidirectional streaming capabilities. This operation establishes a persistent connection that allows you to send multiple requests and receive streaming responses from the model in real time.

Bidirectional streaming is useful for interactive applications such as chatbots, real-time translation, or any scenario where you need to maintain a conversation-like interaction with the model. The connection remains open, allowing you to send additional input and receive responses without establishing a new connection for each request.

For an overview of Amazon SageMaker AI, see How It Works.

Amazon SageMaker AI strips all POST headers except those supported by the API. Amazon SageMaker AI might add additional headers. You should not rely on the behavior of headers outside those enumerated in the request syntax.

Calls to InvokeEndpointWithBidirectionalStream are authenticated by using Amazon Web Services Signature Version 4. For information, see Authenticating Requests (Amazon Web Services Signature Version 4) in the Amazon S3 API Reference.

The bidirectional stream maintains the connection until either the client closes it or the model indicates completion. Each request and response in the stream is sent as an event with optional headers for data type and completion state.

Endpoints are scoped to an individual account, and are not public. The URL does not contain the account ID, but Amazon SageMaker AI determines the account ID from the authentication token that is supplied by the caller.
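
To make the interaction pattern above concrete, the sketch below shows how a caller might drive this operation from the generated async Python client. It is a minimal, hypothetical sketch: the package, client, and stream-helper names are assumptions for illustration (smithy-python-codegen may emit different names for this model), not the verbatim generated API.

```python
# Hypothetical usage sketch -- package, client, and stream-helper names are
# assumed for illustration; the generated client for this model may differ.
import asyncio

from aws_sdk_sagemaker_runtime_http2.client import SageMakerRuntimeHTTP2Client  # assumed name
from aws_sdk_sagemaker_runtime_http2.models import (  # assumed names
    InvokeEndpointWithBidirectionalStreamInput,
    RequestPayloadPart,
    RequestStreamEventPayloadPart,
)


async def main() -> None:
    client = SageMakerRuntimeHTTP2Client()

    # Starting the operation opens the persistent HTTP/2 connection.
    stream = await client.invoke_endpoint_with_bidirectional_stream(
        InvokeEndpointWithBidirectionalStreamInput(endpoint_name="my-endpoint")
    )

    # Send one request event; DataType and CompletionState travel as event headers.
    await stream.input_stream.send(
        RequestStreamEventPayloadPart(
            value=RequestPayloadPart(
                bytes=b'{"inputs": "hello"}',
                data_type="UTF8",
                completion_state="COMPLETE",
            )
        )
    )

    # Read response events until the model marks its response COMPLETE.
    output = await stream.await_output()  # assumed accessor for the output stream
    async for event in output.body:
        part = getattr(event, "value", None)
        if part is not None and part.bytes:
            print(part.bytes.decode("utf-8"))
        if part is not None and part.completion_state == "COMPLETE":
            break

    await stream.input_stream.close()


asyncio.run(main())
```

The CompletionState header is what lets a caller keep the connection open across turns: sending PARTIAL parts continues the same exchange, while COMPLETE signals the end of one request.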

", + "smithy.api#http": { + "method": "POST", + "uri": "/endpoints/{EndpointName}/invocations-bidirectional-stream" + } + } + }, + "com.amazonaws.sagemakerruntimehttp2#InvokeEndpointWithBidirectionalStreamInput": { + "type": "structure", + "members": { + "EndpointName": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

The name of the endpoint to invoke.

", + "smithy.api#httpLabel": {}, + "smithy.api#length": { + "max": 63 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*", + "smithy.api#required": {} + } + }, + "Body": { + "target": "com.amazonaws.sagemakerruntimehttp2#RequestStreamEvent", + "traits": { + "smithy.api#documentation": "

The request payload stream.

", + "smithy.api#httpPayload": {}, + "smithy.api#required": {} + } + }, + "TargetVariant": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Target variant for the request.

", + "smithy.api#httpHeader": "X-Amzn-SageMaker-Target-Variant", + "smithy.api#length": { + "max": 63 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + } + }, + "ModelInvocationPath": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Model invocation path.

", + "smithy.api#httpHeader": "X-Amzn-SageMaker-Model-Invocation-Path", + "smithy.api#length": { + "max": 100 + }, + "smithy.api#pattern": "^[A-Za-z0-9\\-._]+(?:/[A-Za-z0-9\\-._]+)*$" + } + }, + "ModelQueryString": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Model query string.

", + "smithy.api#httpHeader": "X-Amzn-SageMaker-Model-Query-String", + "smithy.api#length": { + "max": 2048 + }, + "smithy.api#pattern": "^[a-zA-Z0-9][A-Za-z0-9_-]*=(?:[A-Za-z0-9._~\\-]|%[0-9A-Fa-f]{2})+(?:&[a-zA-Z0-9][A-Za-z0-9_-]*=(?:[A-Za-z0-9._~\\-]|%[0-9A-Fa-f]{2})+)*$" + } + } + }, + "traits": { + "smithy.api#input": {} + } + }, + "com.amazonaws.sagemakerruntimehttp2#InvokeEndpointWithBidirectionalStreamOutput": { + "type": "structure", + "members": { + "Body": { + "target": "com.amazonaws.sagemakerruntimehttp2#ResponseStreamEvent", + "traits": { + "smithy.api#documentation": "

The response payload stream.

", + "smithy.api#httpPayload": {}, + "smithy.api#required": {} + } + }, + "InvokedProductionVariant": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

The invoked production variant.

", + "smithy.api#httpHeader": "X-Amzn-Invoked-Production-Variant", + "smithy.api#length": { + "max": 1024 + }, + "smithy.api#pattern": "\\p{ASCII}*" + } + } + }, + "traits": { + "smithy.api#output": {} + } + }, + "com.amazonaws.sagemakerruntimehttp2#ModelError": { + "type": "structure", + "members": { + "Message": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Error message.

" + } + }, + "OriginalStatusCode": { + "target": "smithy.api#Integer", + "traits": { + "smithy.api#documentation": "

HTTP status code returned by the model.

" + } + }, + "OriginalMessage": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Original error message from the model.

" + } + }, + "LogStreamArn": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

CloudWatch log stream ARN.

" + } + }, + "ErrorCode": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Error code.

" + } + } + }, + "traits": { + "smithy.api#documentation": "

An error occurred while the model was processing the request.

", + "smithy.api#error": "client", + "smithy.api#httpError": 424 + } + }, + "com.amazonaws.sagemakerruntimehttp2#ModelStreamError": { + "type": "structure", + "members": { + "Message": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Error message.

" + } + }, + "ErrorCode": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Error code.

" + } + } + }, + "traits": { + "smithy.api#documentation": "

Model stream error that occurs during streaming.

", + "smithy.api#error": "client" + } + }, + "com.amazonaws.sagemakerruntimehttp2#RequestPayloadPart": { + "type": "structure", + "members": { + "Bytes": { + "target": "com.amazonaws.sagemakerruntimehttp2#SensitiveBlob", + "traits": { + "smithy.api#documentation": "

The payload bytes.

", + "smithy.api#eventPayload": {} + } + }, + "DataType": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Data type header. Can be one of the following values: \"UTF8\", \"BINARY\".

", + "smithy.api#eventHeader": {}, + "smithy.api#pattern": "^(UTF8)$|^(BINARY)$" + } + }, + "CompletionState": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Completion state header. Can be one of the following values: \"PARTIAL\", \"COMPLETE\".

", + "smithy.api#eventHeader": {}, + "smithy.api#pattern": "^(PARTIAL)$|^(COMPLETE)$" + } + }, + "P": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Padding string for alignment.

", + "smithy.api#eventHeader": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

Request payload part structure.
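
As a concrete illustration of these event headers, the following self-contained sketch models a payload part with plain dataclasses (stand-ins for the generated types, not the actual SDK classes) and checks the header values against the patterns declared above.

```python
# Plain-Python illustration of the payload-part event shape described above.
# These dataclasses are stand-ins for the generated RequestPayloadPart type.
import re
from dataclasses import dataclass
from typing import Optional

DATA_TYPE_PATTERN = re.compile(r"^(UTF8)$|^(BINARY)$")
COMPLETION_STATE_PATTERN = re.compile(r"^(PARTIAL)$|^(COMPLETE)$")


@dataclass
class PayloadPart:
    bytes_: bytes                            # event payload
    data_type: Optional[str] = None          # event header: "UTF8" or "BINARY"
    completion_state: Optional[str] = None   # event header: "PARTIAL" or "COMPLETE"

    def __post_init__(self) -> None:
        if self.data_type and not DATA_TYPE_PATTERN.match(self.data_type):
            raise ValueError(f"invalid DataType: {self.data_type!r}")
        if self.completion_state and not COMPLETION_STATE_PATTERN.match(self.completion_state):
            raise ValueError(f"invalid CompletionState: {self.completion_state!r}")


# A multi-part request: two PARTIAL chunks followed by a COMPLETE chunk.
parts = [
    PayloadPart(b'{"inputs": "hel', "UTF8", "PARTIAL"),
    PayloadPart(b'lo wor', "UTF8", "PARTIAL"),
    PayloadPart(b'ld"}', "UTF8", "COMPLETE"),
]
assert b"".join(p.bytes_ for p in parts) == b'{"inputs": "hello world"}'
```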

" + } + }, + "com.amazonaws.sagemakerruntimehttp2#RequestStreamEvent": { + "type": "union", + "members": { + "PayloadPart": { + "target": "com.amazonaws.sagemakerruntimehttp2#RequestPayloadPart", + "traits": { + "smithy.api#documentation": "

Payload part event.

" + } + } + }, + "traits": { + "smithy.api#documentation": "

Request stream event union.

", + "smithy.api#streaming": {} + } + }, + "com.amazonaws.sagemakerruntimehttp2#ResponsePayloadPart": { + "type": "structure", + "members": { + "Bytes": { + "target": "com.amazonaws.sagemakerruntimehttp2#SensitiveBlob", + "traits": { + "smithy.api#documentation": "

The payload bytes.

", + "smithy.api#eventPayload": {} + } + }, + "DataType": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Data type header. Can be one of the following values: \"UTF8\", \"BINARY\".

", + "smithy.api#eventHeader": {}, + "smithy.api#pattern": "^(UTF8)$|^(BINARY)$" + } + }, + "CompletionState": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Completion state header. Can be one of the following values: \"PARTIAL\", \"COMPLETE\".

", + "smithy.api#eventHeader": {}, + "smithy.api#pattern": "^(PARTIAL)$|^(COMPLETE)$" + } + }, + "P": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Padding string for alignment.

", + "smithy.api#eventHeader": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

Response payload part structure.

" + } + }, + "com.amazonaws.sagemakerruntimehttp2#ResponseStreamEvent": { + "type": "union", + "members": { + "PayloadPart": { + "target": "com.amazonaws.sagemakerruntimehttp2#ResponsePayloadPart", + "traits": { + "smithy.api#documentation": "

Payload part event.

" + } + }, + "ModelStreamError": { + "target": "com.amazonaws.sagemakerruntimehttp2#ModelStreamError", + "traits": { + "smithy.api#documentation": "

Model stream error event.

" + } + }, + "InternalStreamFailure": { + "target": "com.amazonaws.sagemakerruntimehttp2#InternalStreamFailure", + "traits": { + "smithy.api#documentation": "

Internal stream failure event.

" + } + } + }, + "traits": { + "smithy.api#documentation": "

Response stream event union.

", + "smithy.api#streaming": {} + } + }, + "com.amazonaws.sagemakerruntimehttp2#SensitiveBlob": { + "type": "blob", + "traits": { + "smithy.api#sensitive": {} + } + }, + "com.amazonaws.sagemakerruntimehttp2#ServiceUnavailableError": { + "type": "structure", + "members": { + "Message": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Error message.

" + } + }, + "ErrorCode": { + "target": "smithy.api#String", + "traits": { + "smithy.api#documentation": "

Error code.

" + } + } + }, + "traits": { + "smithy.api#documentation": "

The request has failed due to a temporary failure of the server.

", + "smithy.api#error": "server", + "smithy.api#httpError": 503 + } + } + } +} \ No newline at end of file diff --git a/codegen/aws-models/transcribe-streaming.json b/codegen/aws-models/transcribe-streaming.json index d636846..b5e6ff0 100644 --- a/codegen/aws-models/transcribe-streaming.json +++ b/codegen/aws-models/transcribe-streaming.json @@ -1195,6 +1195,276 @@ "traits": { "smithy.api#enumValue": "zu-ZA" } + }, + "AM_ET": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "am-ET" + } + }, + "BE_BY": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "be-BY" + } + }, + "BG_BG": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "bg-BG" + } + }, + "BN_IN": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "bn-IN" + } + }, + "BS_BA": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "bs-BA" + } + }, + "CKB_IQ": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "ckb-IQ" + } + }, + "CKB_IR": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "ckb-IR" + } + }, + "CY_WL": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "cy-WL" + } + }, + "ES_MX": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "es-MX" + } + }, + "ET_ET": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "et-ET" + } + }, + "FA_AF": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "fa-AF" + } + }, + "GU_IN": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "gu-IN" + } + }, + "HT_HT": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "ht-HT" + } + }, + "HU_HU": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "hu-HU" + } + }, + "HY_AM": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "hy-AM" + } + }, + "IS_IS": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "is-IS" + } + }, + "JV_ID": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "jv-ID" + } + }, + "KA_GE": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "ka-GE" + } + }, + "KAB_DZ": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "kab-DZ" + } + }, + "KK_KZ": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "kk-KZ" + } + }, + "KM_KH": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "km-KH" + } + }, + "KN_IN": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "kn-IN" + } + }, + "LG_IN": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "lg-IN" + } + }, + "LT_LT": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "lt-LT" + } + }, + "MK_MK": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "mk-MK" + } + }, + "ML_IN": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "ml-IN" + } + }, + "MR_IN": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "mr-IN" + } + }, + "MY_MM": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "my-MM" + } + }, + "NE_NP": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "ne-NP" + } + }, + "OR_IN": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "or-IN" + } + }, + "PA_IN": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "pa-IN" + } + 
}, + "PS_AF": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "ps-AF" + } + }, + "SI_LK": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "si-LK" + } + }, + "SL_SI": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "sl-SI" + } + }, + "SQ_AL": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "sq-AL" + } + }, + "SU_ID": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "su-ID" + } + }, + "SW_BI": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "sw-BI" + } + }, + "SW_KE": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "sw-KE" + } + }, + "SW_RW": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "sw-RW" + } + }, + "SW_TZ": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "sw-TZ" + } + }, + "SW_UG": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "sw-UG" + } + }, + "TA_IN": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "ta-IN" + } + }, + "TE_IN": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "te-IN" + } + }, + "TR_TR": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "tr-TR" + } + }, + "UZ_UZ": { + "target": "smithy.api#Unit", + "traits": { + "smithy.api#enumValue": "uz-UZ" + } } } },