From 588ffb4699b767643457a6a53875de2af1222611 Mon Sep 17 00:00:00 2001 From: aws-sdk-go-automation <43143561+aws-sdk-go-automation@users.noreply.github.com> Date: Wed, 3 Apr 2024 14:38:22 -0400 Subject: [PATCH] Release v1.51.14 (2024-04-03) (#5217) Release v1.51.14 (2024-04-03) === ### Service Client Updates * `service/cleanroomsml`: Updates service API and documentation * `service/cloudformation`: Updates service API and documentation * This release would return a new field - PolicyAction in cloudformation's existed DescribeChangeSetResponse, showing actions we are going to apply on the physical resource (e.g., Delete, Retain) according to the user's template * `service/datazone`: Updates service API, documentation, and paginators * `service/docdb`: Updates service API and documentation * This release adds Global Cluster Switchover capability which enables you to change your global cluster's primary AWS Region, the region that serves writes, while preserving the replication between all regions in the global cluster. * `service/groundstation`: Updates service API and documentation * `service/lambda`: Updates service API and documentation * Add Ruby 3.3 (ruby3.3) support to AWS Lambda * `service/medialive`: Updates service API and documentation * Cmaf Ingest outputs are now supported in Media Live * `service/medical-imaging`: Updates service API and documentation * `service/transfer`: Updates service API and documentation * Add ability to specify Security Policies for SFTP Connectors --- CHANGELOG.md | 19 + aws/endpoints/defaults.go | 75 + aws/version.go | 2 +- .../apis/cleanroomsml/2023-09-06/api-2.json | 35 +- .../apis/cleanroomsml/2023-09-06/docs-2.json | 46 +- .../apis/cloudformation/2010-05-15/api-2.json | 12 + .../cloudformation/2010-05-15/docs-2.json | 18 +- models/apis/datazone/2018-05-10/api-2.json | 383 +++- models/apis/datazone/2018-05-10/docs-2.json | 184 +- .../datazone/2018-05-10/paginators-1.json | 6 + models/apis/docdb/2014-10-31/api-2.json | 35 + models/apis/docdb/2014-10-31/docs-2.json | 20 +- .../apis/groundstation/2019-05-23/api-2.json | 8 +- .../apis/groundstation/2019-05-23/docs-2.json | 12 +- models/apis/lambda/2015-03-31/api-2.json | 1 + models/apis/lambda/2015-03-31/docs-2.json | 4 +- models/apis/medialive/2017-10-14/api-2.json | 142 ++ models/apis/medialive/2017-10-14/docs-2.json | 71 +- .../medical-imaging/2023-07-19/api-2.json | 62 +- .../medical-imaging/2023-07-19/docs-2.json | 55 +- models/apis/transfer/2018-11-05/api-2.json | 42 +- models/apis/transfer/2018-11-05/docs-2.json | 53 +- models/endpoints/endpoints.json | 70 +- service/cleanroomsml/api.go | 174 +- service/cleanroomsml/errors.go | 2 +- service/cloudformation/api.go | 67 +- service/datazone/api.go | 1643 ++++++++++++++++- service/datazone/datazoneiface/interface.go | 19 + service/docdb/api.go | 206 +++ service/docdb/docdbiface/interface.go | 4 + service/groundstation/api.go | 66 +- service/lambda/api.go | 8 +- service/medialive/api.go | 392 ++++ service/medicalimaging/api.go | 206 ++- service/medicalimaging/doc.go | 24 +- service/transfer/api.go | 139 +- 36 files changed, 3937 insertions(+), 368 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e468a2dcb2b..4d9515124e5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,22 @@ +Release v1.51.14 (2024-04-03) +=== + +### Service Client Updates +* `service/cleanroomsml`: Updates service API and documentation +* `service/cloudformation`: Updates service API and documentation + * This release would return a new field - PolicyAction in 
cloudformation's existed DescribeChangeSetResponse, showing actions we are going to apply on the physical resource (e.g., Delete, Retain) according to the user's template +* `service/datazone`: Updates service API, documentation, and paginators +* `service/docdb`: Updates service API and documentation + * This release adds Global Cluster Switchover capability which enables you to change your global cluster's primary AWS Region, the region that serves writes, while preserving the replication between all regions in the global cluster. +* `service/groundstation`: Updates service API and documentation +* `service/lambda`: Updates service API and documentation + * Add Ruby 3.3 (ruby3.3) support to AWS Lambda +* `service/medialive`: Updates service API and documentation + * Cmaf Ingest outputs are now supported in Media Live +* `service/medical-imaging`: Updates service API and documentation +* `service/transfer`: Updates service API and documentation + * Add ability to specify Security Policies for SFTP Connectors + Release v1.51.13 (2024-04-02) === diff --git a/aws/endpoints/defaults.go b/aws/endpoints/defaults.go index 4bfeeb07a39..ece2e0dd4d2 100644 --- a/aws/endpoints/defaults.go +++ b/aws/endpoints/defaults.go @@ -32770,6 +32770,12 @@ var awsPartition = partition{ endpointKey{ Region: "ca-central-1", }: endpoint{}, + endpointKey{ + Region: "ca-central-1", + Variant: fipsVariant, + }: endpoint{ + Hostname: "verifiedpermissions-fips.ca-central-1.amazonaws.com", + }, endpointKey{ Region: "eu-central-1", }: endpoint{}, @@ -32794,6 +32800,51 @@ var awsPartition = partition{ endpointKey{ Region: "eu-west-3", }: endpoint{}, + endpointKey{ + Region: "fips-ca-central-1", + }: endpoint{ + Hostname: "verifiedpermissions-fips.ca-central-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "ca-central-1", + }, + Deprecated: boxedTrue, + }, + endpointKey{ + Region: "fips-us-east-1", + }: endpoint{ + Hostname: "verifiedpermissions-fips.us-east-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "us-east-1", + }, + Deprecated: boxedTrue, + }, + endpointKey{ + Region: "fips-us-east-2", + }: endpoint{ + Hostname: "verifiedpermissions-fips.us-east-2.amazonaws.com", + CredentialScope: credentialScope{ + Region: "us-east-2", + }, + Deprecated: boxedTrue, + }, + endpointKey{ + Region: "fips-us-west-1", + }: endpoint{ + Hostname: "verifiedpermissions-fips.us-west-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "us-west-1", + }, + Deprecated: boxedTrue, + }, + endpointKey{ + Region: "fips-us-west-2", + }: endpoint{ + Hostname: "verifiedpermissions-fips.us-west-2.amazonaws.com", + CredentialScope: credentialScope{ + Region: "us-west-2", + }, + Deprecated: boxedTrue, + }, endpointKey{ Region: "me-central-1", }: endpoint{}, @@ -32806,15 +32857,39 @@ var awsPartition = partition{ endpointKey{ Region: "us-east-1", }: endpoint{}, + endpointKey{ + Region: "us-east-1", + Variant: fipsVariant, + }: endpoint{ + Hostname: "verifiedpermissions-fips.us-east-1.amazonaws.com", + }, endpointKey{ Region: "us-east-2", }: endpoint{}, + endpointKey{ + Region: "us-east-2", + Variant: fipsVariant, + }: endpoint{ + Hostname: "verifiedpermissions-fips.us-east-2.amazonaws.com", + }, endpointKey{ Region: "us-west-1", }: endpoint{}, + endpointKey{ + Region: "us-west-1", + Variant: fipsVariant, + }: endpoint{ + Hostname: "verifiedpermissions-fips.us-west-1.amazonaws.com", + }, endpointKey{ Region: "us-west-2", }: endpoint{}, + endpointKey{ + Region: "us-west-2", + Variant: fipsVariant, + }: endpoint{ + 
Hostname: "verifiedpermissions-fips.us-west-2.amazonaws.com", + }, }, }, "voice-chime": service{ diff --git a/aws/version.go b/aws/version.go index c269463583b..a10b0af52a9 100644 --- a/aws/version.go +++ b/aws/version.go @@ -5,4 +5,4 @@ package aws const SDKName = "aws-sdk-go" // SDKVersion is the version of this SDK -const SDKVersion = "1.51.13" +const SDKVersion = "1.51.14" diff --git a/models/apis/cleanroomsml/2023-09-06/api-2.json b/models/apis/cleanroomsml/2023-09-06/api-2.json index 9efc829730f..5cc3e911a3c 100644 --- a/models/apis/cleanroomsml/2023-09-06/api-2.json +++ b/models/apis/cleanroomsml/2023-09-06/api-2.json @@ -529,32 +529,6 @@ "type":"list", "member":{"shape":"AudienceModelSummary"} }, - "AudienceModelMetric":{ - "type":"structure", - "required":[ - "forTopKItemPredictions", - "type", - "value" - ], - "members":{ - "forTopKItemPredictions":{"shape":"Integer"}, - "type":{"shape":"AudienceModelMetricType"}, - "value":{"shape":"Double"} - } - }, - "AudienceModelMetricType":{ - "type":"string", - "enum":[ - "NORMALIZED_DISCOUNTED_CUMULATIVE_GAIN", - "MEAN_RECIPROCAL_RANK", - "PRECISION", - "RECALL" - ] - }, - "AudienceModelMetrics":{ - "type":"list", - "member":{"shape":"AudienceModelMetric"} - }, "AudienceModelStatus":{ "type":"string", "enum":[ @@ -591,6 +565,7 @@ "type":"structure", "required":["relevanceMetrics"], "members":{ + "recallMetric":{"shape":"Double"}, "relevanceMetrics":{"shape":"RelevanceMetrics"} } }, @@ -975,7 +950,6 @@ "createTime":{"shape":"SyntheticTimestamp_date_time"}, "description":{"shape":"ResourceDescription"}, "kmsKeyArn":{"shape":"KmsKeyArn"}, - "metrics":{"shape":"AudienceModelMetrics"}, "name":{"shape":"NameString"}, "status":{"shape":"AudienceModelStatus"}, "statusDetails":{"shape":"StatusDetails"}, @@ -1119,10 +1093,6 @@ "min":20, "pattern":"^arn:aws[-a-z]*:iam::[0-9]{12}:role/.+$" }, - "Integer":{ - "type":"integer", - "box":true - }, "KmsKeyArn":{ "type":"string", "max":2048, @@ -1461,8 +1431,7 @@ "TagKey":{ "type":"string", "max":128, - "min":1, - "pattern":"^(?!aws:).{1,128}$" + "min":1 }, "TagKeys":{ "type":"list", diff --git a/models/apis/cleanroomsml/2023-09-06/docs-2.json b/models/apis/cleanroomsml/2023-09-06/docs-2.json index e52cb4c652a..7e9e9d65b6b 100644 --- a/models/apis/cleanroomsml/2023-09-06/docs-2.json +++ b/models/apis/cleanroomsml/2023-09-06/docs-2.json @@ -4,7 +4,7 @@ "operations": { "CreateAudienceModel": "

Defines the information necessary to create an audience model. An audience model is a machine learning model that Clean Rooms ML trains to measure similarity between users. Clean Rooms ML manages training and storing the audience model. The audience model can be used in multiple calls to the StartAudienceGenerationJob API.

", "CreateConfiguredAudienceModel": "

Defines the information necessary to create a configured audience model.

", - "CreateTrainingDataset": "

Defines the information necessary to create a training dataset, or seed audience. In Clean Rooms ML, the TrainingDataset is metadata that points to a Glue table, which is read only during AudienceModel creation.

", + "CreateTrainingDataset": "

Defines the information necessary to create a training dataset. In Clean Rooms ML, the TrainingDataset is metadata that points to a Glue table, which is read only during AudienceModel creation.

", "DeleteAudienceGenerationJob": "

Deletes the specified audience generation job, and removes all data associated with the job.

", "DeleteAudienceModel": "

Specifies an audience model that you want to delete. You can't delete an audience model if there are any configured audience models that depend on the audience model.

", "DeleteConfiguredAudienceModel": "

Deletes the specified configured audience model. You can't delete a configured audience model if there are any lookalike models that use the configured audience model. If you delete a configured audience model, it will be removed from any collaborations that it is associated with.

", @@ -80,7 +80,7 @@ } }, "AudienceGenerationJobDataSource": { - "base": "

Defines the Amazon S3 bucket where the training data for the configured audience is stored.

", + "base": "

Defines the Amazon S3 bucket where the seed audience for the generating audience is stored.

", "refs": { "GetAudienceGenerationJobResponse$seedAudience": "

The seed audience that was used for this audience generation job. This field will be null if the account calling the API is the account that started this audience generation job.

", "StartAudienceGenerationJobRequest$seedAudience": "

The seed audience that is used to generate the audience.

" @@ -125,24 +125,6 @@ "ListAudienceModelsResponse$audienceModels": "

The audience models that match the request.

" } }, - "AudienceModelMetric": { - "base": "

The audience model metrics.

", - "refs": { - "AudienceModelMetrics$member": null - } - }, - "AudienceModelMetricType": { - "base": null, - "refs": { - "AudienceModelMetric$type": "

The audience model metric.

" - } - }, - "AudienceModelMetrics": { - "base": null, - "refs": { - "GetAudienceModelResponse$metrics": "

Accuracy metrics for the model.

" - } - }, "AudienceModelStatus": { "base": null, "refs": { @@ -159,7 +141,7 @@ "AudienceQualityMetrics": { "base": "

Metrics that describe the quality of the generated audience.

", "refs": { - "GetAudienceGenerationJobResponse$metrics": "

The relevance scores for different audience sizes.

" + "GetAudienceGenerationJobResponse$metrics": "

The relevance scores for different audience sizes and the recall score of the generated audience.

" } }, "AudienceSize": { @@ -278,7 +260,7 @@ } }, "ConflictException": { - "base": "

A resource with that name already exists in this region.

", + "base": "

You can't complete this action because another resource depends on this resource.

", "refs": { } }, @@ -383,7 +365,7 @@ "Double": { "base": null, "refs": { - "AudienceModelMetric$value": "

The value of the audience model metric

", + "AudienceQualityMetrics$recallMetric": "

The recall score of the generated audience. Recall is the percentage of the most similar users (by default, the most similar 20%) from a sample of the training data that are included in the seed audience by the audience generation job. Values range from 0 to 1; larger values indicate a better audience. A recall value approximately equal to the maximum bin size indicates that the audience model is equivalent to random selection.
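For orientation, a minimal aws-sdk-go sketch that reads this metric from GetAudienceGenerationJob output; the job ARN is a placeholder and the input field name AudienceGenerationJobArn is assumed from the existing operation shape:

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/cleanroomsml"
)

func main() {
	sess := session.Must(session.NewSession())
	client := cleanroomsml.New(sess)

	// Placeholder ARN; AudienceGenerationJobArn is assumed to be the input member.
	out, err := client.GetAudienceGenerationJob(&cleanroomsml.GetAudienceGenerationJobInput{
		AudienceGenerationJobArn: aws.String("arn:aws:cleanrooms-ml:us-east-1:111122223333:audience-generation-job/example"),
	})
	if err != nil {
		panic(err)
	}
	if out.Metrics != nil && out.Metrics.RecallMetric != nil {
		// RecallMetric is the field added in this release (0-1, higher is better).
		fmt.Println("recall:", aws.Float64Value(out.Metrics.RecallMetric))
	}
}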

", "RelevanceMetric$score": "

The relevance score of the generated audience.

" } }, @@ -472,12 +454,6 @@ "GetTrainingDatasetResponse$roleArn": "

The IAM role used to read the training data.

" } }, - "Integer": { - "base": null, - "refs": { - "AudienceModelMetric$forTopKItemPredictions": "

The number of users that were used to generate these model metrics.

" - } - }, "KmsKeyArn": { "base": null, "refs": { @@ -566,7 +542,7 @@ "MinMatchingSeedSize": { "base": null, "refs": { - "CreateConfiguredAudienceModelRequest$minMatchingSeedSize": "

The minimum number of users from the seed audience that must match with users in the training data of the audience model.

", + "CreateConfiguredAudienceModelRequest$minMatchingSeedSize": "

The minimum number of users from the seed audience that must match with users in the training data of the audience model. The default value is 500.

", "GetConfiguredAudienceModelResponse$minMatchingSeedSize": "

The minimum number of users from the seed audience that must match with users in the training data of the audience model.

", "UpdateConfiguredAudienceModelRequest$minMatchingSeedSize": "

The minimum number of users from the seed audience that must match with users in the training data of the audience model.

" } @@ -670,7 +646,7 @@ "base": "

Provides information about an Amazon S3 bucket and path.

", "refs": { "AudienceDestination$s3Destination": "

The Amazon S3 bucket and path for the configured audience.

", - "AudienceGenerationJobDataSource$dataSource": "

The Amazon S3 bucket where the training data for the configured audience is stored.

" + "AudienceGenerationJobDataSource$dataSource": "

Defines the Amazon S3 bucket where the seed audience for the generating audience is stored. A valid data source is a JSON line file in the following format:

{\"user_id\": \"111111\"}

{\"user_id\": \"222222\"}

...

" } }, "S3Path": { @@ -769,16 +745,16 @@ "TagMap": { "base": null, "refs": { - "CreateAudienceModelRequest$tags": "

The optional metadata that you apply to the resource to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define.

The following basic restrictions apply to tags:

", - "CreateConfiguredAudienceModelRequest$tags": "

The optional metadata that you apply to the resource to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define.

The following basic restrictions apply to tags:

", + "CreateAudienceModelRequest$tags": "

The optional metadata that you apply to the resource to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define.

The following basic restrictions apply to tags:

", + "CreateConfiguredAudienceModelRequest$tags": "

The optional metadata that you apply to the resource to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define.

The following basic restrictions apply to tags:

", "CreateTrainingDatasetRequest$tags": "

The optional metadata that you apply to the resource to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define.

The following basic restrictions apply to tags:

", "GetAudienceGenerationJobResponse$tags": "

The tags that are associated to this audience generation job.

", "GetAudienceModelResponse$tags": "

The tags that are assigned to the audience model.

", "GetConfiguredAudienceModelResponse$tags": "

The tags that are associated to this configured audience model.

", "GetTrainingDatasetResponse$tags": "

The tags that are assigned to this training dataset.

", "ListTagsForResourceResponse$tags": "

The tags that are associated with the resource.

", - "StartAudienceGenerationJobRequest$tags": "

The optional metadata that you apply to the resource to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define.

The following basic restrictions apply to tags:

", - "TagResourceRequest$tags": "

The optional metadata that you apply to the resource to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define.

The following basic restrictions apply to tags:

" + "StartAudienceGenerationJobRequest$tags": "

The optional metadata that you apply to the resource to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define.

The following basic restrictions apply to tags:

", + "TagResourceRequest$tags": "

The optional metadata that you apply to the resource to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define.

The following basic restrictions apply to tags:

" } }, "TagOnCreatePolicy": { diff --git a/models/apis/cloudformation/2010-05-15/api-2.json b/models/apis/cloudformation/2010-05-15/api-2.json index d0ab357d165..fd3028ae2fa 100644 --- a/models/apis/cloudformation/2010-05-15/api-2.json +++ b/models/apis/cloudformation/2010-05-15/api-2.json @@ -3355,6 +3355,17 @@ "Value":{"shape":"Value"} } }, + "PolicyAction":{ + "type":"string", + "enum":[ + "Delete", + "Retain", + "Snapshot", + "ReplaceAndDelete", + "ReplaceAndRetain", + "ReplaceAndSnapshot" + ] + }, "PrivateTypeArn":{ "type":"string", "max":1024, @@ -3589,6 +3600,7 @@ "ResourceChange":{ "type":"structure", "members":{ + "PolicyAction":{"shape":"PolicyAction"}, "Action":{"shape":"ChangeAction"}, "LogicalResourceId":{"shape":"LogicalResourceId"}, "PhysicalResourceId":{"shape":"PhysicalResourceId"}, diff --git a/models/apis/cloudformation/2010-05-15/docs-2.json b/models/apis/cloudformation/2010-05-15/docs-2.json index 2c24eb5ae14..2377b9fb77e 100644 --- a/models/apis/cloudformation/2010-05-15/docs-2.json +++ b/models/apis/cloudformation/2010-05-15/docs-2.json @@ -3,8 +3,8 @@ "service": "CloudFormation

CloudFormation allows you to create and manage Amazon Web Services infrastructure deployments predictably and repeatedly. You can use CloudFormation to leverage Amazon Web Services products, such as Amazon Elastic Compute Cloud, Amazon Elastic Block Store, Amazon Simple Notification Service, Elastic Load Balancing, and Auto Scaling to build highly reliable, highly scalable, cost-effective applications without creating or configuring the underlying Amazon Web Services infrastructure.

With CloudFormation, you declare all your resources and dependencies in a template file. The template defines a collection of resources as a single unit called a stack. CloudFormation creates and deletes all member resources of the stack together and manages all dependencies between the resources for you.

For more information about CloudFormation, see the CloudFormation product page.

CloudFormation makes use of other Amazon Web Services products. If you need additional technical information about a specific Amazon Web Services product, you can find the product's technical documentation at docs.aws.amazon.com.

", "operations": { "ActivateOrganizationsAccess": "

Activate trusted access with Organizations. With trusted access between StackSets and Organizations activated, the management account has permissions to create and manage StackSets for your organization.

", - "ActivateType": "

Activates a public third-party extension, making it available for use in stack templates. For more information, see Using public extensions in the CloudFormation User Guide.

Once you have activated a public third-party extension in your account and Region, use SetTypeConfiguration to specify configuration properties for the extension. For more information, see Configuring extensions at the account level in the CloudFormation User Guide.

", - "BatchDescribeTypeConfigurations": "

Returns configuration data for the specified CloudFormation extensions, from the CloudFormation registry for the account and Region.

For more information, see Configuring extensions at the account level in the CloudFormation User Guide.

", + "ActivateType": "

Activates a public third-party extension, making it available for use in stack templates. For more information, see Using public extensions in the CloudFormation User Guide.

Once you have activated a public third-party extension in your account and Region, use SetTypeConfiguration to specify configuration properties for the extension. For more information, see Configuring extensions at the account level in the CloudFormation User Guide.

", + "BatchDescribeTypeConfigurations": "

Returns configuration data for the specified CloudFormation extensions, from the CloudFormation registry for the account and Region.

For more information, see Configuring extensions at the account level in the CloudFormation User Guide.

", "CancelUpdateStack": "

Cancels an update on the specified stack. If the call completes successfully, the stack rolls back the update and reverts to the previous stack configuration.

You can cancel only stacks that are in the UPDATE_IN_PROGRESS state.

", "ContinueUpdateRollback": "

For a specified stack that's in the UPDATE_ROLLBACK_FAILED state, continues rolling it back to the UPDATE_ROLLBACK_COMPLETE state. Depending on the cause of the failure, you can manually fix the error and continue the rollback. By continuing the rollback, you can return your stack to a working state (the UPDATE_ROLLBACK_COMPLETE state), and then try to update the stack again.

A stack goes into the UPDATE_ROLLBACK_FAILED state when CloudFormation can't roll back all changes after a failed stack update. For example, you might have a stack that's rolling back to an old database instance that was deleted outside of CloudFormation. Because CloudFormation doesn't know the database was deleted, it assumes that the database instance still exists and attempts to roll back to it, causing the update rollback to fail.

", "CreateChangeSet": "

Creates a list of changes that will be applied to a stack so that you can review the changes before executing them. You can create a change set for a stack that doesn't exist or an existing stack. If you create a change set for a stack that doesn't exist, the change set shows all of the resources that CloudFormation will create. If you create a change set for an existing stack, CloudFormation compares the stack's information with the information that you submit in the change set and lists the differences. Use change sets to understand which resources CloudFormation will create or change, and how it will change resources in an existing stack, before you create or update a stack.

To create a change set for a stack that doesn't exist, for the ChangeSetType parameter, specify CREATE. To create a change set for an existing stack, specify UPDATE for the ChangeSetType parameter. To create a change set for an import operation, specify IMPORT for the ChangeSetType parameter. After the CreateChangeSet call successfully completes, CloudFormation starts creating the change set. To check the status of the change set or to review it, use the DescribeChangeSet action.

When you are satisfied with the changes the change set will make, execute the change set by using the ExecuteChangeSet action. CloudFormation doesn't make changes until you execute the change set.

To create a change set for the entire stack hierarchy, set IncludeNestedStacks to True.

", @@ -69,10 +69,10 @@ "PublishType": "

Publishes the specified extension to the CloudFormation registry as a public extension in this Region. Public extensions are available for use by all CloudFormation users. For more information about publishing extensions, see Publishing extensions to make them available for public use in the CloudFormation CLI User Guide.

To publish an extension, you must be registered as a publisher with CloudFormation. For more information, see RegisterPublisher.

", "RecordHandlerProgress": "

Reports progress of a resource handler to CloudFormation.

Reserved for use by the CloudFormation CLI. Don't use this API in your code.

", "RegisterPublisher": "

Registers your account as a publisher of public extensions in the CloudFormation registry. Public extensions are available for use by all CloudFormation users. This publisher ID applies to your account in all Amazon Web Services Regions.

For information about requirements for registering as a public extension publisher, see Registering your account to publish CloudFormation extensions in the CloudFormation CLI User Guide.

", - "RegisterType": "

Registers an extension with the CloudFormation service. Registering an extension makes it available for use in CloudFormation templates in your Amazon Web Services account, and includes:

For more information about how to develop extensions and ready them for registration, see Creating Resource Providers in the CloudFormation CLI User Guide.

You can have a maximum of 50 resource extension versions registered at a time. This maximum is per account and per Region. Use DeregisterType to deregister specific extension versions if necessary.

Once you have initiated a registration request using RegisterType, you can use DescribeTypeRegistration to monitor the progress of the registration request.

Once you have registered a private extension in your account and Region, use SetTypeConfiguration to specify configuration properties for the extension. For more information, see Configuring extensions at the account level in the CloudFormation User Guide.

", + "RegisterType": "

Registers an extension with the CloudFormation service. Registering an extension makes it available for use in CloudFormation templates in your Amazon Web Services account, and includes:

For more information about how to develop extensions and ready them for registration, see Creating Resource Providers in the CloudFormation CLI User Guide.

You can have a maximum of 50 resource extension versions registered at a time. This maximum is per account and per Region. Use DeregisterType to deregister specific extension versions if necessary.

Once you have initiated a registration request using RegisterType, you can use DescribeTypeRegistration to monitor the progress of the registration request.

Once you have registered a private extension in your account and Region, use SetTypeConfiguration to specify configuration properties for the extension. For more information, see Configuring extensions at the account level in the CloudFormation User Guide.

", "RollbackStack": "

When specifying RollbackStack, you preserve the state of previously provisioned resources when an operation fails. You can check the status of the stack through the DescribeStacks operation.

Rolls back the specified stack to the last known stable state from CREATE_FAILED or UPDATE_FAILED stack statuses.

This operation will delete a stack if it doesn't contain a last known stable state. A last known stable state includes any status in a *_COMPLETE state. This includes the following stack statuses.

", "SetStackPolicy": "

Sets a stack policy for a specified stack.

", - "SetTypeConfiguration": "

Specifies the configuration data for a registered CloudFormation extension, in the given account and Region.

To view the current configuration data for an extension, refer to the ConfigurationSchema element of DescribeType. For more information, see Configuring extensions at the account level in the CloudFormation User Guide.

It's strongly recommended that you use dynamic references to restrict sensitive configuration definitions, such as third-party credentials. For more details on dynamic references, see Using dynamic references to specify template values in the CloudFormation User Guide.

", + "SetTypeConfiguration": "

Specifies the configuration data for a registered CloudFormation extension, in the given account and Region.

To view the current configuration data for an extension, refer to the ConfigurationSchema element of DescribeType. For more information, see Configuring extensions at the account level in the CloudFormation User Guide.

It's strongly recommended that you use dynamic references to restrict sensitive configuration definitions, such as third-party credentials. For more details on dynamic references, see Using dynamic references to specify template values in the CloudFormation User Guide.

", "SetTypeDefaultVersion": "

Specify the default version of an extension. The default version of an extension will be used in CloudFormation operations.

", "SignalResource": "

Sends a signal to the specified resource with a success or failure status. You can use the SignalResource operation in conjunction with a creation policy or update policy. CloudFormation doesn't proceed with a stack creation or update until resources receive the required number of signals or the timeout period is exceeded. The SignalResource operation is useful in cases where you want to send signals from anywhere other than an Amazon EC2 instance.

", "StartResourceScan": "

Starts a scan of the resources in this account in this Region. You can check the status of a scan using the ListResourceScans API action.

", @@ -520,7 +520,7 @@ "ConfigurationSchema": { "base": null, "refs": { - "DescribeTypeOutput$ConfigurationSchema": "

A JSON string that represent the current configuration data for the extension in this account and Region.

To set the configuration data for an extension, use SetTypeConfiguration. For more information, see Configuring extensions at the account level in the CloudFormation User Guide.

" + "DescribeTypeOutput$ConfigurationSchema": "

A JSON string that represent the current configuration data for the extension in this account and Region.

To set the configuration data for an extension, use SetTypeConfiguration. For more information, see Configuring extensions at the account level in the CloudFormation User Guide.

" } }, "ConnectionArn": { @@ -2011,6 +2011,12 @@ "PhysicalResourceIdContext$member": null } }, + "PolicyAction": { + "base": null, + "refs": { + "ResourceChange$PolicyAction": "

The action that will be taken on the physical resource when the change set is executed.
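For orientation, a minimal aws-sdk-go sketch that surfaces the new field from DescribeChangeSet output; the stack and change set names are placeholders:

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/cloudformation"
)

func main() {
	sess := session.Must(session.NewSession())
	cfn := cloudformation.New(sess)

	// Placeholder stack and change set names.
	out, err := cfn.DescribeChangeSet(&cloudformation.DescribeChangeSetInput{
		StackName:     aws.String("my-stack"),
		ChangeSetName: aws.String("my-change-set"),
	})
	if err != nil {
		panic(err)
	}
	for _, c := range out.Changes {
		rc := c.ResourceChange
		if rc != nil && rc.PolicyAction != nil {
			// PolicyAction is the field added in this release (e.g. Delete, Retain, Snapshot).
			fmt.Printf("%s: %s\n", aws.StringValue(rc.LogicalResourceId), aws.StringValue(rc.PolicyAction))
		}
	}
}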

" + } + }, "PrivateTypeArn": { "base": null, "refs": { @@ -3644,7 +3650,7 @@ } }, "TypeConfigurationDetails": { - "base": "

Detailed information concerning the specification of a CloudFormation extension in a given account and Region.

For more information, see Configuring extensions at the account level in the CloudFormation User Guide.

", + "base": "

Detailed information concerning the specification of a CloudFormation extension in a given account and Region.

For more information, see Configuring extensions at the account level in the CloudFormation User Guide.

", "refs": { "TypeConfigurationDetailsList$member": null } diff --git a/models/apis/datazone/2018-05-10/api-2.json b/models/apis/datazone/2018-05-10/api-2.json index 6c910896425..e22efc8064d 100644 --- a/models/apis/datazone/2018-05-10/api-2.json +++ b/models/apis/datazone/2018-05-10/api-2.json @@ -751,6 +751,25 @@ {"shape":"UnauthorizedException"} ] }, + "DeleteTimeSeriesDataPoints":{ + "name":"DeleteTimeSeriesDataPoints", + "http":{ + "method":"DELETE", + "requestUri":"/v2/domains/{domainIdentifier}/entities/{entityType}/{entityIdentifier}/time-series-data-points", + "responseCode":204 + }, + "input":{"shape":"DeleteTimeSeriesDataPointsInput"}, + "output":{"shape":"DeleteTimeSeriesDataPointsOutput"}, + "errors":[ + {"shape":"InternalServerException"}, + {"shape":"ResourceNotFoundException"}, + {"shape":"AccessDeniedException"}, + {"shape":"ThrottlingException"}, + {"shape":"ValidationException"}, + {"shape":"UnauthorizedException"} + ], + "idempotent":true + }, "GetAsset":{ "name":"GetAsset", "http":{ @@ -1135,6 +1154,24 @@ {"shape":"UnauthorizedException"} ] }, + "GetTimeSeriesDataPoint":{ + "name":"GetTimeSeriesDataPoint", + "http":{ + "method":"GET", + "requestUri":"/v2/domains/{domainIdentifier}/entities/{entityType}/{entityIdentifier}/time-series-data-points/{identifier}", + "responseCode":200 + }, + "input":{"shape":"GetTimeSeriesDataPointInput"}, + "output":{"shape":"GetTimeSeriesDataPointOutput"}, + "errors":[ + {"shape":"InternalServerException"}, + {"shape":"ResourceNotFoundException"}, + {"shape":"AccessDeniedException"}, + {"shape":"ThrottlingException"}, + {"shape":"ValidationException"}, + {"shape":"UnauthorizedException"} + ] + }, "GetUserProfile":{ "name":"GetUserProfile", "http":{ @@ -1482,6 +1519,45 @@ {"shape":"UnauthorizedException"} ] }, + "ListTimeSeriesDataPoints":{ + "name":"ListTimeSeriesDataPoints", + "http":{ + "method":"GET", + "requestUri":"/v2/domains/{domainIdentifier}/entities/{entityType}/{entityIdentifier}/time-series-data-points", + "responseCode":200 + }, + "input":{"shape":"ListTimeSeriesDataPointsInput"}, + "output":{"shape":"ListTimeSeriesDataPointsOutput"}, + "errors":[ + {"shape":"InternalServerException"}, + {"shape":"ResourceNotFoundException"}, + {"shape":"AccessDeniedException"}, + {"shape":"ThrottlingException"}, + {"shape":"ValidationException"}, + {"shape":"UnauthorizedException"} + ] + }, + "PostTimeSeriesDataPoints":{ + "name":"PostTimeSeriesDataPoints", + "http":{ + "method":"POST", + "requestUri":"/v2/domains/{domainIdentifier}/entities/{entityType}/{entityIdentifier}/time-series-data-points", + "responseCode":201 + }, + "input":{"shape":"PostTimeSeriesDataPointsInput"}, + "output":{"shape":"PostTimeSeriesDataPointsOutput"}, + "errors":[ + {"shape":"InternalServerException"}, + {"shape":"ResourceNotFoundException"}, + {"shape":"AccessDeniedException"}, + {"shape":"ThrottlingException"}, + {"shape":"ServiceQuotaExceededException"}, + {"shape":"ConflictException"}, + {"shape":"ValidationException"}, + {"shape":"UnauthorizedException"} + ], + "idempotent":true + }, "PutEnvironmentBlueprintConfiguration":{ "name":"PutEnvironmentBlueprintConfiguration", "http":{ @@ -2156,6 +2232,7 @@ "type":"structure", "members":{ "formsOutput":{"shape":"FormOutputList"}, + "latestTimeSeriesDataPointFormsOutput":{"shape":"TimeSeriesDataPointSummaryFormOutputList"}, "readOnlyFormsOutput":{"shape":"FormOutputList"} } }, @@ -2168,6 +2245,7 @@ "createdAt":{"shape":"CreatedAt"}, "forms":{"shape":"Forms"}, "glossaryTerms":{"shape":"DetailedGlossaryTerms"}, + 
"latestTimeSeriesDataPointForms":{"shape":"TimeSeriesDataPointSummaryFormOutputList"}, "owningProjectId":{"shape":"ProjectId"} } }, @@ -2203,7 +2281,8 @@ "AssetListingItemAdditionalAttributes":{ "type":"structure", "members":{ - "forms":{"shape":"Forms"} + "forms":{"shape":"Forms"}, + "latestTimeSeriesDataPointForms":{"shape":"TimeSeriesDataPointSummaryFormOutputList"} } }, "AssetName":{ @@ -2494,6 +2573,7 @@ "formsOutput":{"shape":"FormOutputList"}, "glossaryTerms":{"shape":"GlossaryTerms"}, "id":{"shape":"AssetId"}, + "latestTimeSeriesDataPointFormsOutput":{"shape":"TimeSeriesDataPointSummaryFormOutputList"}, "listing":{"shape":"AssetListingDetails"}, "name":{"shape":"AssetName"}, "owningProjectId":{"shape":"ProjectId"}, @@ -2557,6 +2637,7 @@ "formsOutput":{"shape":"FormOutputList"}, "glossaryTerms":{"shape":"GlossaryTerms"}, "id":{"shape":"AssetId"}, + "latestTimeSeriesDataPointFormsOutput":{"shape":"TimeSeriesDataPointSummaryFormOutputList"}, "listing":{"shape":"AssetListingDetails"}, "name":{"shape":"AssetName"}, "owningProjectId":{"shape":"ProjectId"}, @@ -2982,7 +3063,7 @@ "location":"uri", "locationName":"domainIdentifier" }, - "entityIdentifier":{"shape":"EntityId"}, + "entityIdentifier":{"shape":"EntityIdentifier"}, "entityRevision":{"shape":"Revision"}, "entityType":{"shape":"EntityType"} } @@ -3324,6 +3405,10 @@ "UNCHANGED" ] }, + "DataPointIdentifier":{ + "type":"string", + "pattern":"^[a-zA-Z0-9_-]{0,36}$" + }, "DataProductDescription":{ "type":"string", "max":4096, @@ -3987,6 +4072,48 @@ } } }, + "DeleteTimeSeriesDataPointsInput":{ + "type":"structure", + "required":[ + "domainIdentifier", + "entityIdentifier", + "entityType", + "formName" + ], + "members":{ + "clientToken":{ + "shape":"ClientToken", + "idempotencyToken":true, + "location":"querystring", + "locationName":"clientToken" + }, + "domainIdentifier":{ + "shape":"DomainId", + "location":"uri", + "locationName":"domainIdentifier" + }, + "entityIdentifier":{ + "shape":"EntityIdentifier", + "location":"uri", + "locationName":"entityIdentifier" + }, + "entityType":{ + "shape":"TimeSeriesEntityType", + "location":"uri", + "locationName":"entityType" + }, + "formName":{ + "shape":"TimeSeriesFormName", + "location":"querystring", + "locationName":"formName" + } + } + }, + "DeleteTimeSeriesDataPointsOutput":{ + "type":"structure", + "members":{ + } + }, "Deployment":{ "type":"structure", "members":{ @@ -4123,7 +4250,14 @@ "member":{"shape":"RegionName"}, "min":0 }, - "EntityId":{"type":"string"}, + "EntityId":{ + "type":"string", + "pattern":"^[a-zA-Z0-9_-]{1,36}$" + }, + "EntityIdentifier":{ + "type":"string", + "pattern":"^[a-zA-Z0-9_-]{1,36}$" + }, "EntityType":{ "type":"string", "enum":["ASSET"] @@ -4557,6 +4691,7 @@ "formsOutput":{"shape":"FormOutputList"}, "glossaryTerms":{"shape":"GlossaryTerms"}, "id":{"shape":"AssetId"}, + "latestTimeSeriesDataPointFormsOutput":{"shape":"TimeSeriesDataPointSummaryFormOutputList"}, "listing":{"shape":"AssetListingDetails"}, "name":{"shape":"AssetName"}, "owningProjectId":{"shape":"ProjectId"}, @@ -5427,6 +5562,53 @@ "updatedBy":{"shape":"UpdatedBy"} } }, + "GetTimeSeriesDataPointInput":{ + "type":"structure", + "required":[ + "domainIdentifier", + "entityIdentifier", + "entityType", + "formName", + "identifier" + ], + "members":{ + "domainIdentifier":{ + "shape":"DomainId", + "location":"uri", + "locationName":"domainIdentifier" + }, + "entityIdentifier":{ + "shape":"EntityIdentifier", + "location":"uri", + "locationName":"entityIdentifier" + }, + "entityType":{ + 
"shape":"TimeSeriesEntityType", + "location":"uri", + "locationName":"entityType" + }, + "formName":{ + "shape":"TimeSeriesFormName", + "location":"querystring", + "locationName":"formName" + }, + "identifier":{ + "shape":"TimeSeriesDataPointIdentifier", + "location":"uri", + "locationName":"identifier" + } + } + }, + "GetTimeSeriesDataPointOutput":{ + "type":"structure", + "members":{ + "domainId":{"shape":"DomainId"}, + "entityId":{"shape":"EntityId"}, + "entityType":{"shape":"TimeSeriesEntityType"}, + "form":{"shape":"TimeSeriesDataPointFormOutput"}, + "formName":{"shape":"TimeSeriesFormName"} + } + }, "GetUserProfileInput":{ "type":"structure", "required":[ @@ -5557,6 +5739,7 @@ "type":"structure", "required":["relationalFilterConfigurations"], "members":{ + "autoImportDataQualityResult":{"shape":"Boolean"}, "dataAccessRole":{"shape":"GlueRunConfigurationInputDataAccessRoleString"}, "relationalFilterConfigurations":{"shape":"RelationalFilterConfigurations"} } @@ -5570,6 +5753,7 @@ "required":["relationalFilterConfigurations"], "members":{ "accountId":{"shape":"GlueRunConfigurationOutputAccountIdString"}, + "autoImportDataQualityResult":{"shape":"Boolean"}, "dataAccessRole":{"shape":"GlueRunConfigurationOutputDataAccessRoleString"}, "region":{"shape":"GlueRunConfigurationOutputRegionString"}, "relationalFilterConfigurations":{"shape":"RelationalFilterConfigurations"} @@ -6544,6 +6728,64 @@ "tags":{"shape":"Tags"} } }, + "ListTimeSeriesDataPointsInput":{ + "type":"structure", + "required":[ + "domainIdentifier", + "entityIdentifier", + "entityType", + "formName" + ], + "members":{ + "domainIdentifier":{ + "shape":"DomainId", + "location":"uri", + "locationName":"domainIdentifier" + }, + "endedAt":{ + "shape":"Timestamp", + "location":"querystring", + "locationName":"endedAt" + }, + "entityIdentifier":{ + "shape":"EntityIdentifier", + "location":"uri", + "locationName":"entityIdentifier" + }, + "entityType":{ + "shape":"TimeSeriesEntityType", + "location":"uri", + "locationName":"entityType" + }, + "formName":{ + "shape":"TimeSeriesFormName", + "location":"querystring", + "locationName":"formName" + }, + "maxResults":{ + "shape":"MaxResults", + "location":"querystring", + "locationName":"maxResults" + }, + "nextToken":{ + "shape":"PaginationToken", + "location":"querystring", + "locationName":"nextToken" + }, + "startedAt":{ + "shape":"Timestamp", + "location":"querystring", + "locationName":"startedAt" + } + } + }, + "ListTimeSeriesDataPointsOutput":{ + "type":"structure", + "members":{ + "items":{"shape":"TimeSeriesDataPointSummaryFormOutputList"}, + "nextToken":{"shape":"PaginationToken"} + } + }, "ListingId":{ "type":"string", "pattern":"^[a-zA-Z0-9_-]{1,36}$" @@ -6778,6 +7020,46 @@ "max":8192, "min":1 }, + "PostTimeSeriesDataPointsInput":{ + "type":"structure", + "required":[ + "domainIdentifier", + "entityIdentifier", + "entityType", + "forms" + ], + "members":{ + "clientToken":{ + "shape":"ClientToken", + "idempotencyToken":true + }, + "domainIdentifier":{ + "shape":"DomainId", + "location":"uri", + "locationName":"domainIdentifier" + }, + "entityIdentifier":{ + "shape":"EntityIdentifier", + "location":"uri", + "locationName":"entityIdentifier" + }, + "entityType":{ + "shape":"TimeSeriesEntityType", + "location":"uri", + "locationName":"entityType" + }, + "forms":{"shape":"TimeSeriesDataPointFormInputList"} + } + }, + "PostTimeSeriesDataPointsOutput":{ + "type":"structure", + "members":{ + "domainId":{"shape":"DomainId"}, + "entityId":{"shape":"EntityId"}, + 
"entityType":{"shape":"TimeSeriesEntityType"}, + "forms":{"shape":"TimeSeriesDataPointFormOutputList"} + } + }, "PredictionChoices":{ "type":"list", "member":{"shape":"Integer"} @@ -7391,7 +7673,10 @@ }, "SearchOutputAdditionalAttribute":{ "type":"string", - "enum":["FORMS"] + "enum":[ + "FORMS", + "TIME_SERIES_DATA_POINT_FORMS" + ] }, "SearchOutputAdditionalAttributes":{ "type":"list", @@ -8048,6 +8333,96 @@ "exception":true, "retryable":{"throttling":false} }, + "TimeSeriesDataPointFormInput":{ + "type":"structure", + "required":[ + "formName", + "timestamp", + "typeIdentifier" + ], + "members":{ + "content":{"shape":"TimeSeriesDataPointFormInputContentString"}, + "formName":{"shape":"TimeSeriesFormName"}, + "timestamp":{"shape":"Timestamp"}, + "typeIdentifier":{"shape":"FormTypeIdentifier"}, + "typeRevision":{"shape":"Revision"} + } + }, + "TimeSeriesDataPointFormInputContentString":{ + "type":"string", + "max":500000, + "min":0 + }, + "TimeSeriesDataPointFormInputList":{ + "type":"list", + "member":{"shape":"TimeSeriesDataPointFormInput"} + }, + "TimeSeriesDataPointFormOutput":{ + "type":"structure", + "required":[ + "formName", + "timestamp", + "typeIdentifier" + ], + "members":{ + "content":{"shape":"TimeSeriesDataPointFormOutputContentString"}, + "formName":{"shape":"TimeSeriesFormName"}, + "id":{"shape":"DataPointIdentifier"}, + "timestamp":{"shape":"Timestamp"}, + "typeIdentifier":{"shape":"FormTypeIdentifier"}, + "typeRevision":{"shape":"Revision"} + } + }, + "TimeSeriesDataPointFormOutputContentString":{ + "type":"string", + "max":500000, + "min":0 + }, + "TimeSeriesDataPointFormOutputList":{ + "type":"list", + "member":{"shape":"TimeSeriesDataPointFormOutput"} + }, + "TimeSeriesDataPointIdentifier":{ + "type":"string", + "pattern":"^[a-zA-Z0-9_-]{1,36}$" + }, + "TimeSeriesDataPointSummaryFormOutput":{ + "type":"structure", + "required":[ + "formName", + "timestamp", + "typeIdentifier" + ], + "members":{ + "contentSummary":{"shape":"TimeSeriesDataPointSummaryFormOutputContentSummaryString"}, + "formName":{"shape":"TimeSeriesFormName"}, + "id":{"shape":"DataPointIdentifier"}, + "timestamp":{"shape":"Timestamp"}, + "typeIdentifier":{"shape":"FormTypeIdentifier"}, + "typeRevision":{"shape":"Revision"} + } + }, + "TimeSeriesDataPointSummaryFormOutputContentSummaryString":{ + "type":"string", + "max":20000, + "min":0 + }, + "TimeSeriesDataPointSummaryFormOutputList":{ + "type":"list", + "member":{"shape":"TimeSeriesDataPointSummaryFormOutput"} + }, + "TimeSeriesEntityType":{ + "type":"string", + "enum":[ + "ASSET", + "LISTING" + ] + }, + "TimeSeriesFormName":{ + "type":"string", + "max":128, + "min":1 + }, "Timestamp":{"type":"timestamp"}, "Timezone":{ "type":"string", diff --git a/models/apis/datazone/2018-05-10/docs-2.json b/models/apis/datazone/2018-05-10/docs-2.json index 502aec81e01..d3cd5216637 100644 --- a/models/apis/datazone/2018-05-10/docs-2.json +++ b/models/apis/datazone/2018-05-10/docs-2.json @@ -40,6 +40,7 @@ "DeleteSubscriptionGrant": "

Deletes a subscription grant in Amazon DataZone.

", "DeleteSubscriptionRequest": "

Deletes a subscription request in Amazon DataZone.

", "DeleteSubscriptionTarget": "

Deletes a subscription target in Amazon DataZone.

", + "DeleteTimeSeriesDataPoints": "

Deletes the specified time series form for the specified asset.

", "GetAsset": "

Gets an Amazon DataZone asset.

", "GetAssetType": "

Gets an Amazon DataZone asset type.

", "GetDataSource": "

Gets an Amazon DataZone data source.

", @@ -61,6 +62,7 @@ "GetSubscriptionGrant": "

Gets the subscription grant in Amazon DataZone.

", "GetSubscriptionRequestDetails": "

Gets the details of the specified subscription request.

", "GetSubscriptionTarget": "

Gets the subscription target in Amazon DataZone.

", + "GetTimeSeriesDataPoint": "

Gets the existing data point for the asset.

", "GetUserProfile": "

Gets a user profile in Amazon DataZone.

", "ListAssetRevisions": "

Lists the revisions for the asset.

", "ListDataSourceRunActivities": "

Lists data source run activities.

", @@ -80,6 +82,8 @@ "ListSubscriptionTargets": "

Lists subscription targets in Amazon DataZone.

", "ListSubscriptions": "

Lists subscriptions in Amazon DataZone.

", "ListTagsForResource": "

Lists tags for the specified resource in Amazon DataZone.

", + "ListTimeSeriesDataPoints": "

Lists time series data points.
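A minimal aws-sdk-go sketch of paging through these data points, assuming the Go members follow the usual capitalization of the model fields above (DomainIdentifier, EntityIdentifier, EntityType, FormName); all identifier values are placeholders. GetTimeSeriesDataPoint and DeleteTimeSeriesDataPoints address a form with the same domain, entity, and form parameters (Get additionally takes the data point identifier).

package example

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/datazone"
)

// listDataPoints pages through the time series data points of one form on one
// asset. All identifier values are placeholders.
func listDataPoints() error {
	client := datazone.New(session.Must(session.NewSession()))

	input := &datazone.ListTimeSeriesDataPointsInput{
		DomainIdentifier: aws.String("dzd_exampledomain"),
		EntityIdentifier: aws.String("exampleassetid"),
		EntityType:       aws.String("ASSET"),
		FormName:         aws.String("exampleTimeSeriesForm"),
	}
	for {
		out, err := client.ListTimeSeriesDataPoints(input)
		if err != nil {
			return err
		}
		for _, item := range out.Items {
			fmt.Println(aws.StringValue(item.FormName), aws.TimeValue(item.Timestamp))
		}
		if out.NextToken == nil {
			break
		}
		input.NextToken = out.NextToken
	}
	return nil
}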

", + "PostTimeSeriesDataPoints": "

Posts time series data points to Amazon DataZone for the specified asset.
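A minimal aws-sdk-go sketch of writing one data point, assuming the usual Go capitalization of the model members above; the identifiers, form name, type identifier, and content payload are illustrative placeholders:

package main

import (
	"time"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/datazone"
)

func main() {
	client := datazone.New(session.Must(session.NewSession()))

	// All identifiers and the form content below are placeholders.
	_, err := client.PostTimeSeriesDataPoints(&datazone.PostTimeSeriesDataPointsInput{
		DomainIdentifier: aws.String("dzd_exampledomain"),
		EntityIdentifier: aws.String("exampleassetid"),
		EntityType:       aws.String("ASSET"),
		Forms: []*datazone.TimeSeriesDataPointFormInput{{
			FormName:       aws.String("exampleTimeSeriesForm"),
			TypeIdentifier: aws.String("exampleFormType"),
			Timestamp:      aws.Time(time.Now()),
			Content:        aws.String(`{"rowCount": 42}`),
		}},
	})
	if err != nil {
		panic(err)
	}
}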

", "PutEnvironmentBlueprintConfiguration": "

Writes the configuration for the specified environment blueprint in Amazon DataZone.

", "RejectPredictions": "

Rejects automatically generated business-friendly metadata for your Amazon DataZone assets.

", "RejectSubscriptionRequest": "

Rejects the specified subscription request.

", @@ -388,6 +392,8 @@ "FormEntryOutput$required": "

Specifies whether a form entry is required.

", "GetDataSourceOutput$publishOnImport": "

Specifies whether the assets that this data source creates in the inventory are to be also automatically published to the catalog.

", "GetSubscriptionOutput$retainPermissions": "

The retain permissions of the subscription.

", + "GlueRunConfigurationInput$autoImportDataQualityResult": "

Specifies whether to automatically import data quality metrics as part of the data source run.

", + "GlueRunConfigurationOutput$autoImportDataQualityResult": "

Specifies whether to automatically import data quality metrics as part of the data source run.
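A minimal sketch of where the new flag sits in the Glue run configuration; the relational filter's DatabaseName member and its value are assumptions, and the resulting struct would be passed to a data source create or update call (not shown):

package example

import (
	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/datazone"
)

// glueConfigWithDataQualityImport builds a Glue run configuration that asks the
// data source run to also import data quality metrics. The database name is a
// placeholder.
func glueConfigWithDataQualityImport() *datazone.GlueRunConfigurationInput {
	return &datazone.GlueRunConfigurationInput{
		// AutoImportDataQualityResult is the member added in this release.
		AutoImportDataQualityResult: aws.Bool(true),
		RelationalFilterConfigurations: []*datazone.RelationalFilterConfiguration{{
			DatabaseName: aws.String("example_database"),
		}},
	}
}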

", "ListEnvironmentBlueprintsInput$managed": "

Specifies whether the environment blueprint is managed by Amazon DataZone.

", "RecommendationConfiguration$enableBusinessNameGeneration": "

Specifies whether automatic business name generation is to be enabled or not as part of the recommendation configuration.

", "RevokeSubscriptionInput$retainPermissions": "

Specifies whether permissions are retained when the subscription is revoked.

", @@ -439,6 +445,8 @@ "CreateGlossaryInput$clientToken": "

A unique, case-sensitive identifier that is provided to ensure the idempotency of the request.

", "CreateGlossaryTermInput$clientToken": "

A unique, case-sensitive identifier that is provided to ensure the idempotency of the request.

", "CreateListingChangeSetInput$clientToken": "

A unique, case-sensitive identifier that is provided to ensure the idempotency of the request.

", + "DeleteTimeSeriesDataPointsInput$clientToken": "

A unique, case-sensitive identifier to ensure idempotency of the request. This field is automatically populated if not provided.

", + "PostTimeSeriesDataPointsInput$clientToken": "

A unique, case-sensitive identifier that is provided to ensure the idempotency of the request.

", "RejectPredictionsInput$clientToken": "

A unique, case-sensitive identifier that is provided to ensure the idempotency of the request.

", "StartMetadataGenerationRunInput$clientToken": "

A unique, case-sensitive identifier to ensure idempotency of the request. This field is automatically populated if not provided.

", "UpdateGlossaryInput$clientToken": "

A unique, case-sensitive identifier that is provided to ensure the idempotency of the request.

" @@ -811,6 +819,13 @@ "ListDataSourceRunActivitiesInput$status": "

The status of the data source run.

" } }, + "DataPointIdentifier": { + "base": null, + "refs": { + "TimeSeriesDataPointFormOutput$id": "

The ID of the time series data points form.

", + "TimeSeriesDataPointSummaryFormOutput$id": "

The ID of the time series data points summary form.

" + } + }, "DataProductDescription": { "base": null, "refs": { @@ -1184,6 +1199,16 @@ "refs": { } }, + "DeleteTimeSeriesDataPointsInput": { + "base": null, + "refs": { + } + }, + "DeleteTimeSeriesDataPointsOutput": { + "base": null, + "refs": { + } + }, "Deployment": { "base": "

The details of the last deployment of the environment.

", "refs": { @@ -1374,6 +1399,7 @@ "DeleteSubscriptionGrantOutput$domainId": "

The ID of the Amazon DataZone domain in which the subscription grant is deleted.

", "DeleteSubscriptionRequestInput$domainIdentifier": "

The ID of the Amazon DataZone domain in which the subscription request is deleted.

", "DeleteSubscriptionTargetInput$domainIdentifier": "

The ID of the Amazon DataZone domain in which the subscription target is deleted.

", + "DeleteTimeSeriesDataPointsInput$domainIdentifier": "

The ID of the Amazon DataZone domain that houses the asset for which you want to delete a time series form.

", "DomainSummary$id": "

The ID of the Amazon DataZone domain.

", "EnvironmentBlueprintConfigurationItem$domainId": "

The identifier of the Amazon DataZone domain in which an environment blueprint exists.

", "EnvironmentProfileSummary$domainId": "

The identifier of the Amazon DataZone domain in which the environment profile exists.

", @@ -1422,6 +1448,8 @@ "GetSubscriptionRequestDetailsOutput$domainId": "

The Amazon DataZone domain of the subscription request.

", "GetSubscriptionTargetInput$domainIdentifier": "

The ID of the Amazon DataZone domain in which the subscription target exists.

", "GetSubscriptionTargetOutput$domainId": "

The ID of the Amazon DataZone domain in which the subscription target exists.

", + "GetTimeSeriesDataPointInput$domainIdentifier": "

The ID of the Amazon DataZone domain that houses the asset for which you want to get the data point.

", + "GetTimeSeriesDataPointOutput$domainId": "

The ID of the Amazon DataZone domain that houses the asset data point that you want to get.

", "GetUserProfileInput$domainIdentifier": "

The ID of the Amazon DataZone domain whose data portal you want to get.

", "GetUserProfileOutput$domainId": "

The identifier of the Amazon DataZone domain for which you want to get the user profile.

", "GlossaryItem$domainId": "

The identifier of the Amazon DataZone domain in which the business glossary exists.

", @@ -1443,8 +1471,11 @@ "ListSubscriptionRequestsInput$domainIdentifier": "

The identifier of the Amazon DataZone domain.

", "ListSubscriptionTargetsInput$domainIdentifier": "

The identifier of the Amazon DataZone domain where you want to list subscription targets.

", "ListSubscriptionsInput$domainIdentifier": "

The identifier of the Amazon DataZone domain.

", + "ListTimeSeriesDataPointsInput$domainIdentifier": "

The ID of the Amazon DataZone domain that houses the assets for which you want to list time series data points.

", "MetadataGenerationRunItem$domainId": "

The ID of the Amazon DataZone domain in which the metadata generation run was created.

", "NotificationOutput$domainIdentifier": "

The identifier of an Amazon DataZone domain in which the notification exists.

", + "PostTimeSeriesDataPointsInput$domainIdentifier": "

The ID of the Amazon DataZone domain in which you want to post time series data points.

", + "PostTimeSeriesDataPointsOutput$domainId": "

The ID of the Amazon DataZone domain in which you want to post time series data points.

", "ProjectSummary$domainId": "

The identifier of an Amazon DataZone domain where the project exists.

", "PutEnvironmentBlueprintConfigurationInput$domainIdentifier": "

The identifier of the Amazon DataZone domain.

", "PutEnvironmentBlueprintConfigurationOutput$domainId": "

The identifier of the Amazon DataZone domain.

", @@ -1552,7 +1583,18 @@ "EntityId": { "base": null, "refs": { - "CreateListingChangeSetInput$entityIdentifier": "

The ID of the asset.

" + "GetTimeSeriesDataPointOutput$entityId": "

The ID of the asset for which you want to get the data point.

", + "PostTimeSeriesDataPointsOutput$entityId": "

The ID of the asset for which you want to post time series data points.

" + } + }, + "EntityIdentifier": { + "base": null, + "refs": { + "CreateListingChangeSetInput$entityIdentifier": "

The ID of the asset.

", + "DeleteTimeSeriesDataPointsInput$entityIdentifier": "

The ID of the asset for which you want to delete a time series form.

", + "GetTimeSeriesDataPointInput$entityIdentifier": "

The ID of the asset for which you want to get the data point.

", + "ListTimeSeriesDataPointsInput$entityIdentifier": "

The ID of the asset for which you want to list data points.

", + "PostTimeSeriesDataPointsInput$entityIdentifier": "

The ID of the asset for which you want to post time series data points.

" } }, "EntityType": { @@ -1928,7 +1970,10 @@ "DeleteFormTypeInput$formTypeIdentifier": "

The ID of the metadata form type that is deleted.

", "FormEntryInput$typeIdentifier": "

The type ID of the form entry.

", "FormInput$typeIdentifier": "

The ID of the metadata form type.

", - "GetFormTypeInput$formTypeIdentifier": "

The ID of the metadata form type.

" + "GetFormTypeInput$formTypeIdentifier": "

The ID of the metadata form type.

", + "TimeSeriesDataPointFormInput$typeIdentifier": "

The ID of the type of the time series data points form.

", + "TimeSeriesDataPointFormOutput$typeIdentifier": "

The ID of the type of the time series data points form.

", + "TimeSeriesDataPointSummaryFormOutput$typeIdentifier": "

The type ID of the time series data points summary form.

" } }, "FormTypeName": { @@ -2195,6 +2240,16 @@ "refs": { } }, + "GetTimeSeriesDataPointInput": { + "base": null, + "refs": { + } + }, + "GetTimeSeriesDataPointOutput": { + "base": null, + "refs": { + } + }, "GetUserProfileInput": { "base": null, "refs": { @@ -2688,6 +2743,16 @@ "refs": { } }, + "ListTimeSeriesDataPointsInput": { + "base": null, + "refs": { + } + }, + "ListTimeSeriesDataPointsOutput": { + "base": null, + "refs": { + } + }, "ListingId": { "base": null, "refs": { @@ -2769,6 +2834,7 @@ "ListSubscriptionRequestsInput$maxResults": "

The maximum number of subscription requests to return in a single call to ListSubscriptionRequests. When the number of subscription requests to be listed is greater than the value of MaxResults, the response contains a NextToken value that you can use in a subsequent call to ListSubscriptionRequests to list the next set of subscription requests.

", "ListSubscriptionTargetsInput$maxResults": "

The maximum number of subscription targets to return in a single call to ListSubscriptionTargets. When the number of subscription targets to be listed is greater than the value of MaxResults, the response contains a NextToken value that you can use in a subsequent call to ListSubscriptionTargets to list the next set of subscription targets.

", "ListSubscriptionsInput$maxResults": "

The maximum number of subscriptions to return in a single call to ListSubscriptions. When the number of subscriptions to be listed is greater than the value of MaxResults, the response contains a NextToken value that you can use in a subsequent call to ListSubscriptions to list the next set of Subscriptions.

", + "ListTimeSeriesDataPointsInput$maxResults": "

The maximum number of data points to return in a single call to ListTimeSeriesDataPoints. When the number of data points to be listed is greater than the value of MaxResults, the response contains a NextToken value that you can use in a subsequent call to ListTimeSeriesDataPoints to list the next set of data points.
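For reference, a minimal aws-sdk-go (v1) sketch of paging through the new ListTimeSeriesDataPoints operation. It assumes the generated Go names (ListTimeSeriesDataPointsInput, ListTimeSeriesDataPointsPages, Items) follow the model and paginator added in this release; the domain ID, asset ID, form name, and the "ASSET" entity type string are placeholders, not values taken from this diff.

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/datazone"
)

func main() {
	svc := datazone.New(session.Must(session.NewSession()))

	// Page through the time series data points of one form on one asset.
	input := &datazone.ListTimeSeriesDataPointsInput{
		DomainIdentifier: aws.String("dzd_placeholder"),       // placeholder domain ID
		EntityIdentifier: aws.String("asset-placeholder"),     // placeholder asset ID
		EntityType:       aws.String("ASSET"),                 // assumed TimeSeriesEntityType value
		FormName:         aws.String("exampleTimeSeriesForm"), // placeholder form name
		MaxResults:       aws.Int64(25),
	}

	err := svc.ListTimeSeriesDataPointsPages(input,
		func(page *datazone.ListTimeSeriesDataPointsOutput, lastPage bool) bool {
			for _, item := range page.Items {
				fmt.Println(aws.StringValue(item.FormName), aws.TimeValue(item.Timestamp))
			}
			return !lastPage // the paginator feeds NextToken back in between pages
		})
	if err != nil {
		fmt.Println("list failed:", err)
	}
}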

", "SearchGroupProfilesInput$maxResults": "

The maximum number of results to return in a single call to SearchGroupProfiles. When the number of results to be listed is greater than the value of MaxResults, the response contains a NextToken value that you can use in a subsequent call to SearchGroupProfiles to list the next set of results.

", "SearchInput$maxResults": "

The maximum number of results to return in a single call to Search. When the number of results to be listed is greater than the value of MaxResults, the response contains a NextToken value that you can use in a subsequent call to Search to list the next set of results.

", "SearchListingsInput$maxResults": "

The maximum number of results to return in a single call to SearchListings. When the number of results to be listed is greater than the value of MaxResults, the response contains a NextToken value that you can use in a subsequent call to SearchListings to list the next set of results.

", @@ -2965,6 +3031,8 @@ "ListSubscriptionTargetsOutput$nextToken": "

When the number of subscription targets is greater than the default value for the MaxResults parameter, or if you explicitly specify a value for MaxResults that is less than the number of subscription targets, the response includes a pagination token named NextToken. You can specify this NextToken value in a subsequent call to ListSubscriptionTargets to list the next set of subscription targets.

", "ListSubscriptionsInput$nextToken": "

When the number of subscriptions is greater than the default value for the MaxResults parameter, or if you explicitly specify a value for MaxResults that is less than the number of subscriptions, the response includes a pagination token named NextToken. You can specify this NextToken value in a subsequent call to ListSubscriptions to list the next set of subscriptions.

", "ListSubscriptionsOutput$nextToken": "

When the number of subscriptions is greater than the default value for the MaxResults parameter, or if you explicitly specify a value for MaxResults that is less than the number of subscriptions, the response includes a pagination token named NextToken. You can specify this NextToken value in a subsequent call to ListSubscriptions to list the next set of subscriptions.

", + "ListTimeSeriesDataPointsInput$nextToken": "

When the number of data points is greater than the default value for the MaxResults parameter, or if you explicitly specify a value for MaxResults that is less than the number of data points, the response includes a pagination token named NextToken. You can specify this NextToken value in a subsequent call to ListTimeSeriesDataPoints to list the next set of data points.

", + "ListTimeSeriesDataPointsOutput$nextToken": "

When the number of data points is greater than the default value for the MaxResults parameter, or if you explicitly specify a value for MaxResults that is less than the number of data points, the response includes a pagination token named NextToken. You can specify this NextToken value in a subsequent call to ListTimeSeriesDataPoints to list the next set of data points.

", "SearchGroupProfilesInput$nextToken": "

When the number of results is greater than the default value for the MaxResults parameter, or if you explicitly specify a value for MaxResults that is less than the number of results, the response includes a pagination token named NextToken. You can specify this NextToken value in a subsequent call to SearchGroupProfiles to list the next set of results.

", "SearchGroupProfilesOutput$nextToken": "

When the number of results is greater than the default value for the MaxResults parameter, or if you explicitly specify a value for MaxResults that is less than the number of results, the response includes a pagination token named NextToken. You can specify this NextToken value in a subsequent call to SearchGroupProfiles to list the next set of results.

", "SearchInput$nextToken": "

When the number of results is greater than the default value for the MaxResults parameter, or if you explicitly specify a value for MaxResults that is less than the number of results, the response includes a pagination token named NextToken. You can specify this NextToken value in a subsequent call to Search to list the next set of results.

", @@ -2977,6 +3045,16 @@ "SearchUserProfilesOutput$nextToken": "

When the number of results is greater than the default value for the MaxResults parameter, or if you explicitly specify a value for MaxResults that is less than the number of results, the response includes a pagination token named NextToken. You can specify this NextToken value in a subsequent call to SearchUserProfiles to list the next set of results.

" } }, + "PostTimeSeriesDataPointsInput": { + "base": null, + "refs": { + } + }, + "PostTimeSeriesDataPointsOutput": { + "base": null, + "refs": { + } + }, "PredictionChoices": { "base": null, "refs": { @@ -3412,7 +3490,10 @@ "RejectPredictionsOutput$assetRevision": "

The revision that is to be made to the asset.

", "SubscribedAsset$assetRevision": "

The revision of the asset for which the subscription grant is created.

", "SubscribedAssetListing$entityRevision": "

The revision of the published asset for which the subscription grant is created.

", - "SubscribedListing$revision": "

The revision of the published asset for which the subscription grant is created.

" + "SubscribedListing$revision": "

The revision of the published asset for which the subscription grant is created.

", + "TimeSeriesDataPointFormInput$typeRevision": "

The type revision of the time series data points form.

", + "TimeSeriesDataPointFormOutput$typeRevision": "

The type revision of the time series data points form.

", + "TimeSeriesDataPointSummaryFormOutput$typeRevision": "

The type revision of the time series data points summary form.

" } }, "RevokeSubscriptionInput": { @@ -4221,15 +4302,110 @@ "refs": { } }, + "TimeSeriesDataPointFormInput": { + "base": "

The time series data points form.

", + "refs": { + "TimeSeriesDataPointFormInputList$member": null + } + }, + "TimeSeriesDataPointFormInputContentString": { + "base": null, + "refs": { + "TimeSeriesDataPointFormInput$content": "

The content of the time series data points form.

" + } + }, + "TimeSeriesDataPointFormInputList": { + "base": null, + "refs": { + "PostTimeSeriesDataPointsInput$forms": "

The forms that contain the data points that you want to post.
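For illustration, a hedged v1 Go SDK sketch of posting a single form of data points; the exported field names are assumed to mirror the model entries above, and the domain, asset, form name, form type, and JSON content are invented placeholders.

package main

import (
	"fmt"
	"time"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/datazone"
)

func main() {
	svc := datazone.New(session.Must(session.NewSession()))

	// Post one time series data point to a form on an asset.
	out, err := svc.PostTimeSeriesDataPoints(&datazone.PostTimeSeriesDataPointsInput{
		DomainIdentifier: aws.String("dzd_placeholder"),   // placeholder domain ID
		EntityIdentifier: aws.String("asset-placeholder"), // placeholder asset ID
		EntityType:       aws.String("ASSET"),             // assumed entity type value
		Forms: []*datazone.TimeSeriesDataPointFormInput{{
			FormName:       aws.String("exampleTimeSeriesForm"),      // placeholder form name
			TypeIdentifier: aws.String("example.TimeSeriesFormType"), // placeholder form type
			Timestamp:      aws.Time(time.Now()),
			Content:        aws.String(`{"rowCount": 1024}`), // placeholder JSON payload
		}},
	})
	if err != nil {
		fmt.Println("post failed:", err)
		return
	}
	fmt.Println("posted", len(out.Forms), "form(s)")
}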

" + } + }, + "TimeSeriesDataPointFormOutput": { + "base": "

The time series data points form.

", + "refs": { + "GetTimeSeriesDataPointOutput$form": "

The time series form that houses the data point that you want to get.

", + "TimeSeriesDataPointFormOutputList$member": null + } + }, + "TimeSeriesDataPointFormOutputContentString": { + "base": null, + "refs": { + "TimeSeriesDataPointFormOutput$content": "

The content of the time series data points form.

" + } + }, + "TimeSeriesDataPointFormOutputList": { + "base": null, + "refs": { + "PostTimeSeriesDataPointsOutput$forms": "

The forms that contain the data points that you have posted.

" + } + }, + "TimeSeriesDataPointIdentifier": { + "base": null, + "refs": { + "GetTimeSeriesDataPointInput$identifier": "

The ID of the data point that you want to get.

" + } + }, + "TimeSeriesDataPointSummaryFormOutput": { + "base": "

The summary of the time series data points form.

", + "refs": { + "TimeSeriesDataPointSummaryFormOutputList$member": null + } + }, + "TimeSeriesDataPointSummaryFormOutputContentSummaryString": { + "base": null, + "refs": { + "TimeSeriesDataPointSummaryFormOutput$contentSummary": "

The content of the summary of the time series data points form.

" + } + }, + "TimeSeriesDataPointSummaryFormOutputList": { + "base": null, + "refs": { + "AssetItemAdditionalAttributes$latestTimeSeriesDataPointFormsOutput": "

The latest time series data points forms included in the additional attributes of an asset.

", + "AssetListing$latestTimeSeriesDataPointForms": "

The latest time series data points forms included in the additional attributes of an asset.

", + "AssetListingItemAdditionalAttributes$latestTimeSeriesDataPointForms": "

The latest time series data points forms included in the additional attributes of an asset.

", + "CreateAssetOutput$latestTimeSeriesDataPointFormsOutput": "

The latest data point that was imported into the time series form for the asset.

", + "CreateAssetRevisionOutput$latestTimeSeriesDataPointFormsOutput": "

The latest data point that was imported into the time series form for the asset.

", + "GetAssetOutput$latestTimeSeriesDataPointFormsOutput": "

The latest data point that was imported into the time series form for the asset.

", + "ListTimeSeriesDataPointsOutput$items": "

The results of the ListTimeSeriesDataPoints action.

" + } + }, + "TimeSeriesEntityType": { + "base": null, + "refs": { + "DeleteTimeSeriesDataPointsInput$entityType": "

The type of the asset for which you want to delete a time series form.

", + "GetTimeSeriesDataPointInput$entityType": "

The type of the asset for which you want to get the data point.

", + "GetTimeSeriesDataPointOutput$entityType": "

The type of the asset for which you want to get the data point.

", + "ListTimeSeriesDataPointsInput$entityType": "

The type of the asset for which you want to list data points.

", + "PostTimeSeriesDataPointsInput$entityType": "

The type of the asset for which you want to post data points.

", + "PostTimeSeriesDataPointsOutput$entityType": "

The type of the asset for which you want to post data points.

" + } + }, + "TimeSeriesFormName": { + "base": null, + "refs": { + "DeleteTimeSeriesDataPointsInput$formName": "

The name of the time series form that you want to delete.

", + "GetTimeSeriesDataPointInput$formName": "

The name of the time series form that houses the data point that you want to get.

", + "GetTimeSeriesDataPointOutput$formName": "

The name of the time series form that houses the data point that you want to get.

", + "ListTimeSeriesDataPointsInput$formName": "

The name of the time series data points form.

", + "TimeSeriesDataPointFormInput$formName": "

The name of the time series data points form.

", + "TimeSeriesDataPointFormOutput$formName": "

The name of the time series data points form.

", + "TimeSeriesDataPointSummaryFormOutput$formName": "

The name of the time series data points summary form.

" + } + }, "Timestamp": { "base": null, "refs": { "ListNotificationsInput$afterTimestamp": "

The time after which you want to list notifications.

", "ListNotificationsInput$beforeTimestamp": "

The time before which you want to list notifications.

", + "ListTimeSeriesDataPointsInput$endedAt": "

The timestamp at which the data points that you want to list ended.

", + "ListTimeSeriesDataPointsInput$startedAt": "

The timestamp at which the data points that you want to list started.

", "NotificationOutput$creationTimestamp": "

The timestamp of when a notification was created.

", "NotificationOutput$lastUpdatedTimestamp": "

The timestamp of when the notification was last updated.

", "SubscribedAsset$failureTimestamp": "

The failure timestamp included in the details of the asset for which the subscription grant is created.

", - "SubscribedAsset$grantedTimestamp": "

The timestamp of when the subscription grant to the asset is created.

" + "SubscribedAsset$grantedTimestamp": "

The timestamp of when the subscription grant to the asset is created.

", + "TimeSeriesDataPointFormInput$timestamp": "

The timestamp of the time series data points form.

", + "TimeSeriesDataPointFormOutput$timestamp": "

The timestamp of the time series data points form.

", + "TimeSeriesDataPointSummaryFormOutput$timestamp": "

The timestamp of the time series data points summary form.

" } }, "Timezone": { diff --git a/models/apis/datazone/2018-05-10/paginators-1.json b/models/apis/datazone/2018-05-10/paginators-1.json index dc8c40cdab5..066ba41d2f3 100644 --- a/models/apis/datazone/2018-05-10/paginators-1.json +++ b/models/apis/datazone/2018-05-10/paginators-1.json @@ -102,6 +102,12 @@ "limit_key": "maxResults", "result_key": "items" }, + "ListTimeSeriesDataPoints": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "items" + }, "Search": { "input_token": "nextToken", "output_token": "nextToken", diff --git a/models/apis/docdb/2014-10-31/api-2.json b/models/apis/docdb/2014-10-31/api-2.json index 32d8cb003bf..041f2e05954 100644 --- a/models/apis/docdb/2014-10-31/api-2.json +++ b/models/apis/docdb/2014-10-31/api-2.json @@ -929,6 +929,24 @@ {"shape":"InvalidDBClusterStateFault"}, {"shape":"InvalidDBInstanceStateFault"} ] + }, + "SwitchoverGlobalCluster":{ + "name":"SwitchoverGlobalCluster", + "http":{ + "method":"POST", + "requestUri":"/" + }, + "input":{"shape":"SwitchoverGlobalClusterMessage"}, + "output":{ + "shape":"SwitchoverGlobalClusterResult", + "resultWrapper":"SwitchoverGlobalClusterResult" + }, + "errors":[ + {"shape":"GlobalClusterNotFoundFault"}, + {"shape":"InvalidGlobalClusterStateFault"}, + {"shape":"DBClusterNotFoundFault"}, + {"shape":"InvalidDBClusterStateFault"} + ] } }, "shapes":{ @@ -3108,6 +3126,23 @@ }, "exception":true }, + "SwitchoverGlobalClusterMessage":{ + "type":"structure", + "required":[ + "GlobalClusterIdentifier", + "TargetDbClusterIdentifier" + ], + "members":{ + "GlobalClusterIdentifier":{"shape":"GlobalClusterIdentifier"}, + "TargetDbClusterIdentifier":{"shape":"String"} + } + }, + "SwitchoverGlobalClusterResult":{ + "type":"structure", + "members":{ + "GlobalCluster":{"shape":"GlobalCluster"} + } + }, "TStamp":{"type":"timestamp"}, "Tag":{ "type":"structure", diff --git a/models/apis/docdb/2014-10-31/docs-2.json b/models/apis/docdb/2014-10-31/docs-2.json index 5d897c615bd..34f510e705b 100644 --- a/models/apis/docdb/2014-10-31/docs-2.json +++ b/models/apis/docdb/2014-10-31/docs-2.json @@ -54,7 +54,8 @@ "RestoreDBClusterFromSnapshot": "

Creates a new cluster from a snapshot or cluster snapshot.

If a snapshot is specified, the target cluster is created from the source DB snapshot with a default configuration and default security group.

If a cluster snapshot is specified, the target cluster is created from the source cluster restore point with the same configuration as the original source DB cluster, except that the new cluster is created with the default security group.

", "RestoreDBClusterToPointInTime": "

Restores a cluster to an arbitrary point in time. Users can restore to any point in time before LatestRestorableTime for up to BackupRetentionPeriod days. The target cluster is created from the source cluster with the same configuration as the original cluster, except that the new cluster is created with the default security group.

", "StartDBCluster": "

Restarts the stopped cluster that is specified by DBClusterIdentifier. For more information, see Stopping and Starting an Amazon DocumentDB Cluster.

", - "StopDBCluster": "

Stops the running cluster that is specified by DBClusterIdentifier. The cluster must be in the available state. For more information, see Stopping and Starting an Amazon DocumentDB Cluster.

" + "StopDBCluster": "

Stops the running cluster that is specified by DBClusterIdentifier. The cluster must be in the available state. For more information, see Stopping and Starting an Amazon DocumentDB Cluster.

", + "SwitchoverGlobalCluster": "

Switches over the specified secondary Amazon DocumentDB cluster to be the new primary Amazon DocumentDB cluster in the global database cluster.
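For reference, a minimal Go (SDK v1) sketch of calling the new switchover operation; the input type name SwitchoverGlobalClusterInput follows the SDK's usual generation of the SwitchoverGlobalClusterMessage shape above, and the identifiers are placeholders.

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/docdb"
)

func main() {
	svc := docdb.New(session.Must(session.NewSession()))

	// Promote a secondary cluster (identified by ARN) to primary within the global cluster.
	out, err := svc.SwitchoverGlobalCluster(&docdb.SwitchoverGlobalClusterInput{
		GlobalClusterIdentifier:   aws.String("my-global-cluster"),                                      // placeholder name
		TargetDbClusterIdentifier: aws.String("arn:aws:rds:us-west-2:123456789012:cluster:my-secondary"), // placeholder ARN
	})
	if err != nil {
		fmt.Println("switchover failed:", err)
		return
	}
	fmt.Println("global cluster status:", aws.StringValue(out.GlobalCluster.Status))
}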

" }, "shapes": { "AddSourceIdentifierToSubscriptionMessage": { @@ -887,7 +888,8 @@ "DeleteGlobalClusterResult$GlobalCluster": null, "GlobalClusterList$member": null, "ModifyGlobalClusterResult$GlobalCluster": null, - "RemoveFromGlobalClusterResult$GlobalCluster": null + "RemoveFromGlobalClusterResult$GlobalCluster": null, + "SwitchoverGlobalClusterResult$GlobalCluster": null } }, "GlobalClusterAlreadyExistsFault": { @@ -905,7 +907,8 @@ "GlobalCluster$GlobalClusterIdentifier": "

Contains a user-supplied global cluster identifier. This identifier is the unique key that identifies a global cluster.

", "ModifyGlobalClusterMessage$GlobalClusterIdentifier": "

The identifier for the global cluster being modified. This parameter isn't case-sensitive.

Constraints:

", "ModifyGlobalClusterMessage$NewGlobalClusterIdentifier": "

The new identifier for a global cluster when you modify a global cluster. This value is stored as a lowercase string.

Example: my-cluster2

", - "RemoveFromGlobalClusterMessage$GlobalClusterIdentifier": "

The cluster identifier to detach from the Amazon DocumentDB global cluster.

" + "RemoveFromGlobalClusterMessage$GlobalClusterIdentifier": "

The cluster identifier to detach from the Amazon DocumentDB global cluster.

", + "SwitchoverGlobalClusterMessage$GlobalClusterIdentifier": "

The identifier of the Amazon DocumentDB global database cluster to switch over. The identifier is the unique key assigned by the user when the cluster is created. In other words, it's the name of the global cluster. This parameter isn’t case-sensitive.

Constraints:

Pattern: [A-Za-z][0-9A-Za-z-:._]*

" } }, "GlobalClusterList": { @@ -1682,6 +1685,7 @@ "Subnet$SubnetIdentifier": "

Specifies the identifier of the subnet.

", "Subnet$SubnetStatus": "

Specifies the status of the subnet.

", "SubnetIdentifierList$member": null, + "SwitchoverGlobalClusterMessage$TargetDbClusterIdentifier": "

The identifier of the secondary Amazon DocumentDB cluster to promote to the new primary for the global database cluster. Use the Amazon Resource Name (ARN) for the identifier so that Amazon DocumentDB can locate the cluster in its Amazon Web Services Region.

Constraints:

Pattern: [A-Za-z][0-9A-Za-z-:._]*

", "Tag$Key": "

The required name of the tag. The string value can be from 1 to 128 Unicode characters in length and can't be prefixed with \"aws:\" or \"rds:\". The string can contain only the set of Unicode letters, digits, white space, '_', '.', '/', '=', '+', '-' (Java regex: \"^([\\\\p{L}\\\\p{Z}\\\\p{N}_.:/=+\\\\-]*)$\").

", "Tag$Value": "

The optional value of the tag. The string value can be from 1 to 256 Unicode characters in length and can't be prefixed with \"aws:\" or \"rds:\". The string can contain only the set of Unicode letters, digits, white space, '_', '.', '/', '=', '+', '-' (Java regex: \"^([\\\\p{L}\\\\p{Z}\\\\p{N}_.:/=+\\\\-]*)$\").

", "UpgradeTarget$Engine": "

The name of the upgrade target database engine.

", @@ -1731,6 +1735,16 @@ "refs": { } }, + "SwitchoverGlobalClusterMessage": { + "base": null, + "refs": { + } + }, + "SwitchoverGlobalClusterResult": { + "base": null, + "refs": { + } + }, "TStamp": { "base": null, "refs": { diff --git a/models/apis/groundstation/2019-05-23/api-2.json b/models/apis/groundstation/2019-05-23/api-2.json index 0b71503dbef..2634da33f95 100644 --- a/models/apis/groundstation/2019-05-23/api-2.json +++ b/models/apis/groundstation/2019-05-23/api-2.json @@ -819,7 +819,9 @@ "region":{"shape":"String"}, "satelliteArn":{"shape":"satelliteArn"}, "startTime":{"shape":"Timestamp"}, - "tags":{"shape":"TagsMap"} + "tags":{"shape":"TagsMap"}, + "visibilityEndTime":{"shape":"Timestamp"}, + "visibilityStartTime":{"shape":"Timestamp"} } }, "ContactIdResponse":{ @@ -1097,7 +1099,9 @@ "region":{"shape":"String"}, "satelliteArn":{"shape":"satelliteArn"}, "startTime":{"shape":"Timestamp"}, - "tags":{"shape":"TagsMap"} + "tags":{"shape":"TagsMap"}, + "visibilityEndTime":{"shape":"Timestamp"}, + "visibilityStartTime":{"shape":"Timestamp"} } }, "DescribeEphemerisRequest":{ diff --git a/models/apis/groundstation/2019-05-23/docs-2.json b/models/apis/groundstation/2019-05-23/docs-2.json index 3c28b78a241..871e59f859d 100644 --- a/models/apis/groundstation/2019-05-23/docs-2.json +++ b/models/apis/groundstation/2019-05-23/docs-2.json @@ -494,12 +494,12 @@ "DurationInSeconds": { "base": null, "refs": { - "CreateMissionProfileRequest$contactPostPassDurationSeconds": "

Amount of time after a contact ends that you’d like to receive a CloudWatch event indicating the pass has finished.

", - "CreateMissionProfileRequest$contactPrePassDurationSeconds": "

Amount of time prior to contact start you’d like to receive a CloudWatch event indicating an upcoming pass.

", + "CreateMissionProfileRequest$contactPostPassDurationSeconds": "

Amount of time after a contact ends that you’d like to receive a Ground Station Contact State Change event indicating the pass has finished.

", + "CreateMissionProfileRequest$contactPrePassDurationSeconds": "

Amount of time prior to contact start you’d like to receive a Ground Station Contact State Change event indicating an upcoming pass.

", "GetMissionProfileResponse$contactPostPassDurationSeconds": "

Amount of time after a contact ends that you’d like to receive a CloudWatch event indicating the pass has finished.

", "GetMissionProfileResponse$contactPrePassDurationSeconds": "

Amount of time prior to contact start you’d like to receive a CloudWatch event indicating an upcoming pass.

", - "UpdateMissionProfileRequest$contactPostPassDurationSeconds": "

Amount of time after a contact ends that you’d like to receive a CloudWatch event indicating the pass has finished.

", - "UpdateMissionProfileRequest$contactPrePassDurationSeconds": "

Amount of time after a contact ends that you’d like to receive a CloudWatch event indicating the pass has finished.

" + "UpdateMissionProfileRequest$contactPostPassDurationSeconds": "

Amount of time after a contact ends that you’d like to receive a Ground Station Contact State Change event indicating the pass has finished.

", + "UpdateMissionProfileRequest$contactPrePassDurationSeconds": "

Amount of time prior to contact start you’d like to receive a Ground Station Contact State Change event indicating an upcoming pass.

" } }, "Eirp": { @@ -1266,11 +1266,15 @@ "ContactData$postPassEndTime": "

Amount of time after a contact ends that you’d like to receive a CloudWatch event indicating the pass has finished.

", "ContactData$prePassStartTime": "

Amount of time prior to contact start you’d like to receive a CloudWatch event indicating an upcoming pass.

", "ContactData$startTime": "

Start time of a contact in UTC.

", + "ContactData$visibilityEndTime": "

Projected time in UTC your satellite will set below the receive mask. This time is based on the satellite's current active ephemeris for future contacts and the ephemeris that was active during contact execution for completed contacts. This field is not present for contacts with a SCHEDULING or SCHEDULED status.

", + "ContactData$visibilityStartTime": "

Projected time in UTC your satellite will rise above the receive mask. This time is based on the satellite's current active ephemeris for future contacts and the ephemeris that was active during contact execution for completed contacts. This field is not present for contacts with a SCHEDULING or SCHEDULED status.

", "CreateEphemerisRequest$expirationTime": "

An overall expiration time for the ephemeris in UTC, after which it will become EXPIRED.

", "DescribeContactResponse$endTime": "

End time of a contact in UTC.

", "DescribeContactResponse$postPassEndTime": "

Amount of time after a contact ends that you’d like to receive a CloudWatch event indicating the pass has finished.

", "DescribeContactResponse$prePassStartTime": "

Amount of time prior to contact start you’d like to receive a CloudWatch event indicating an upcoming pass.

", "DescribeContactResponse$startTime": "

Start time of a contact in UTC.

", + "DescribeContactResponse$visibilityEndTime": "

Projected time in UTC your satellite will set below the receive mask. This time is based on the satellite's current active ephemeris for future contacts and the ephemeris that was active during contact execution for completed contacts.

", + "DescribeContactResponse$visibilityStartTime": "

Projected time in UTC your satellite will rise above the receive mask. This time is based on the satellite's current active ephemeris for future contacts and the ephemeris that was active during contact execution for completed contacts.
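A minimal Go (v1 SDK) sketch of reading the new visibility window from DescribeContact; the contact ID is a placeholder and the generated field names VisibilityStartTime and VisibilityEndTime are assumed from the model change above.

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/groundstation"
)

func main() {
	svc := groundstation.New(session.Must(session.NewSession()))

	out, err := svc.DescribeContact(&groundstation.DescribeContactInput{
		ContactId: aws.String("98765432-dcba-4321-dcba-0123456789ab"), // placeholder contact ID
	})
	if err != nil {
		fmt.Println("describe failed:", err)
		return
	}

	// Visibility window: when the satellite rises above and sets below the receive mask.
	fmt.Println("visibility start:", aws.TimeValue(out.VisibilityStartTime))
	fmt.Println("visibility end:  ", aws.TimeValue(out.VisibilityEndTime))
}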

", "DescribeEphemerisResponse$creationTime": "

The time the ephemeris was uploaded in UTC.

", "EphemerisItem$creationTime": "

The time the ephemeris was uploaded in UTC.

", "EphemerisMetaData$epoch": "

The epoch of a default ephemeris from Space Track in UTC.

This field is not populated for customer-provided ephemerides.

", diff --git a/models/apis/lambda/2015-03-31/api-2.json b/models/apis/lambda/2015-03-31/api-2.json index 621ec829a69..fe7c513fb3a 100644 --- a/models/apis/lambda/2015-03-31/api-2.json +++ b/models/apis/lambda/2015-03-31/api-2.json @@ -3900,6 +3900,7 @@ "python3.10", "java17", "ruby3.2", + "ruby3.3", "python3.11", "nodejs20.x", "provided.al2023", diff --git a/models/apis/lambda/2015-03-31/docs-2.json b/models/apis/lambda/2015-03-31/docs-2.json index 3db0d9eb57f..09a2f27b4f3 100644 --- a/models/apis/lambda/2015-03-31/docs-2.json +++ b/models/apis/lambda/2015-03-31/docs-2.json @@ -1052,9 +1052,9 @@ "ImageConfig": { "base": "

Configuration values that override the container image Dockerfile settings. For more information, see Container image settings.

", "refs": { - "CreateFunctionRequest$ImageConfig": "

Container image configuration values that override the values in the container image Dockerfile.

", + "CreateFunctionRequest$ImageConfig": "

Container image configuration values that override the values in the container image Dockerfile.

", "ImageConfigResponse$ImageConfig": "

Configuration values that override the container image Dockerfile.

", - "UpdateFunctionConfigurationRequest$ImageConfig": "

Container image configuration values that override the values in the container image Docker file.

" + "UpdateFunctionConfigurationRequest$ImageConfig": "

Container image configuration values that override the values in the container image Dockerfile.
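As an aside, a hedged v1 Go SDK sketch of overriding the image's Dockerfile settings when updating a container-image function; the function name and the override values are placeholders.

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/lambda"
)

func main() {
	svc := lambda.New(session.Must(session.NewSession()))

	// Override ENTRYPOINT, CMD, and WORKDIR from the image's Dockerfile.
	_, err := svc.UpdateFunctionConfiguration(&lambda.UpdateFunctionConfigurationInput{
		FunctionName: aws.String("my-container-function"), // placeholder function name
		ImageConfig: &lambda.ImageConfig{
			EntryPoint:       aws.StringSlice([]string{"/usr/local/bin/bootstrap"}), // placeholder
			Command:          aws.StringSlice([]string{"app.handler"}),              // placeholder
			WorkingDirectory: aws.String("/var/task"),
		},
	})
	if err != nil {
		fmt.Println("update failed:", err)
	}
}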

" } }, "ImageConfigError": { diff --git a/models/apis/medialive/2017-10-14/api-2.json b/models/apis/medialive/2017-10-14/api-2.json index daab8578105..4c7fa8ef136 100644 --- a/models/apis/medialive/2017-10-14/api-2.json +++ b/models/apis/medialive/2017-10-14/api-2.json @@ -2894,6 +2894,14 @@ "StreamName": { "shape": "__string", "locationName": "streamName" + }, + "AudioDashRoles": { + "shape": "__listOfDashRoleAudio", + "locationName": "audioDashRoles" + }, + "DvbDashAccessibility": { + "shape": "DvbDashAccessibility", + "locationName": "dvbDashAccessibility" } }, "required": [ @@ -3762,6 +3770,14 @@ "Name": { "shape": "__string", "locationName": "name" + }, + "CaptionDashRoles": { + "shape": "__listOfDashRoleCaption", + "locationName": "captionDashRoles" + }, + "DvbDashAccessibility": { + "shape": "DvbDashAccessibility", + "locationName": "dvbDashAccessibility" } }, "required": [ @@ -11319,6 +11335,10 @@ "UdpGroupSettings": { "shape": "UdpGroupSettings", "locationName": "udpGroupSettings" + }, + "CmafIngestGroupSettings": { + "shape": "CmafIngestGroupSettings", + "locationName": "cmafIngestGroupSettings" } } }, @@ -11378,6 +11398,10 @@ "UdpOutputSettings": { "shape": "UdpOutputSettings", "locationName": "udpOutputSettings" + }, + "CmafIngestOutputSettings": { + "shape": "CmafIngestOutputSettings", + "locationName": "cmafIngestOutputSettings" } } }, @@ -15210,6 +15234,124 @@ "type": "integer", "min": 64, "max": 2160 + }, + "CmafIngestGroupSettings": { + "type": "structure", + "members": { + "Destination": { + "shape": "OutputLocationRef", + "locationName": "destination" + }, + "NielsenId3Behavior": { + "shape": "CmafNielsenId3Behavior", + "locationName": "nielsenId3Behavior" + }, + "Scte35Type": { + "shape": "Scte35Type", + "locationName": "scte35Type" + }, + "SegmentLength": { + "shape": "__integerMin1", + "locationName": "segmentLength" + }, + "SegmentLengthUnits": { + "shape": "CmafIngestSegmentLengthUnits", + "locationName": "segmentLengthUnits" + }, + "SendDelayMs": { + "shape": "__integerMin0Max2000", + "locationName": "sendDelayMs" + } + }, + "required": [ + "Destination" + ] + }, + "CmafIngestOutputSettings": { + "type": "structure", + "members": { + "NameModifier": { + "shape": "__string", + "locationName": "nameModifier" + } + } + }, + "CmafIngestSegmentLengthUnits": { + "type": "string", + "enum": [ + "MILLISECONDS", + "SECONDS" + ] + }, + "CmafNielsenId3Behavior": { + "type": "string", + "enum": [ + "NO_PASSTHROUGH", + "PASSTHROUGH" + ] + }, + "DashRoleAudio": { + "type": "string", + "enum": [ + "ALTERNATE", + "COMMENTARY", + "DESCRIPTION", + "DUB", + "EMERGENCY", + "ENHANCED-AUDIO-INTELLIGIBILITY", + "KARAOKE", + "MAIN", + "SUPPLEMENTARY" + ] + }, + "DashRoleCaption": { + "type": "string", + "enum": [ + "ALTERNATE", + "CAPTION", + "COMMENTARY", + "DESCRIPTION", + "DUB", + "EASYREADER", + "EMERGENCY", + "FORCED-SUBTITLE", + "KARAOKE", + "MAIN", + "METADATA", + "SUBTITLE", + "SUPPLEMENTARY" + ] + }, + "DvbDashAccessibility": { + "type": "string", + "enum": [ + "DVBDASH_1_VISUALLY_IMPAIRED", + "DVBDASH_2_HARD_OF_HEARING", + "DVBDASH_3_SUPPLEMENTAL_COMMENTARY", + "DVBDASH_4_DIRECTORS_COMMENTARY", + "DVBDASH_5_EDUCATIONAL_NOTES", + "DVBDASH_6_MAIN_PROGRAM", + "DVBDASH_7_CLEAN_FEED" + ] + }, + "__listOfDashRoleAudio": { + "type": "list", + "member": { + "shape": "DashRoleAudio" + } + }, + "__listOfDashRoleCaption": { + "type": "list", + "member": { + "shape": "DashRoleCaption" + } + }, + "Scte35Type": { + "type": "string", + "enum": [ + "NONE", + "SCTE_35_WITHOUT_SEGMENTATION" + ] } 
} } diff --git a/models/apis/medialive/2017-10-14/docs-2.json b/models/apis/medialive/2017-10-14/docs-2.json index ff4edc76a69..8e76cac0b79 100644 --- a/models/apis/medialive/2017-10-14/docs-2.json +++ b/models/apis/medialive/2017-10-14/docs-2.json @@ -2789,7 +2789,8 @@ "MsSmoothGroupSettings$Destination": "Smooth Streaming publish point on an IIS server. Elemental Live acts as a \"Push\" encoder to IIS.", "MultiplexOutputSettings$Destination": "Destination is a Multiplex.", "RtmpOutputSettings$Destination": "The RTMP endpoint excluding the stream name (eg. rtmp://host/appname). For connection to Akamai, a username and password must be supplied. URI fields accept format identifiers.", - "UdpOutputSettings$Destination": "Destination address and port number for RTP or UDP packets. Can be unicast or multicast RTP or UDP (eg. rtp://239.10.10.10:5001 or udp://10.100.100.100:5002)." + "UdpOutputSettings$Destination": "Destination address and port number for RTP or UDP packets. Can be unicast or multicast RTP or UDP (eg. rtp://239.10.10.10:5001 or udp://10.100.100.100:5002).", + "CmafIngestGroupSettings$Destination": "A HTTP destination for the tracks" } }, "OutputLockingSettings": { @@ -3955,6 +3956,7 @@ "__integerMin0Max2000": { "base": null, "refs": { + "CmafIngestGroupSettings$SendDelayMs": "Number of milliseconds to delay the output from the second pipeline." } }, "__integerMin0Max255": { @@ -4074,7 +4076,8 @@ "StaticImageActivateScheduleActionSettings$Height": "The height of the image when inserted into the video, in pixels. The overlay will be scaled up or down to the specified height. Leave blank to use the native height of the overlay.", "StaticImageActivateScheduleActionSettings$Width": "The width of the image when inserted into the video, in pixels. The overlay will be scaled up or down to the specified width. Leave blank to use the native width of the overlay.", "StaticImageOutputActivateScheduleActionSettings$Height": "The height of the image when inserted into the video, in pixels. The overlay will be scaled up or down to the specified height. Leave blank to use the native height of the overlay.", - "StaticImageOutputActivateScheduleActionSettings$Width": "The width of the image when inserted into the video, in pixels. The overlay will be scaled up or down to the specified width. Leave blank to use the native width of the overlay." + "StaticImageOutputActivateScheduleActionSettings$Width": "The width of the image when inserted into the video, in pixels. The overlay will be scaled up or down to the specified width. Leave blank to use the native width of the overlay.", + "CmafIngestGroupSettings$SegmentLength": "The nominal duration of segments. The units are specified in SegmentLengthUnits. The segments will end on the next keyframe after the specified duration, so the actual segment length might be longer, and it might be a fraction of the units." } }, "__integerMin100": { @@ -4944,7 +4947,8 @@ "ValidationError$ElementPath": "Path to the source of the error.", "ValidationError$ErrorMessage": "The error message.", "VideoDescription$Name": "The name of this VideoDescription. Outputs will use this name to uniquely identify this Description. Description names should be unique within this Live Event.", - "__listOf__string$member": null + "__listOf__string$member": null, + "CmafIngestOutputSettings$NameModifier": "String concatenated to the end of the destination filename. Required for multiple outputs of the same type." 
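As a rough illustration, a hedged sketch of how the new CMAF ingest settings would appear in a v1 Go SDK channel definition; the struct and field names are assumed to be generated from the model above, the destination ref ID and name modifier are placeholders, and the enum strings come from the api-2.json entries in this diff.

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/medialive"
)

func main() {
	// One CMAF ingest output group, as it would be embedded in the EncoderSettings.OutputGroups
	// list of a CreateChannel or UpdateChannel request.
	group := &medialive.OutputGroup{
		OutputGroupSettings: &medialive.OutputGroupSettings{
			CmafIngestGroupSettings: &medialive.CmafIngestGroupSettings{
				Destination:        &medialive.OutputLocationRef{DestinationRefId: aws.String("cmaf-dest")}, // placeholder ref
				NielsenId3Behavior: aws.String("NO_PASSTHROUGH"),
				Scte35Type:         aws.String("SCTE_35_WITHOUT_SEGMENTATION"),
				SegmentLength:      aws.Int64(4),
				SegmentLengthUnits: aws.String("SECONDS"),
				SendDelayMs:        aws.Int64(0),
			},
		},
		Outputs: []*medialive.Output{{
			OutputName: aws.String("cmaf-ingest"),
			OutputSettings: &medialive.OutputSettings{
				CmafIngestOutputSettings: &medialive.CmafIngestOutputSettings{
					NameModifier: aws.String("_cmaf"), // placeholder modifier
				},
			},
		}},
	}
	fmt.Println(group)
}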
} }, "__stringMax1000": { @@ -5150,6 +5154,67 @@ "refs": { "H265Settings$TileHeight": "Set this field to set up the picture as a tile. You must also set tileWidth.\nThe tile height must result in 22 or fewer rows in the frame. The tile width\nmust result in 20 or fewer columns in the frame. And finally, the product of the\ncolumn count and row count must be 64 of less.\nIf the tile width and height are specified, MediaLive will override the video\ncodec slices field with a value that MediaLive calculates" } + }, + "CmafIngestGroupSettings": { + "base": "Cmaf Ingest Group Settings", + "refs": { + "OutputGroupSettings$CmafIngestGroupSettings": null + } + }, + "CmafIngestOutputSettings": { + "base": "Cmaf Ingest Output Settings", + "refs": { + "OutputSettings$CmafIngestOutputSettings": null + } + }, + "CmafIngestSegmentLengthUnits": { + "base": "Cmaf Ingest Segment Length Units", + "refs": { + "CmafIngestGroupSettings$SegmentLengthUnits": "Time unit for segment length parameter." + } + }, + "CmafNielsenId3Behavior": { + "base": "Cmaf Nielsen Id3 Behavior", + "refs": { + "CmafIngestGroupSettings$NielsenId3Behavior": "If set to passthrough, Nielsen inaudible tones for media tracking will be detected in the input audio and an equivalent ID3 tag will be inserted in the output." + } + }, + "DashRoleAudio": { + "base": "Dash Role Audio", + "refs": { + "__listOfDashRoleAudio$member": null + } + }, + "DashRoleCaption": { + "base": "Dash Role Caption", + "refs": { + "__listOfDashRoleCaption$member": null + } + }, + "DvbDashAccessibility": { + "base": "Dvb Dash Accessibility", + "refs": { + "AudioDescription$DvbDashAccessibility": "Identifies DVB DASH accessibility signaling in this audio output. Used in Microsoft Smooth Streaming outputs to signal accessibility information to packagers.", + "CaptionDescription$DvbDashAccessibility": "Identifies DVB DASH accessibility signaling in this captions output. Used in Microsoft Smooth Streaming outputs to signal accessibility information to packagers." + } + }, + "__listOfDashRoleAudio": { + "base": null, + "refs": { + "AudioDescription$AudioDashRoles": "Identifies the DASH roles to assign to this audio output. Applies only when the audio output is configured for DVB DASH accessibility signaling." + } + }, + "__listOfDashRoleCaption": { + "base": null, + "refs": { + "CaptionDescription$CaptionDashRoles": "Identifies the DASH roles to assign to this captions output. Applies only when the captions output is configured for DVB DASH accessibility signaling." + } + }, + "Scte35Type": { + "base": "Scte35 Type", + "refs": { + "CmafIngestGroupSettings$Scte35Type": "Type of scte35 track to add. 
none or scte35WithoutSegmentation" + } } }, "documentation": "API for AWS Elemental MediaLive" diff --git a/models/apis/medical-imaging/2023-07-19/api-2.json b/models/apis/medical-imaging/2023-07-19/api-2.json index 78c91fd8517..3fec0981ffe 100644 --- a/models/apis/medical-imaging/2023-07-19/api-2.json +++ b/models/apis/medical-imaging/2023-07-19/api-2.json @@ -593,6 +593,32 @@ "min":0, "sensitive":true }, + "DICOMSeriesBodyPart":{ + "type":"string", + "max":64, + "min":0, + "sensitive":true + }, + "DICOMSeriesInstanceUID":{ + "type":"string", + "max":64, + "min":0, + "pattern":"(?:[1-9][0-9]*|0)(\\.(?:[1-9][0-9]*|0))*", + "sensitive":true + }, + "DICOMSeriesModality":{ + "type":"string", + "max":16, + "min":0, + "sensitive":true + }, + "DICOMSeriesNumber":{ + "type":"integer", + "box":true, + "max":2147483647, + "min":-2147483648, + "sensitive":true + }, "DICOMStudyDate":{ "type":"string", "max":18, @@ -645,6 +671,10 @@ "DICOMNumberOfStudyRelatedSeries":{"shape":"DICOMNumberOfStudyRelatedSeries"}, "DICOMNumberOfStudyRelatedInstances":{"shape":"DICOMNumberOfStudyRelatedInstances"}, "DICOMAccessionNumber":{"shape":"DICOMAccessionNumber"}, + "DICOMSeriesInstanceUID":{"shape":"DICOMSeriesInstanceUID"}, + "DICOMSeriesModality":{"shape":"DICOMSeriesModality"}, + "DICOMSeriesBodyPart":{"shape":"DICOMSeriesBodyPart"}, + "DICOMSeriesNumber":{"shape":"DICOMSeriesNumber"}, "DICOMStudyDate":{"shape":"DICOMStudyDate"}, "DICOMStudyTime":{"shape":"DICOMStudyTime"} } @@ -1255,7 +1285,9 @@ "DICOMAccessionNumber":{"shape":"DICOMAccessionNumber"}, "DICOMStudyId":{"shape":"DICOMStudyId"}, "DICOMStudyInstanceUID":{"shape":"DICOMStudyInstanceUID"}, + "DICOMSeriesInstanceUID":{"shape":"DICOMSeriesInstanceUID"}, "createdAt":{"shape":"Date"}, + "updatedAt":{"shape":"Date"}, "DICOMStudyDateAndTime":{"shape":"DICOMStudyDateAndTime"} }, "union":true @@ -1263,7 +1295,8 @@ "SearchCriteria":{ "type":"structure", "members":{ - "filters":{"shape":"SearchCriteriaFiltersList"} + "filters":{"shape":"SearchCriteriaFiltersList"}, + "sort":{"shape":"Sort"} }, "sensitive":true }, @@ -1324,6 +1357,7 @@ "required":["imageSetsMetadataSummaries"], "members":{ "imageSetsMetadataSummaries":{"shape":"ImageSetsMetadataSummaries"}, + "sort":{"shape":"Sort"}, "nextToken":{"shape":"NextToken"} } }, @@ -1339,6 +1373,32 @@ }, "exception":true }, + "Sort":{ + "type":"structure", + "required":[ + "sortOrder", + "sortField" + ], + "members":{ + "sortOrder":{"shape":"SortOrder"}, + "sortField":{"shape":"SortField"} + } + }, + "SortField":{ + "type":"string", + "enum":[ + "updatedAt", + "createdAt", + "DICOMStudyDateAndTime" + ] + }, + "SortOrder":{ + "type":"string", + "enum":[ + "ASC", + "DESC" + ] + }, "StartDICOMImportJobRequest":{ "type":"structure", "required":[ diff --git a/models/apis/medical-imaging/2023-07-19/docs-2.json b/models/apis/medical-imaging/2023-07-19/docs-2.json index 7ac5b665e82..0025b7d1fa4 100644 --- a/models/apis/medical-imaging/2023-07-19/docs-2.json +++ b/models/apis/medical-imaging/2023-07-19/docs-2.json @@ -1,12 +1,12 @@ { "version": "2.0", - "service": "

This is the AWS HealthImaging API Reference. AWS HealthImaging is a HIPAA-eligible service that helps health care providers and their medical imaging ISV partners store, transform, and apply machine learning to medical images. For an introduction to the service, see the AWS HealthImaging Developer Guide .

We recommend using one of the AWS Software Development Kits (SDKs) for your programming language, as they take care of request authentication, serialization, and connection management. For more information, see Tools to build on AWS.

For information about using HealthImaging API actions in one of the language-specific AWS SDKs, refer to the See Also link at the end of each section that describes an API action or data type.

The following sections list AWS HealthImaging API actions categorized according to functionality. Links are provided to actions within this Reference, along with links back to corresponding sections in the AWS HealthImaging Developer Guide where you can view console procedures and CLI/SDK code examples.

Data store actions

Import job actions

Image set access actions

Image set modification actions

Tagging actions

", + "service": "

This is the AWS HealthImaging API Reference. AWS HealthImaging is a HIPAA eligible service that empowers healthcare providers, life science organizations, and their software partners to store, analyze, and share medical images in the cloud at petabyte scale. For an introduction to the service, see the AWS HealthImaging Developer Guide .

We recommend using one of the AWS Software Development Kits (SDKs) for your programming language, as they take care of request authentication, serialization, and connection management. For more information, see Tools to build on AWS.

The following sections list AWS HealthImaging API actions categorized according to functionality. Links are provided to actions within this Reference, along with links back to corresponding sections in the AWS HealthImaging Developer Guide where you can view tested code examples.

Data store actions

Import job actions

Image set access actions

Image set modification actions

Tagging actions

", "operations": { "CopyImageSet": "

Copy an image set.

", "CreateDatastore": "

Create a data store.

", "DeleteDatastore": "

Delete a data store.

Before a data store can be deleted, you must first delete all image sets within it.

", "DeleteImageSet": "

Delete an image set.

", - "GetDICOMImportJob": "

Get the import job properties to learn more about the job or job progress.

", + "GetDICOMImportJob": "

Get the import job properties to learn more about the job or job progress.

The jobStatus refers to the execution of the import job. Therefore, an import job can return a jobStatus as COMPLETED even if validation issues are discovered during the import process. If a jobStatus returns as COMPLETED, we still recommend you review the output manifests written to S3, as they provide details on the success or failure of individual P10 object imports.

", "GetDatastore": "

Get data store properties.

", "GetImageFrame": "

Get an image frame (pixel data) for an image set.

", "GetImageSet": "

Get image set properties.

", @@ -171,6 +171,31 @@ "DICOMTags$DICOMPatientSex": "

The patient sex.

" } }, + "DICOMSeriesBodyPart": { + "base": null, + "refs": { + "DICOMTags$DICOMSeriesBodyPart": "

The DICOM provided identifier for the series Body Part Examined.

" + } + }, + "DICOMSeriesInstanceUID": { + "base": null, + "refs": { + "DICOMTags$DICOMSeriesInstanceUID": "

The DICOM provided identifier for the Series Instance UID.

", + "SearchByAttributeValue$DICOMSeriesInstanceUID": "

The Series Instance UID input for search.

" + } + }, + "DICOMSeriesModality": { + "base": null, + "refs": { + "DICOMTags$DICOMSeriesModality": "

The DICOM provided identifier for the series Modality.

" + } + }, + "DICOMSeriesNumber": { + "base": null, + "refs": { + "DICOMTags$DICOMSeriesNumber": "

The DICOM provided identifier for the Series Number.

" + } + }, "DICOMStudyDate": { "base": null, "refs": { @@ -187,20 +212,20 @@ "DICOMStudyDescription": { "base": null, "refs": { - "DICOMTags$DICOMStudyDescription": "

The description of the study.

" + "DICOMTags$DICOMStudyDescription": "

The DICOM provided Study Description.

" } }, "DICOMStudyId": { "base": null, "refs": { - "DICOMTags$DICOMStudyId": "

The DICOM provided studyId.

", + "DICOMTags$DICOMStudyId": "

The DICOM provided identifier for the Study ID.

", "SearchByAttributeValue$DICOMStudyId": "

The DICOM study ID for search.

" } }, "DICOMStudyInstanceUID": { "base": null, "refs": { - "DICOMTags$DICOMStudyInstanceUID": "

The DICOM provided identifier for studyInstanceUid.>

", + "DICOMTags$DICOMStudyInstanceUID": "

The DICOM provided identifier for the Study Instance UID.

", "SearchByAttributeValue$DICOMStudyInstanceUID": "

The DICOM study instance UID for search.

" } }, @@ -312,6 +337,7 @@ "ImageSetsMetadataSummary$createdAt": "

The time an image set is created. Sample creation date is provided in 1985-04-12T23:20:50.52Z format.

", "ImageSetsMetadataSummary$updatedAt": "

The time an image set was last updated.

", "SearchByAttributeValue$createdAt": "

The created at time of the image set provided for search.

", + "SearchByAttributeValue$updatedAt": "

The updated at time of the image set provided for search.

", "StartDICOMImportJobResponse$submittedAt": "

The timestamp when the import job was submitted.

", "UpdateImageSetMetadataResponse$createdAt": "

The timestamp when image set metadata was created.

", "UpdateImageSetMetadataResponse$updatedAt": "

The timestamp when image set metadata was updated.

" @@ -702,6 +728,25 @@ "refs": { } }, + "Sort": { + "base": "

Sort search results.

", + "refs": { + "SearchCriteria$sort": "

The sort input for search criteria.

", + "SearchImageSetsResponse$sort": "

The sort order for image set search results.

" + } + }, + "SortField": { + "base": null, + "refs": { + "Sort$sortField": "

The sort field for search criteria.

" + } + }, + "SortOrder": { + "base": null, + "refs": { + "Sort$sortOrder": "

The sort order for search criteria.
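A hedged sketch of the new sort support with the v1 Go SDK; it assumes the generated SearchCriteria and Sort structures mirror the model above, and the datastore ID is a placeholder.

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/medicalimaging"
)

func main() {
	svc := medicalimaging.New(session.Must(session.NewSession()))

	// Return image sets ordered by most recently updated first.
	out, err := svc.SearchImageSets(&medicalimaging.SearchImageSetsInput{
		DatastoreId: aws.String("12345678901234567890123456789012"), // placeholder datastore ID
		SearchCriteria: &medicalimaging.SearchCriteria{
			Sort: &medicalimaging.Sort{
				SortField: aws.String("updatedAt"),
				SortOrder: aws.String("DESC"),
			},
		},
	})
	if err != nil {
		fmt.Println("search failed:", err)
		return
	}
	for _, summary := range out.ImageSetsMetadataSummaries {
		fmt.Println(aws.StringValue(summary.ImageSetId), aws.TimeValue(summary.UpdatedAt))
	}
}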

" + } + }, "StartDICOMImportJobRequest": { "base": null, "refs": { diff --git a/models/apis/transfer/2018-11-05/api-2.json b/models/apis/transfer/2018-11-05/api-2.json index 4326a89fbe3..98db5848e71 100644 --- a/models/apis/transfer/2018-11-05/api-2.json +++ b/models/apis/transfer/2018-11-05/api-2.json @@ -1085,6 +1085,12 @@ "min":19, "pattern":"c-([0-9a-f]{17})" }, + "ConnectorSecurityPolicyName":{ + "type":"string", + "max":100, + "min":0, + "pattern":"TransferSFTPConnectorSecurityPolicy-[A-Za-z0-9-]+" + }, "CopyStepDetails":{ "type":"structure", "members":{ @@ -1162,7 +1168,8 @@ "AccessRole":{"shape":"Role"}, "LoggingRole":{"shape":"Role"}, "Tags":{"shape":"Tags"}, - "SftpConfig":{"shape":"SftpConnectorConfig"} + "SftpConfig":{"shape":"SftpConnectorConfig"}, + "SecurityPolicyName":{"shape":"ConnectorSecurityPolicyName"} } }, "CreateConnectorResponse":{ @@ -1657,7 +1664,8 @@ "LoggingRole":{"shape":"Role"}, "Tags":{"shape":"Tags"}, "SftpConfig":{"shape":"SftpConnectorConfig"}, - "ServiceManagedEgressIpAddresses":{"shape":"ServiceManagedEgressIpAddresses"} + "ServiceManagedEgressIpAddresses":{"shape":"ServiceManagedEgressIpAddresses"}, + "SecurityPolicyName":{"shape":"ConnectorSecurityPolicyName"} } }, "DescribedExecution":{ @@ -1707,7 +1715,10 @@ "SshCiphers":{"shape":"SecurityPolicyOptions"}, "SshKexs":{"shape":"SecurityPolicyOptions"}, "SshMacs":{"shape":"SecurityPolicyOptions"}, - "TlsCiphers":{"shape":"SecurityPolicyOptions"} + "TlsCiphers":{"shape":"SecurityPolicyOptions"}, + "SshHostKeyAlgorithms":{"shape":"SecurityPolicyOptions"}, + "Type":{"shape":"SecurityPolicyResourceType"}, + "Protocols":{"shape":"SecurityPolicyProtocols"} } }, "DescribedServer":{ @@ -2785,7 +2796,7 @@ "type":"string", "max":100, "min":0, - "pattern":"TransferSecurityPolicy-.+" + "pattern":"Transfer[A-Za-z0-9]*SecurityPolicy-[A-Za-z0-9-]+" }, "SecurityPolicyNames":{ "type":"list", @@ -2800,6 +2811,26 @@ "type":"list", "member":{"shape":"SecurityPolicyOption"} }, + "SecurityPolicyProtocol":{ + "type":"string", + "enum":[ + "SFTP", + "FTPS" + ] + }, + "SecurityPolicyProtocols":{ + "type":"list", + "member":{"shape":"SecurityPolicyProtocol"}, + "max":5, + "min":1 + }, + "SecurityPolicyResourceType":{ + "type":"string", + "enum":[ + "SERVER", + "CONNECTOR" + ] + }, "SendWorkflowStepStateRequest":{ "type":"structure", "required":[ @@ -3211,7 +3242,8 @@ "As2Config":{"shape":"As2ConnectorConfig"}, "AccessRole":{"shape":"Role"}, "LoggingRole":{"shape":"Role"}, - "SftpConfig":{"shape":"SftpConnectorConfig"} + "SftpConfig":{"shape":"SftpConnectorConfig"}, + "SecurityPolicyName":{"shape":"ConnectorSecurityPolicyName"} } }, "UpdateConnectorResponse":{ diff --git a/models/apis/transfer/2018-11-05/docs-2.json b/models/apis/transfer/2018-11-05/docs-2.json index 177e8588119..715a851dbac 100644 --- a/models/apis/transfer/2018-11-05/docs-2.json +++ b/models/apis/transfer/2018-11-05/docs-2.json @@ -26,7 +26,7 @@ "DescribeExecution": "

You can use DescribeExecution to check the details of the execution of the specified workflow.

This API call only returns details for in-progress workflows.

If you provide an ID for an execution that is not in progress, or if the execution doesn't match the specified workflow ID, you receive a ResourceNotFound exception.

", "DescribeHostKey": "

Returns the details of the host key that's specified by the HostKeyId and ServerId.

", "DescribeProfile": "

Returns the details of the profile that's specified by the ProfileId.

", - "DescribeSecurityPolicy": "

Describes the security policy that is attached to your file transfer protocol-enabled server. The response contains a description of the security policy's properties. For more information about security policies, see Working with security policies.

", + "DescribeSecurityPolicy": "

Describes the security policy that is attached to your server or SFTP connector. The response contains a description of the security policy's properties. For more information about security policies, see Working with security policies for servers or Working with security policies for SFTP connectors.

", "DescribeServer": "

Describes a file transfer protocol-enabled server that you specify by passing the ServerId parameter.

The response contains a description of a server's properties. When you set EndpointType to VPC, the response will contain the EndpointDetails.

", "DescribeUser": "

Describes the user assigned to the specific file transfer protocol-enabled server, as identified by its ServerId property.

The response from this call returns the properties of the user associated with the ServerId value that was specified.

", "DescribeWorkflow": "

Describes the specified workflow.

", @@ -40,7 +40,7 @@ "ListExecutions": "

Lists all in-progress executions for the specified workflow.

If the specified workflow ID cannot be found, ListExecutions returns a ResourceNotFound exception.

", "ListHostKeys": "

Returns a list of host keys for the server that's specified by the ServerId parameter.

", "ListProfiles": "

Returns a list of the profiles for your system. If you want to limit the results to a certain number, supply a value for the MaxResults parameter. If you ran the command previously and received a value for NextToken, you can supply that value to continue listing profiles from where you left off.

", - "ListSecurityPolicies": "

Lists the security policies that are attached to your file transfer protocol-enabled servers.

", + "ListSecurityPolicies": "

Lists the security policies that are attached to your servers and SFTP connectors. For more information about security policies, see Working with security policies for servers or Working with security policies for SFTP connectors.

", "ListServers": "

Lists the file transfer protocol-enabled servers that are associated with your Amazon Web Services account.

", "ListTagsForResource": "

Lists all of the tags associated with the Amazon Resource Name (ARN) that you specify. The resource can be a user, server, or role.

", "ListUsers": "

Lists the users for a file transfer protocol-enabled server that you specify by passing the ServerId parameter.

", @@ -279,6 +279,14 @@ "UpdateConnectorResponse$ConnectorId": "

Returns the identifier of the connector object that you are updating.

" } }, + "ConnectorSecurityPolicyName": { + "base": null, + "refs": { + "CreateConnectorRequest$SecurityPolicyName": "

Specifies the name of the security policy for the connector.

", + "DescribedConnector$SecurityPolicyName": "

The text name of the security policy for the specified connector.

", + "UpdateConnectorRequest$SecurityPolicyName": "

Specifies the name of the security policy for the connector.
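For example, a hedged sketch of attaching a connector security policy when updating an SFTP connector with the v1 Go SDK; the connector ID and the policy name shown are placeholders (real names follow the TransferSFTPConnectorSecurityPolicy-* pattern defined in the model above).

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/transfer"
)

func main() {
	svc := transfer.New(session.Must(session.NewSession()))

	// Pin the connector to a specific SFTP connector security policy.
	out, err := svc.UpdateConnector(&transfer.UpdateConnectorInput{
		ConnectorId:        aws.String("c-abcdef01234567890"),                         // placeholder connector ID
		SecurityPolicyName: aws.String("TransferSFTPConnectorSecurityPolicy-2024-03"), // placeholder policy name
	})
	if err != nil {
		fmt.Println("update connector failed:", err)
		return
	}
	fmt.Println("updated connector:", aws.StringValue(out.ConnectorId))
}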

" + } + }, "CopyStepDetails": { "base": "

Each step type has its own StepDetails structure.

", "refs": { @@ -602,7 +610,7 @@ } }, "DescribedSecurityPolicy": { - "base": "

Describes the properties of a security policy that was specified. For more information about security policies, see Working with security policies.

", + "base": "

Describes the properties of a security policy that you specify. For more information about security policies, see Working with security policies for servers or Working with security policies for SFTP connectors.

", "refs": { "DescribeSecurityPolicyResponse$SecurityPolicy": "

An array containing the properties of the security policy.

" } @@ -797,7 +805,7 @@ "Fips": { "base": null, "refs": { - "DescribedSecurityPolicy$Fips": "

Specifies whether this policy enables Federal Information Processing Standards (FIPS).

" + "DescribedSecurityPolicy$Fips": "

Specifies whether this policy enables Federal Information Processing Standards (FIPS). This parameter applies to both server and connector security policies.

" } }, "Function": { @@ -1611,12 +1619,12 @@ "SecurityPolicyName": { "base": null, "refs": { - "CreateServerRequest$SecurityPolicyName": "

Specifies the name of the security policy that is attached to the server.

", - "DescribeSecurityPolicyRequest$SecurityPolicyName": "

Specifies the name of the security policy that is attached to the server.

", - "DescribedSecurityPolicy$SecurityPolicyName": "

Specifies the name of the security policy that is attached to the server.

", - "DescribedServer$SecurityPolicyName": "

Specifies the name of the security policy that is attached to the server.

", + "CreateServerRequest$SecurityPolicyName": "

Specifies the name of the security policy for the server.

", + "DescribeSecurityPolicyRequest$SecurityPolicyName": "

Specify the text name of the security policy for which you want the details.

", + "DescribedSecurityPolicy$SecurityPolicyName": "

The text name of the specified security policy.

", + "DescribedServer$SecurityPolicyName": "

Specifies the name of the security policy for the server.

", "SecurityPolicyNames$member": null, - "UpdateServerRequest$SecurityPolicyName": "

Specifies the name of the security policy that is attached to the server.

" + "UpdateServerRequest$SecurityPolicyName": "

Specifies the name of the security policy for the server.

" } }, "SecurityPolicyNames": { @@ -1634,10 +1642,29 @@ "SecurityPolicyOptions": { "base": null, "refs": { - "DescribedSecurityPolicy$SshCiphers": "

Specifies the enabled Secure Shell (SSH) cipher encryption algorithms in the security policy that is attached to the server.

", - "DescribedSecurityPolicy$SshKexs": "

Specifies the enabled SSH key exchange (KEX) encryption algorithms in the security policy that is attached to the server.

", - "DescribedSecurityPolicy$SshMacs": "

Specifies the enabled SSH message authentication code (MAC) encryption algorithms in the security policy that is attached to the server.

", - "DescribedSecurityPolicy$TlsCiphers": "

Specifies the enabled Transport Layer Security (TLS) cipher encryption algorithms in the security policy that is attached to the server.

" + "DescribedSecurityPolicy$SshCiphers": "

Lists the enabled Secure Shell (SSH) cipher encryption algorithms in the security policy that is attached to the server or connector. This parameter applies to both server and connector security policies.

", + "DescribedSecurityPolicy$SshKexs": "

Lists the enabled SSH key exchange (KEX) encryption algorithms in the security policy that is attached to the server or connector. This parameter applies to both server and connector security policies.

", + "DescribedSecurityPolicy$SshMacs": "

Lists the enabled SSH message authentication code (MAC) encryption algorithms in the security policy that is attached to the server or connector. This parameter applies to both server and connector security policies.

", + "DescribedSecurityPolicy$TlsCiphers": "

Lists the enabled Transport Layer Security (TLS) cipher encryption algorithms in the security policy that is attached to the server.

This parameter only applies to security policies for servers.

", + "DescribedSecurityPolicy$SshHostKeyAlgorithms": "

Lists the host key algorithms for the security policy.

This parameter only applies to security policies for connectors.

" + } + }, + "SecurityPolicyProtocol": { + "base": null, + "refs": { + "SecurityPolicyProtocols$member": null + } + }, + "SecurityPolicyProtocols": { + "base": null, + "refs": { + "DescribedSecurityPolicy$Protocols": "

Lists the file transfer protocols that the security policy applies to.

" + } + }, + "SecurityPolicyResourceType": { + "base": null, + "refs": { + "DescribedSecurityPolicy$Type": "

The resource type to which the security policy applies, either server or connector.

" } }, "SendWorkflowStepStateRequest": { diff --git a/models/endpoints/endpoints.json b/models/endpoints/endpoints.json index bce7521055b..a6588d922dd 100644 --- a/models/endpoints/endpoints.json +++ b/models/endpoints/endpoints.json @@ -18987,7 +18987,12 @@ "ap-southeast-2" : { }, "ap-southeast-3" : { }, "ap-southeast-4" : { }, - "ca-central-1" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "verifiedpermissions-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, "eu-central-1" : { }, "eu-central-2" : { }, "eu-north-1" : { }, @@ -18996,13 +19001,68 @@ "eu-west-1" : { }, "eu-west-2" : { }, "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "verifiedpermissions-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "verifiedpermissions-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "verifiedpermissions-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "verifiedpermissions-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "verifiedpermissions-fips.us-west-2.amazonaws.com" + }, "me-central-1" : { }, "me-south-1" : { }, "sa-east-1" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-1" : { }, - "us-west-2" : { } + "us-east-1" : { + "variants" : [ { + "hostname" : "verifiedpermissions-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "verifiedpermissions-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "verifiedpermissions-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "verifiedpermissions-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } } }, "voice-chime" : { diff --git a/service/cleanroomsml/api.go b/service/cleanroomsml/api.go index 1810392a535..d4184402494 100644 --- a/service/cleanroomsml/api.go +++ b/service/cleanroomsml/api.go @@ -72,7 +72,7 @@ func (c *CleanRoomsML) CreateAudienceModelRequest(input *CreateAudienceModelInpu // Returned Error Types: // // - ConflictException -// A resource with that name already exists in this region. +// You can't complete this action because another resource depends on this resource. // // - ValidationException // The request parameters for this request are incorrect. @@ -163,7 +163,7 @@ func (c *CleanRoomsML) CreateConfiguredAudienceModelRequest(input *CreateConfigu // Returned Error Types: // // - ConflictException -// A resource with that name already exists in this region. +// You can't complete this action because another resource depends on this resource. // // - ValidationException // The request parameters for this request are incorrect. @@ -242,9 +242,9 @@ func (c *CleanRoomsML) CreateTrainingDatasetRequest(input *CreateTrainingDataset // CreateTrainingDataset API operation for AWS Clean Rooms ML. // -// Defines the information necessary to create a training dataset, or seed audience. 
-// In Clean Rooms ML, the TrainingDataset is metadata that points to a Glue -// table, which is read only during AudienceModel creation. +// Defines the information necessary to create a training dataset. In Clean +// Rooms ML, the TrainingDataset is metadata that points to a Glue table, which +// is read only during AudienceModel creation. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about @@ -256,7 +256,7 @@ func (c *CleanRoomsML) CreateTrainingDatasetRequest(input *CreateTrainingDataset // Returned Error Types: // // - ConflictException -// A resource with that name already exists in this region. +// You can't complete this action because another resource depends on this resource. // // - ValidationException // The request parameters for this request are incorrect. @@ -343,7 +343,7 @@ func (c *CleanRoomsML) DeleteAudienceGenerationJobRequest(input *DeleteAudienceG // Returned Error Types: // // - ConflictException -// A resource with that name already exists in this region. +// You can't complete this action because another resource depends on this resource. // // - ValidationException // The request parameters for this request are incorrect. @@ -434,7 +434,7 @@ func (c *CleanRoomsML) DeleteAudienceModelRequest(input *DeleteAudienceModelInpu // Returned Error Types: // // - ConflictException -// A resource with that name already exists in this region. +// You can't complete this action because another resource depends on this resource. // // - ValidationException // The request parameters for this request are incorrect. @@ -526,7 +526,7 @@ func (c *CleanRoomsML) DeleteConfiguredAudienceModelRequest(input *DeleteConfigu // Returned Error Types: // // - ConflictException -// A resource with that name already exists in this region. +// You can't complete this action because another resource depends on this resource. // // - ValidationException // The request parameters for this request are incorrect. @@ -705,7 +705,7 @@ func (c *CleanRoomsML) DeleteTrainingDatasetRequest(input *DeleteTrainingDataset // Returned Error Types: // // - ConflictException -// A resource with that name already exists in this region. +// You can't complete this action because another resource depends on this resource. // // - ValidationException // The request parameters for this request are incorrect. @@ -2084,7 +2084,7 @@ func (c *CleanRoomsML) StartAudienceExportJobRequest(input *StartAudienceExportJ // Returned Error Types: // // - ConflictException -// A resource with that name already exists in this region. +// You can't complete this action because another resource depends on this resource. // // - ValidationException // The request parameters for this request are incorrect. @@ -2175,7 +2175,7 @@ func (c *CleanRoomsML) StartAudienceGenerationJobRequest(input *StartAudienceGen // Returned Error Types: // // - ConflictException -// A resource with that name already exists in this region. +// You can't complete this action because another resource depends on this resource. // // - ValidationException // The request parameters for this request are incorrect. @@ -2440,7 +2440,7 @@ func (c *CleanRoomsML) UpdateConfiguredAudienceModelRequest(input *UpdateConfigu // Returned Error Types: // // - ConflictException -// A resource with that name already exists in this region. +// You can't complete this action because another resource depends on this resource. 
// // - ValidationException // The request parameters for this request are incorrect. @@ -2706,13 +2706,19 @@ func (s *AudienceExportJobSummary) SetUpdateTime(v time.Time) *AudienceExportJob return s } -// Defines the Amazon S3 bucket where the training data for the configured audience +// Defines the Amazon S3 bucket where the seed audience for the generating audience // is stored. type AudienceGenerationJobDataSource struct { _ struct{} `type:"structure"` - // The Amazon S3 bucket where the training data for the configured audience - // is stored. + // Defines the Amazon S3 bucket where the seed audience for the generating audience + // is stored. A valid data source is a JSON line file in the following format: + // + // {"user_id": "111111"} + // + // {"user_id": "222222"} + // + // ... // // DataSource is a required field DataSource *S3ConfigMap `locationName:"dataSource" type:"structure" required:"true"` @@ -2896,62 +2902,6 @@ func (s *AudienceGenerationJobSummary) SetUpdateTime(v time.Time) *AudienceGener return s } -// The audience model metrics. -type AudienceModelMetric struct { - _ struct{} `type:"structure"` - - // The number of users that were used to generate these model metrics. - // - // ForTopKItemPredictions is a required field - ForTopKItemPredictions *int64 `locationName:"forTopKItemPredictions" type:"integer" required:"true"` - - // The audience model metric. - // - // Type is a required field - Type *string `locationName:"type" type:"string" required:"true" enum:"AudienceModelMetricType"` - - // The value of the audience model metric - // - // Value is a required field - Value *float64 `locationName:"value" type:"double" required:"true"` -} - -// String returns the string representation. -// -// API parameter values that are decorated as "sensitive" in the API will not -// be included in the string output. The member name will be present, but the -// value will be replaced with "sensitive". -func (s AudienceModelMetric) String() string { - return awsutil.Prettify(s) -} - -// GoString returns the string representation. -// -// API parameter values that are decorated as "sensitive" in the API will not -// be included in the string output. The member name will be present, but the -// value will be replaced with "sensitive". -func (s AudienceModelMetric) GoString() string { - return s.String() -} - -// SetForTopKItemPredictions sets the ForTopKItemPredictions field's value. -func (s *AudienceModelMetric) SetForTopKItemPredictions(v int64) *AudienceModelMetric { - s.ForTopKItemPredictions = &v - return s -} - -// SetType sets the Type field's value. -func (s *AudienceModelMetric) SetType(v string) *AudienceModelMetric { - s.Type = &v - return s -} - -// SetValue sets the Value field's value. -func (s *AudienceModelMetric) SetValue(v float64) *AudienceModelMetric { - s.Value = &v - return s -} - // Information about the audience model. type AudienceModelSummary struct { _ struct{} `type:"structure"` @@ -3055,6 +3005,14 @@ func (s *AudienceModelSummary) SetUpdateTime(v time.Time) *AudienceModelSummary type AudienceQualityMetrics struct { _ struct{} `type:"structure"` + // The recall score of the generated audience. Recall is the percentage of the + // most similar users (by default, the most similar 20%) from a sample of the + // training data that are included in the seed audience by the audience generation + // job. Values range from 0-1, larger values indicate a better audience. 
A recall + // value approximately equal to the maximum bin size indicates that the audience + // model is equivalent to random selection. + RecallMetric *float64 `locationName:"recallMetric" type:"double"` + // The relevance scores of the generated audience. // // RelevanceMetrics is a required field @@ -3079,6 +3037,12 @@ func (s AudienceQualityMetrics) GoString() string { return s.String() } +// SetRecallMetric sets the RecallMetric field's value. +func (s *AudienceQualityMetrics) SetRecallMetric(v float64) *AudienceQualityMetrics { + s.RecallMetric = &v + return s +} + // SetRelevanceMetrics sets the RelevanceMetrics field's value. func (s *AudienceQualityMetrics) SetRelevanceMetrics(v []*RelevanceMetric) *AudienceQualityMetrics { s.RelevanceMetrics = v @@ -3470,7 +3434,7 @@ func (s *ConfiguredAudienceModelSummary) SetUpdateTime(v time.Time) *ConfiguredA return s } -// A resource with that name already exists in this region. +// You can't complete this action because another resource depends on this resource. type ConflictException struct { _ struct{} `type:"structure"` RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"` @@ -3575,10 +3539,10 @@ type CreateAudienceModelInput struct { // * Do not use aws:, AWS:, or any upper or lowercase combination of such // as a prefix for keys as it is reserved for AWS use. You cannot edit or // delete tag keys with this prefix. Values can have this prefix. If a tag - // value has aws as its prefix but the key does not, then Forecast considers - // it to be a user tag and will count against the limit of 50 tags. Tags - // with only the key prefix of aws do not count against your tags per resource - // limit. + // value has aws as its prefix but the key does not, then Clean Rooms ML + // considers it to be a user tag and will count against the limit of 50 tags. + // Tags with only the key prefix of aws do not count against your tags per + // resource limit. Tags map[string]*string `locationName:"tags" type:"map"` // The end date and time of the training window. @@ -3745,7 +3709,7 @@ type CreateConfiguredAudienceModelInput struct { Description *string `locationName:"description" type:"string"` // The minimum number of users from the seed audience that must match with users - // in the training data of the audience model. + // in the training data of the audience model. The default value is 500. MinMatchingSeedSize *int64 `locationName:"minMatchingSeedSize" min:"25" type:"integer"` // The name of the configured audience model. @@ -3792,10 +3756,10 @@ type CreateConfiguredAudienceModelInput struct { // * Do not use aws:, AWS:, or any upper or lowercase combination of such // as a prefix for keys as it is reserved for AWS use. You cannot edit or // delete tag keys with this prefix. Values can have this prefix. If a tag - // value has aws as its prefix but the key does not, then Forecast considers - // it to be a user tag and will count against the limit of 50 tags. Tags - // with only the key prefix of aws do not count against your tags per resource - // limit. + // value has aws as its prefix but the key does not, then Clean Rooms ML + // considers it to be a user tag and will count against the limit of 50 tags. + // Tags with only the key prefix of aws do not count against your tags per + // resource limit. Tags map[string]*string `locationName:"tags" type:"map"` } @@ -4770,7 +4734,8 @@ type GetAudienceGenerationJobOutput struct { // from the seed. 
IncludeSeedInOutput *bool `locationName:"includeSeedInOutput" type:"boolean"` - // The relevance scores for different audience sizes. + // The relevance scores for different audience sizes and the recall score of + // the generated audience. Metrics *AudienceQualityMetrics `locationName:"metrics" type:"structure"` // The name of the audience generation job. @@ -4974,9 +4939,6 @@ type GetAudienceModelOutput struct { // The KMS key ARN used for the audience model. KmsKeyArn *string `locationName:"kmsKeyArn" min:"20" type:"string"` - // Accuracy metrics for the model. - Metrics []*AudienceModelMetric `locationName:"metrics" type:"list"` - // The name of the audience model. // // Name is a required field @@ -5053,12 +5015,6 @@ func (s *GetAudienceModelOutput) SetKmsKeyArn(v string) *GetAudienceModelOutput return s } -// SetMetrics sets the Metrics field's value. -func (s *GetAudienceModelOutput) SetMetrics(v []*AudienceModelMetric) *GetAudienceModelOutput { - s.Metrics = v - return s -} - // SetName sets the Name field's value. func (s *GetAudienceModelOutput) SetName(v string) *GetAudienceModelOutput { s.Name = &v @@ -6821,10 +6777,10 @@ type StartAudienceGenerationJobInput struct { // * Do not use aws:, AWS:, or any upper or lowercase combination of such // as a prefix for keys as it is reserved for AWS use. You cannot edit or // delete tag keys with this prefix. Values can have this prefix. If a tag - // value has aws as its prefix but the key does not, then Forecast considers - // it to be a user tag and will count against the limit of 50 tags. Tags - // with only the key prefix of aws do not count against your tags per resource - // limit. + // value has aws as its prefix but the key does not, then Clean Rooms ML + // considers it to be a user tag and will count against the limit of 50 tags. + // Tags with only the key prefix of aws do not count against your tags per + // resource limit. Tags map[string]*string `locationName:"tags" type:"map"` } @@ -7031,7 +6987,7 @@ type TagResourceInput struct { // * Do not use aws:, AWS:, or any upper or lowercase combination of such // as a prefix for keys as it is reserved for AWS use. You cannot edit or // delete tag keys with this prefix. Values can have this prefix. If a tag - // value has aws as its prefix but the key does not, then Forecast considers + // value has aws as its prefix but the key does not, then Clean Rooms considers // it to be a user tag and will count against the limit of 50 tags. Tags // with only the key prefix of aws do not count against your tags per resource // limit. 
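
For reference, a minimal sketch of how the RecallMetric added to AudienceQualityMetrics in this release might be read through the v1 SDK's Clean Rooms ML client. The job ARN value and the GetAudienceGenerationJobInput field name (AudienceGenerationJobArn) are assumptions for illustration; the Metrics, RecallMetric, and RelevanceMetrics fields are the ones shown in the generated code above.

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/cleanroomsml"
)

func main() {
	// Standard v1 client construction from the default session.
	sess := session.Must(session.NewSession())
	svc := cleanroomsml.New(sess)

	// Look up a previously started audience generation job (hypothetical ARN).
	out, err := svc.GetAudienceGenerationJob(&cleanroomsml.GetAudienceGenerationJobInput{
		AudienceGenerationJobArn: aws.String("arn:aws:cleanrooms-ml:us-east-1:111122223333:audience-generation-job/example"),
	})
	if err != nil {
		fmt.Println("GetAudienceGenerationJob failed:", err)
		return
	}

	// Metrics now carries the recall score alongside the relevance metrics.
	if out.Metrics != nil {
		if out.Metrics.RecallMetric != nil {
			fmt.Printf("recall metric: %.3f\n", aws.Float64Value(out.Metrics.RecallMetric))
		}
		fmt.Println("relevance metrics:", out.Metrics.RelevanceMetrics)
	}
}
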
@@ -7567,30 +7523,6 @@ func AudienceGenerationJobStatus_Values() []string { } } -const ( - // AudienceModelMetricTypeNormalizedDiscountedCumulativeGain is a AudienceModelMetricType enum value - AudienceModelMetricTypeNormalizedDiscountedCumulativeGain = "NORMALIZED_DISCOUNTED_CUMULATIVE_GAIN" - - // AudienceModelMetricTypeMeanReciprocalRank is a AudienceModelMetricType enum value - AudienceModelMetricTypeMeanReciprocalRank = "MEAN_RECIPROCAL_RANK" - - // AudienceModelMetricTypePrecision is a AudienceModelMetricType enum value - AudienceModelMetricTypePrecision = "PRECISION" - - // AudienceModelMetricTypeRecall is a AudienceModelMetricType enum value - AudienceModelMetricTypeRecall = "RECALL" -) - -// AudienceModelMetricType_Values returns all elements of the AudienceModelMetricType enum -func AudienceModelMetricType_Values() []string { - return []string{ - AudienceModelMetricTypeNormalizedDiscountedCumulativeGain, - AudienceModelMetricTypeMeanReciprocalRank, - AudienceModelMetricTypePrecision, - AudienceModelMetricTypeRecall, - } -} - const ( // AudienceModelStatusCreatePending is a AudienceModelStatus enum value AudienceModelStatusCreatePending = "CREATE_PENDING" diff --git a/service/cleanroomsml/errors.go b/service/cleanroomsml/errors.go index aaf11068cbb..3e1004aae3b 100644 --- a/service/cleanroomsml/errors.go +++ b/service/cleanroomsml/errors.go @@ -17,7 +17,7 @@ const ( // ErrCodeConflictException for service response error code // "ConflictException". // - // A resource with that name already exists in this region. + // You can't complete this action because another resource depends on this resource. ErrCodeConflictException = "ConflictException" // ErrCodeResourceNotFoundException for service response error code diff --git a/service/cloudformation/api.go b/service/cloudformation/api.go index 6753b2d8d56..36582c1495c 100644 --- a/service/cloudformation/api.go +++ b/service/cloudformation/api.go @@ -148,7 +148,7 @@ func (c *CloudFormation) ActivateTypeRequest(input *ActivateTypeInput) (req *req // Once you have activated a public third-party extension in your account and // Region, use SetTypeConfiguration (https://docs.aws.amazon.com/AWSCloudFormation/latest/APIReference/API_SetTypeConfiguration.html) // to specify configuration properties for the extension. For more information, -// see Configuring extensions at the account level (https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/registry-register.html#registry-set-configuration) +// see Configuring extensions at the account level (https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/registry-private.html#registry-set-configuration) // in the CloudFormation User Guide. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions @@ -234,7 +234,7 @@ func (c *CloudFormation) BatchDescribeTypeConfigurationsRequest(input *BatchDesc // Returns configuration data for the specified CloudFormation extensions, from // the CloudFormation registry for the account and Region. // -// For more information, see Configuring extensions at the account level (https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/registry-register.html#registry-set-configuration) +// For more information, see Configuring extensions at the account level (https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/registry-private.html#registry-set-configuration) // in the CloudFormation User Guide. // // Returns awserr.Error for service API and SDK errors. 
Use runtime type assertions @@ -6954,7 +6954,7 @@ func (c *CloudFormation) RegisterTypeRequest(input *RegisterTypeInput) (req *req // Once you have registered a private extension in your account and Region, // use SetTypeConfiguration (https://docs.aws.amazon.com/AWSCloudFormation/latest/APIReference/API_SetTypeConfiguration.html) // to specify configuration properties for the extension. For more information, -// see Configuring extensions at the account level (https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/registry-register.html#registry-set-configuration) +// see Configuring extensions at the account level (https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/registry-private.html#registry-set-configuration) // in the CloudFormation User Guide. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions @@ -7209,7 +7209,7 @@ func (c *CloudFormation) SetTypeConfigurationRequest(input *SetTypeConfiguration // // To view the current configuration data for an extension, refer to the ConfigurationSchema // element of DescribeType (https://docs.aws.amazon.com/AWSCloudFormation/latest/APIReference/API_DescribeType.html). -// For more information, see Configuring extensions at the account level (https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/registry-register.html#registry-set-configuration) +// For more information, see Configuring extensions at the account level (https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/registry-private.html#registry-set-configuration) // in the CloudFormation User Guide. // // It's strongly recommended that you use dynamic references to restrict sensitive @@ -14289,7 +14289,7 @@ type DescribeTypeOutput struct { // // To set the configuration data for an extension, use SetTypeConfiguration // (https://docs.aws.amazon.com/AWSCloudFormation/latest/APIReference/API_SetTypeConfiguration.html). - // For more information, see Configuring extensions at the account level (https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/registry-register.html#registry-set-configuration) + // For more information, see Configuring extensions at the account level (https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/registry-private.html#registry-set-configuration) // in the CloudFormation User Guide. ConfigurationSchema *string `min:"1" type:"string"` @@ -19950,6 +19950,23 @@ type ResourceChange struct { // don't have physical IDs because they haven't been created. PhysicalResourceId *string `type:"string"` + // The action that will be taken on the physical resource when the change set + // is executed. + // + // * Delete The resource will be deleted. + // + // * Retain The resource will be retained. + // + // * Snapshot The resource will have a snapshot taken. + // + // * ReplaceAndDelete The resource will be replaced and then deleted. + // + // * ReplaceAndRetain The resource will be replaced and then retained. + // + // * ReplaceAndSnapshot The resource will be replaced and then have a snapshot + // taken. + PolicyAction *string `type:"string" enum:"PolicyAction"` + // For the Modify action, indicates whether CloudFormation will replace the // resource by creating a new one and deleting the old one. This value depends // on the value of the RequiresRecreation property in the ResourceTargetDefinition @@ -20026,6 +20043,12 @@ func (s *ResourceChange) SetPhysicalResourceId(v string) *ResourceChange { return s } +// SetPolicyAction sets the PolicyAction field's value. 
+func (s *ResourceChange) SetPolicyAction(v string) *ResourceChange { + s.PolicyAction = &v + return s +} + // SetReplacement sets the Replacement field's value. func (s *ResourceChange) SetReplacement(v string) *ResourceChange { s.Replacement = &v @@ -25439,7 +25462,7 @@ func (s *TestTypeOutput) SetTypeVersionArn(v string) *TestTypeOutput { // Detailed information concerning the specification of a CloudFormation extension // in a given account and Region. // -// For more information, see Configuring extensions at the account level (https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/registry-register.html#registry-set-configuration) +// For more information, see Configuring extensions at the account level (https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/registry-private.html#registry-set-configuration) // in the CloudFormation User Guide. type TypeConfigurationDetails struct { _ struct{} `type:"structure"` @@ -28387,6 +28410,38 @@ func PermissionModels_Values() []string { } } +const ( + // PolicyActionDelete is a PolicyAction enum value + PolicyActionDelete = "Delete" + + // PolicyActionRetain is a PolicyAction enum value + PolicyActionRetain = "Retain" + + // PolicyActionSnapshot is a PolicyAction enum value + PolicyActionSnapshot = "Snapshot" + + // PolicyActionReplaceAndDelete is a PolicyAction enum value + PolicyActionReplaceAndDelete = "ReplaceAndDelete" + + // PolicyActionReplaceAndRetain is a PolicyAction enum value + PolicyActionReplaceAndRetain = "ReplaceAndRetain" + + // PolicyActionReplaceAndSnapshot is a PolicyAction enum value + PolicyActionReplaceAndSnapshot = "ReplaceAndSnapshot" +) + +// PolicyAction_Values returns all elements of the PolicyAction enum +func PolicyAction_Values() []string { + return []string{ + PolicyActionDelete, + PolicyActionRetain, + PolicyActionSnapshot, + PolicyActionReplaceAndDelete, + PolicyActionReplaceAndRetain, + PolicyActionReplaceAndSnapshot, + } +} + const ( // ProvisioningTypeNonProvisionable is a ProvisioningType enum value ProvisioningTypeNonProvisionable = "NON_PROVISIONABLE" diff --git a/service/datazone/api.go b/service/datazone/api.go index d72b86c0a8b..f75808cbc12 100644 --- a/service/datazone/api.go +++ b/service/datazone/api.go @@ -3751,6 +3751,102 @@ func (c *DataZone) DeleteSubscriptionTargetWithContext(ctx aws.Context, input *D return out, req.Send() } +const opDeleteTimeSeriesDataPoints = "DeleteTimeSeriesDataPoints" + +// DeleteTimeSeriesDataPointsRequest generates a "aws/request.Request" representing the +// client's request for the DeleteTimeSeriesDataPoints operation. The "output" return +// value will be populated with the request's response once the request completes +// successfully. +// +// Use "Send" method on the returned Request to send the API call to the service. +// the "output" return value is not valid until after Send returns without error. +// +// See DeleteTimeSeriesDataPoints for more information on using the DeleteTimeSeriesDataPoints +// API call, and error handling. +// +// This method is useful when you want to inject custom logic or configuration +// into the SDK's request lifecycle. Such as custom headers, or retry logic. +// +// // Example sending a request using the DeleteTimeSeriesDataPointsRequest method. 
+// req, resp := client.DeleteTimeSeriesDataPointsRequest(params) +// +// err := req.Send() +// if err == nil { // resp is now filled +// fmt.Println(resp) +// } +// +// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/DeleteTimeSeriesDataPoints +func (c *DataZone) DeleteTimeSeriesDataPointsRequest(input *DeleteTimeSeriesDataPointsInput) (req *request.Request, output *DeleteTimeSeriesDataPointsOutput) { + op := &request.Operation{ + Name: opDeleteTimeSeriesDataPoints, + HTTPMethod: "DELETE", + HTTPPath: "/v2/domains/{domainIdentifier}/entities/{entityType}/{entityIdentifier}/time-series-data-points", + } + + if input == nil { + input = &DeleteTimeSeriesDataPointsInput{} + } + + output = &DeleteTimeSeriesDataPointsOutput{} + req = c.newRequest(op, input, output) + req.Handlers.Unmarshal.Swap(restjson.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) + return +} + +// DeleteTimeSeriesDataPoints API operation for Amazon DataZone. +// +// Deletes the specified time series form for the specified asset. +// +// Returns awserr.Error for service API and SDK errors. Use runtime type assertions +// with awserr.Error's Code and Message methods to get detailed information about +// the error. +// +// See the AWS API reference guide for Amazon DataZone's +// API operation DeleteTimeSeriesDataPoints for usage and error information. +// +// Returned Error Types: +// +// - InternalServerException +// The request has failed because of an unknown error, exception or failure. +// +// - ResourceNotFoundException +// The specified resource cannot be found. +// +// - AccessDeniedException +// You do not have sufficient access to perform this action. +// +// - ThrottlingException +// The request was denied due to request throttling. +// +// - ValidationException +// The input fails to satisfy the constraints specified by the Amazon Web Services +// service. +// +// - UnauthorizedException +// You do not have permission to perform this action. +// +// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/DeleteTimeSeriesDataPoints +func (c *DataZone) DeleteTimeSeriesDataPoints(input *DeleteTimeSeriesDataPointsInput) (*DeleteTimeSeriesDataPointsOutput, error) { + req, out := c.DeleteTimeSeriesDataPointsRequest(input) + return out, req.Send() +} + +// DeleteTimeSeriesDataPointsWithContext is the same as DeleteTimeSeriesDataPoints with the addition of +// the ability to pass a context and additional request options. +// +// See DeleteTimeSeriesDataPoints for details on how to use this API operation. +// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. +func (c *DataZone) DeleteTimeSeriesDataPointsWithContext(ctx aws.Context, input *DeleteTimeSeriesDataPointsInput, opts ...request.Option) (*DeleteTimeSeriesDataPointsOutput, error) { + req, out := c.DeleteTimeSeriesDataPointsRequest(input) + req.SetContext(ctx) + req.ApplyOptions(opts...) 
+ return out, req.Send() +} + const opGetAsset = "GetAsset" // GetAssetRequest generates a "aws/request.Request" representing the @@ -5764,6 +5860,101 @@ func (c *DataZone) GetSubscriptionTargetWithContext(ctx aws.Context, input *GetS return out, req.Send() } +const opGetTimeSeriesDataPoint = "GetTimeSeriesDataPoint" + +// GetTimeSeriesDataPointRequest generates a "aws/request.Request" representing the +// client's request for the GetTimeSeriesDataPoint operation. The "output" return +// value will be populated with the request's response once the request completes +// successfully. +// +// Use "Send" method on the returned Request to send the API call to the service. +// the "output" return value is not valid until after Send returns without error. +// +// See GetTimeSeriesDataPoint for more information on using the GetTimeSeriesDataPoint +// API call, and error handling. +// +// This method is useful when you want to inject custom logic or configuration +// into the SDK's request lifecycle. Such as custom headers, or retry logic. +// +// // Example sending a request using the GetTimeSeriesDataPointRequest method. +// req, resp := client.GetTimeSeriesDataPointRequest(params) +// +// err := req.Send() +// if err == nil { // resp is now filled +// fmt.Println(resp) +// } +// +// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/GetTimeSeriesDataPoint +func (c *DataZone) GetTimeSeriesDataPointRequest(input *GetTimeSeriesDataPointInput) (req *request.Request, output *GetTimeSeriesDataPointOutput) { + op := &request.Operation{ + Name: opGetTimeSeriesDataPoint, + HTTPMethod: "GET", + HTTPPath: "/v2/domains/{domainIdentifier}/entities/{entityType}/{entityIdentifier}/time-series-data-points/{identifier}", + } + + if input == nil { + input = &GetTimeSeriesDataPointInput{} + } + + output = &GetTimeSeriesDataPointOutput{} + req = c.newRequest(op, input, output) + return +} + +// GetTimeSeriesDataPoint API operation for Amazon DataZone. +// +// Gets the existing data point for the asset. +// +// Returns awserr.Error for service API and SDK errors. Use runtime type assertions +// with awserr.Error's Code and Message methods to get detailed information about +// the error. +// +// See the AWS API reference guide for Amazon DataZone's +// API operation GetTimeSeriesDataPoint for usage and error information. +// +// Returned Error Types: +// +// - InternalServerException +// The request has failed because of an unknown error, exception or failure. +// +// - ResourceNotFoundException +// The specified resource cannot be found. +// +// - AccessDeniedException +// You do not have sufficient access to perform this action. +// +// - ThrottlingException +// The request was denied due to request throttling. +// +// - ValidationException +// The input fails to satisfy the constraints specified by the Amazon Web Services +// service. +// +// - UnauthorizedException +// You do not have permission to perform this action. +// +// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/GetTimeSeriesDataPoint +func (c *DataZone) GetTimeSeriesDataPoint(input *GetTimeSeriesDataPointInput) (*GetTimeSeriesDataPointOutput, error) { + req, out := c.GetTimeSeriesDataPointRequest(input) + return out, req.Send() +} + +// GetTimeSeriesDataPointWithContext is the same as GetTimeSeriesDataPoint with the addition of +// the ability to pass a context and additional request options. +// +// See GetTimeSeriesDataPoint for details on how to use this API operation. 
+// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. +func (c *DataZone) GetTimeSeriesDataPointWithContext(ctx aws.Context, input *GetTimeSeriesDataPointInput, opts ...request.Option) (*GetTimeSeriesDataPointOutput, error) { + req, out := c.GetTimeSeriesDataPointRequest(input) + req.SetContext(ctx) + req.ApplyOptions(opts...) + return out, req.Send() +} + const opGetUserProfile = "GetUserProfile" // GetUserProfileRequest generates a "aws/request.Request" representing the @@ -8553,58 +8744,63 @@ func (c *DataZone) ListTagsForResourceWithContext(ctx aws.Context, input *ListTa return out, req.Send() } -const opPutEnvironmentBlueprintConfiguration = "PutEnvironmentBlueprintConfiguration" +const opListTimeSeriesDataPoints = "ListTimeSeriesDataPoints" -// PutEnvironmentBlueprintConfigurationRequest generates a "aws/request.Request" representing the -// client's request for the PutEnvironmentBlueprintConfiguration operation. The "output" return +// ListTimeSeriesDataPointsRequest generates a "aws/request.Request" representing the +// client's request for the ListTimeSeriesDataPoints operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // -// See PutEnvironmentBlueprintConfiguration for more information on using the PutEnvironmentBlueprintConfiguration +// See ListTimeSeriesDataPoints for more information on using the ListTimeSeriesDataPoints // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // -// // Example sending a request using the PutEnvironmentBlueprintConfigurationRequest method. -// req, resp := client.PutEnvironmentBlueprintConfigurationRequest(params) +// // Example sending a request using the ListTimeSeriesDataPointsRequest method. 
+// req, resp := client.ListTimeSeriesDataPointsRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // -// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/PutEnvironmentBlueprintConfiguration -func (c *DataZone) PutEnvironmentBlueprintConfigurationRequest(input *PutEnvironmentBlueprintConfigurationInput) (req *request.Request, output *PutEnvironmentBlueprintConfigurationOutput) { +// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/ListTimeSeriesDataPoints +func (c *DataZone) ListTimeSeriesDataPointsRequest(input *ListTimeSeriesDataPointsInput) (req *request.Request, output *ListTimeSeriesDataPointsOutput) { op := &request.Operation{ - Name: opPutEnvironmentBlueprintConfiguration, - HTTPMethod: "PUT", - HTTPPath: "/v2/domains/{domainIdentifier}/environment-blueprint-configurations/{environmentBlueprintIdentifier}", + Name: opListTimeSeriesDataPoints, + HTTPMethod: "GET", + HTTPPath: "/v2/domains/{domainIdentifier}/entities/{entityType}/{entityIdentifier}/time-series-data-points", + Paginator: &request.Paginator{ + InputTokens: []string{"nextToken"}, + OutputTokens: []string{"nextToken"}, + LimitToken: "maxResults", + TruncationToken: "", + }, } if input == nil { - input = &PutEnvironmentBlueprintConfigurationInput{} + input = &ListTimeSeriesDataPointsInput{} } - output = &PutEnvironmentBlueprintConfigurationOutput{} + output = &ListTimeSeriesDataPointsOutput{} req = c.newRequest(op, input, output) return } -// PutEnvironmentBlueprintConfiguration API operation for Amazon DataZone. +// ListTimeSeriesDataPoints API operation for Amazon DataZone. // -// Writes the configuration for the specified environment blueprint in Amazon -// DataZone. +// Lists time series data points. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon DataZone's -// API operation PutEnvironmentBlueprintConfiguration for usage and error information. +// API operation ListTimeSeriesDataPoints for usage and error information. // // Returned Error Types: // @@ -8620,9 +8816,6 @@ func (c *DataZone) PutEnvironmentBlueprintConfigurationRequest(input *PutEnviron // - ThrottlingException // The request was denied due to request throttling. // -// - ConflictException -// There is a conflict while performing this action. -// // - ValidationException // The input fails to satisfy the constraints specified by the Amazon Web Services // service. @@ -8630,80 +8823,130 @@ func (c *DataZone) PutEnvironmentBlueprintConfigurationRequest(input *PutEnviron // - UnauthorizedException // You do not have permission to perform this action. 
// -// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/PutEnvironmentBlueprintConfiguration -func (c *DataZone) PutEnvironmentBlueprintConfiguration(input *PutEnvironmentBlueprintConfigurationInput) (*PutEnvironmentBlueprintConfigurationOutput, error) { - req, out := c.PutEnvironmentBlueprintConfigurationRequest(input) +// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/ListTimeSeriesDataPoints +func (c *DataZone) ListTimeSeriesDataPoints(input *ListTimeSeriesDataPointsInput) (*ListTimeSeriesDataPointsOutput, error) { + req, out := c.ListTimeSeriesDataPointsRequest(input) return out, req.Send() } -// PutEnvironmentBlueprintConfigurationWithContext is the same as PutEnvironmentBlueprintConfiguration with the addition of +// ListTimeSeriesDataPointsWithContext is the same as ListTimeSeriesDataPoints with the addition of // the ability to pass a context and additional request options. // -// See PutEnvironmentBlueprintConfiguration for details on how to use this API operation. +// See ListTimeSeriesDataPoints for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. -func (c *DataZone) PutEnvironmentBlueprintConfigurationWithContext(ctx aws.Context, input *PutEnvironmentBlueprintConfigurationInput, opts ...request.Option) (*PutEnvironmentBlueprintConfigurationOutput, error) { - req, out := c.PutEnvironmentBlueprintConfigurationRequest(input) +func (c *DataZone) ListTimeSeriesDataPointsWithContext(ctx aws.Context, input *ListTimeSeriesDataPointsInput, opts ...request.Option) (*ListTimeSeriesDataPointsOutput, error) { + req, out := c.ListTimeSeriesDataPointsRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } -const opRejectPredictions = "RejectPredictions" +// ListTimeSeriesDataPointsPages iterates over the pages of a ListTimeSeriesDataPoints operation, +// calling the "fn" function with the response data for each page. To stop +// iterating, return false from the fn function. +// +// See ListTimeSeriesDataPoints method for more information on how to use this operation. +// +// Note: This operation can generate multiple requests to a service. +// +// // Example iterating over at most 3 pages of a ListTimeSeriesDataPoints operation. +// pageNum := 0 +// err := client.ListTimeSeriesDataPointsPages(params, +// func(page *datazone.ListTimeSeriesDataPointsOutput, lastPage bool) bool { +// pageNum++ +// fmt.Println(page) +// return pageNum <= 3 +// }) +func (c *DataZone) ListTimeSeriesDataPointsPages(input *ListTimeSeriesDataPointsInput, fn func(*ListTimeSeriesDataPointsOutput, bool) bool) error { + return c.ListTimeSeriesDataPointsPagesWithContext(aws.BackgroundContext(), input, fn) +} -// RejectPredictionsRequest generates a "aws/request.Request" representing the -// client's request for the RejectPredictions operation. The "output" return +// ListTimeSeriesDataPointsPagesWithContext same as ListTimeSeriesDataPointsPages except +// it takes a Context and allows setting request options on the pages. +// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. 
+func (c *DataZone) ListTimeSeriesDataPointsPagesWithContext(ctx aws.Context, input *ListTimeSeriesDataPointsInput, fn func(*ListTimeSeriesDataPointsOutput, bool) bool, opts ...request.Option) error { + p := request.Pagination{ + NewRequest: func() (*request.Request, error) { + var inCpy *ListTimeSeriesDataPointsInput + if input != nil { + tmp := *input + inCpy = &tmp + } + req, _ := c.ListTimeSeriesDataPointsRequest(inCpy) + req.SetContext(ctx) + req.ApplyOptions(opts...) + return req, nil + }, + } + + for p.Next() { + if !fn(p.Page().(*ListTimeSeriesDataPointsOutput), !p.HasNextPage()) { + break + } + } + + return p.Err() +} + +const opPostTimeSeriesDataPoints = "PostTimeSeriesDataPoints" + +// PostTimeSeriesDataPointsRequest generates a "aws/request.Request" representing the +// client's request for the PostTimeSeriesDataPoints operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // -// See RejectPredictions for more information on using the RejectPredictions +// See PostTimeSeriesDataPoints for more information on using the PostTimeSeriesDataPoints // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // -// // Example sending a request using the RejectPredictionsRequest method. -// req, resp := client.RejectPredictionsRequest(params) +// // Example sending a request using the PostTimeSeriesDataPointsRequest method. +// req, resp := client.PostTimeSeriesDataPointsRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // -// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/RejectPredictions -func (c *DataZone) RejectPredictionsRequest(input *RejectPredictionsInput) (req *request.Request, output *RejectPredictionsOutput) { +// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/PostTimeSeriesDataPoints +func (c *DataZone) PostTimeSeriesDataPointsRequest(input *PostTimeSeriesDataPointsInput) (req *request.Request, output *PostTimeSeriesDataPointsOutput) { op := &request.Operation{ - Name: opRejectPredictions, - HTTPMethod: "PUT", - HTTPPath: "/v2/domains/{domainIdentifier}/assets/{identifier}/reject-predictions", + Name: opPostTimeSeriesDataPoints, + HTTPMethod: "POST", + HTTPPath: "/v2/domains/{domainIdentifier}/entities/{entityType}/{entityIdentifier}/time-series-data-points", } if input == nil { - input = &RejectPredictionsInput{} + input = &PostTimeSeriesDataPointsInput{} } - output = &RejectPredictionsOutput{} + output = &PostTimeSeriesDataPointsOutput{} req = c.newRequest(op, input, output) return } -// RejectPredictions API operation for Amazon DataZone. +// PostTimeSeriesDataPoints API operation for Amazon DataZone. // -// Rejects automatically generated business-friendly metadata for your Amazon -// DataZone assets. +// Posts time series data points to Amazon DataZone for the specified asset. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon DataZone's -// API operation RejectPredictions for usage and error information. 
+// API operation PostTimeSeriesDataPoints for usage and error information. // // Returned Error Types: // @@ -8719,6 +8962,9 @@ func (c *DataZone) RejectPredictionsRequest(input *RejectPredictionsInput) (req // - ThrottlingException // The request was denied due to request throttling. // +// - ServiceQuotaExceededException +// The request has exceeded the specified service quota. +// // - ConflictException // There is a conflict while performing this action. // @@ -8729,79 +8975,277 @@ func (c *DataZone) RejectPredictionsRequest(input *RejectPredictionsInput) (req // - UnauthorizedException // You do not have permission to perform this action. // -// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/RejectPredictions -func (c *DataZone) RejectPredictions(input *RejectPredictionsInput) (*RejectPredictionsOutput, error) { - req, out := c.RejectPredictionsRequest(input) +// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/PostTimeSeriesDataPoints +func (c *DataZone) PostTimeSeriesDataPoints(input *PostTimeSeriesDataPointsInput) (*PostTimeSeriesDataPointsOutput, error) { + req, out := c.PostTimeSeriesDataPointsRequest(input) return out, req.Send() } -// RejectPredictionsWithContext is the same as RejectPredictions with the addition of +// PostTimeSeriesDataPointsWithContext is the same as PostTimeSeriesDataPoints with the addition of // the ability to pass a context and additional request options. // -// See RejectPredictions for details on how to use this API operation. +// See PostTimeSeriesDataPoints for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. -func (c *DataZone) RejectPredictionsWithContext(ctx aws.Context, input *RejectPredictionsInput, opts ...request.Option) (*RejectPredictionsOutput, error) { - req, out := c.RejectPredictionsRequest(input) +func (c *DataZone) PostTimeSeriesDataPointsWithContext(ctx aws.Context, input *PostTimeSeriesDataPointsInput, opts ...request.Option) (*PostTimeSeriesDataPointsOutput, error) { + req, out := c.PostTimeSeriesDataPointsRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } -const opRejectSubscriptionRequest = "RejectSubscriptionRequest" +const opPutEnvironmentBlueprintConfiguration = "PutEnvironmentBlueprintConfiguration" -// RejectSubscriptionRequestRequest generates a "aws/request.Request" representing the -// client's request for the RejectSubscriptionRequest operation. The "output" return +// PutEnvironmentBlueprintConfigurationRequest generates a "aws/request.Request" representing the +// client's request for the PutEnvironmentBlueprintConfiguration operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // -// See RejectSubscriptionRequest for more information on using the RejectSubscriptionRequest +// See PutEnvironmentBlueprintConfiguration for more information on using the PutEnvironmentBlueprintConfiguration // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. 
Such as custom headers, or retry logic. // -// // Example sending a request using the RejectSubscriptionRequestRequest method. -// req, resp := client.RejectSubscriptionRequestRequest(params) +// // Example sending a request using the PutEnvironmentBlueprintConfigurationRequest method. +// req, resp := client.PutEnvironmentBlueprintConfigurationRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // -// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/RejectSubscriptionRequest -func (c *DataZone) RejectSubscriptionRequestRequest(input *RejectSubscriptionRequestInput) (req *request.Request, output *RejectSubscriptionRequestOutput) { +// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/PutEnvironmentBlueprintConfiguration +func (c *DataZone) PutEnvironmentBlueprintConfigurationRequest(input *PutEnvironmentBlueprintConfigurationInput) (req *request.Request, output *PutEnvironmentBlueprintConfigurationOutput) { op := &request.Operation{ - Name: opRejectSubscriptionRequest, + Name: opPutEnvironmentBlueprintConfiguration, HTTPMethod: "PUT", - HTTPPath: "/v2/domains/{domainIdentifier}/subscription-requests/{identifier}/reject", + HTTPPath: "/v2/domains/{domainIdentifier}/environment-blueprint-configurations/{environmentBlueprintIdentifier}", } if input == nil { - input = &RejectSubscriptionRequestInput{} + input = &PutEnvironmentBlueprintConfigurationInput{} } - output = &RejectSubscriptionRequestOutput{} + output = &PutEnvironmentBlueprintConfigurationOutput{} req = c.newRequest(op, input, output) return } -// RejectSubscriptionRequest API operation for Amazon DataZone. +// PutEnvironmentBlueprintConfiguration API operation for Amazon DataZone. // -// Rejects the specified subscription request. +// Writes the configuration for the specified environment blueprint in Amazon +// DataZone. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon DataZone's -// API operation RejectSubscriptionRequest for usage and error information. +// API operation PutEnvironmentBlueprintConfiguration for usage and error information. +// +// Returned Error Types: +// +// - InternalServerException +// The request has failed because of an unknown error, exception or failure. +// +// - ResourceNotFoundException +// The specified resource cannot be found. +// +// - AccessDeniedException +// You do not have sufficient access to perform this action. +// +// - ThrottlingException +// The request was denied due to request throttling. +// +// - ConflictException +// There is a conflict while performing this action. +// +// - ValidationException +// The input fails to satisfy the constraints specified by the Amazon Web Services +// service. +// +// - UnauthorizedException +// You do not have permission to perform this action. 
+// +// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/PutEnvironmentBlueprintConfiguration +func (c *DataZone) PutEnvironmentBlueprintConfiguration(input *PutEnvironmentBlueprintConfigurationInput) (*PutEnvironmentBlueprintConfigurationOutput, error) { + req, out := c.PutEnvironmentBlueprintConfigurationRequest(input) + return out, req.Send() +} + +// PutEnvironmentBlueprintConfigurationWithContext is the same as PutEnvironmentBlueprintConfiguration with the addition of +// the ability to pass a context and additional request options. +// +// See PutEnvironmentBlueprintConfiguration for details on how to use this API operation. +// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. +func (c *DataZone) PutEnvironmentBlueprintConfigurationWithContext(ctx aws.Context, input *PutEnvironmentBlueprintConfigurationInput, opts ...request.Option) (*PutEnvironmentBlueprintConfigurationOutput, error) { + req, out := c.PutEnvironmentBlueprintConfigurationRequest(input) + req.SetContext(ctx) + req.ApplyOptions(opts...) + return out, req.Send() +} + +const opRejectPredictions = "RejectPredictions" + +// RejectPredictionsRequest generates a "aws/request.Request" representing the +// client's request for the RejectPredictions operation. The "output" return +// value will be populated with the request's response once the request completes +// successfully. +// +// Use "Send" method on the returned Request to send the API call to the service. +// the "output" return value is not valid until after Send returns without error. +// +// See RejectPredictions for more information on using the RejectPredictions +// API call, and error handling. +// +// This method is useful when you want to inject custom logic or configuration +// into the SDK's request lifecycle. Such as custom headers, or retry logic. +// +// // Example sending a request using the RejectPredictionsRequest method. +// req, resp := client.RejectPredictionsRequest(params) +// +// err := req.Send() +// if err == nil { // resp is now filled +// fmt.Println(resp) +// } +// +// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/RejectPredictions +func (c *DataZone) RejectPredictionsRequest(input *RejectPredictionsInput) (req *request.Request, output *RejectPredictionsOutput) { + op := &request.Operation{ + Name: opRejectPredictions, + HTTPMethod: "PUT", + HTTPPath: "/v2/domains/{domainIdentifier}/assets/{identifier}/reject-predictions", + } + + if input == nil { + input = &RejectPredictionsInput{} + } + + output = &RejectPredictionsOutput{} + req = c.newRequest(op, input, output) + return +} + +// RejectPredictions API operation for Amazon DataZone. +// +// Rejects automatically generated business-friendly metadata for your Amazon +// DataZone assets. +// +// Returns awserr.Error for service API and SDK errors. Use runtime type assertions +// with awserr.Error's Code and Message methods to get detailed information about +// the error. +// +// See the AWS API reference guide for Amazon DataZone's +// API operation RejectPredictions for usage and error information. +// +// Returned Error Types: +// +// - InternalServerException +// The request has failed because of an unknown error, exception or failure. +// +// - ResourceNotFoundException +// The specified resource cannot be found. 
+// +// - AccessDeniedException +// You do not have sufficient access to perform this action. +// +// - ThrottlingException +// The request was denied due to request throttling. +// +// - ConflictException +// There is a conflict while performing this action. +// +// - ValidationException +// The input fails to satisfy the constraints specified by the Amazon Web Services +// service. +// +// - UnauthorizedException +// You do not have permission to perform this action. +// +// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/RejectPredictions +func (c *DataZone) RejectPredictions(input *RejectPredictionsInput) (*RejectPredictionsOutput, error) { + req, out := c.RejectPredictionsRequest(input) + return out, req.Send() +} + +// RejectPredictionsWithContext is the same as RejectPredictions with the addition of +// the ability to pass a context and additional request options. +// +// See RejectPredictions for details on how to use this API operation. +// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. +func (c *DataZone) RejectPredictionsWithContext(ctx aws.Context, input *RejectPredictionsInput, opts ...request.Option) (*RejectPredictionsOutput, error) { + req, out := c.RejectPredictionsRequest(input) + req.SetContext(ctx) + req.ApplyOptions(opts...) + return out, req.Send() +} + +const opRejectSubscriptionRequest = "RejectSubscriptionRequest" + +// RejectSubscriptionRequestRequest generates a "aws/request.Request" representing the +// client's request for the RejectSubscriptionRequest operation. The "output" return +// value will be populated with the request's response once the request completes +// successfully. +// +// Use "Send" method on the returned Request to send the API call to the service. +// the "output" return value is not valid until after Send returns without error. +// +// See RejectSubscriptionRequest for more information on using the RejectSubscriptionRequest +// API call, and error handling. +// +// This method is useful when you want to inject custom logic or configuration +// into the SDK's request lifecycle. Such as custom headers, or retry logic. +// +// // Example sending a request using the RejectSubscriptionRequestRequest method. +// req, resp := client.RejectSubscriptionRequestRequest(params) +// +// err := req.Send() +// if err == nil { // resp is now filled +// fmt.Println(resp) +// } +// +// See also, https://docs.aws.amazon.com/goto/WebAPI/datazone-2018-05-10/RejectSubscriptionRequest +func (c *DataZone) RejectSubscriptionRequestRequest(input *RejectSubscriptionRequestInput) (req *request.Request, output *RejectSubscriptionRequestOutput) { + op := &request.Operation{ + Name: opRejectSubscriptionRequest, + HTTPMethod: "PUT", + HTTPPath: "/v2/domains/{domainIdentifier}/subscription-requests/{identifier}/reject", + } + + if input == nil { + input = &RejectSubscriptionRequestInput{} + } + + output = &RejectSubscriptionRequestOutput{} + req = c.newRequest(op, input, output) + return +} + +// RejectSubscriptionRequest API operation for Amazon DataZone. +// +// Rejects the specified subscription request. +// +// Returns awserr.Error for service API and SDK errors. Use runtime type assertions +// with awserr.Error's Code and Message methods to get detailed information about +// the error. 
+// +// See the AWS API reference guide for Amazon DataZone's +// API operation RejectSubscriptionRequest for usage and error information. // // Returned Error Types: // @@ -12054,6 +12498,10 @@ type AssetItemAdditionalAttributes struct { // The forms included in the additional attributes of an inventory asset. FormsOutput []*FormOutput_ `locationName:"formsOutput" type:"list"` + // The latest time series data points forms included in the additional attributes + // of an asset. + LatestTimeSeriesDataPointFormsOutput []*TimeSeriesDataPointSummaryFormOutput_ `locationName:"latestTimeSeriesDataPointFormsOutput" type:"list"` + // The read-only forms included in the additional attributes of an inventory // asset. ReadOnlyFormsOutput []*FormOutput_ `locationName:"readOnlyFormsOutput" type:"list"` @@ -12083,6 +12531,12 @@ func (s *AssetItemAdditionalAttributes) SetFormsOutput(v []*FormOutput_) *AssetI return s } +// SetLatestTimeSeriesDataPointFormsOutput sets the LatestTimeSeriesDataPointFormsOutput field's value. +func (s *AssetItemAdditionalAttributes) SetLatestTimeSeriesDataPointFormsOutput(v []*TimeSeriesDataPointSummaryFormOutput_) *AssetItemAdditionalAttributes { + s.LatestTimeSeriesDataPointFormsOutput = v + return s +} + // SetReadOnlyFormsOutput sets the ReadOnlyFormsOutput field's value. func (s *AssetItemAdditionalAttributes) SetReadOnlyFormsOutput(v []*FormOutput_) *AssetItemAdditionalAttributes { s.ReadOnlyFormsOutput = v @@ -12112,6 +12566,10 @@ type AssetListing struct { // The glossary terms attached to an asset published in an Amazon DataZone catalog. GlossaryTerms []*DetailedGlossaryTerm `locationName:"glossaryTerms" type:"list"` + // The latest time series data points forms included in the additional attributes + // of an asset. + LatestTimeSeriesDataPointForms []*TimeSeriesDataPointSummaryFormOutput_ `locationName:"latestTimeSeriesDataPointForms" type:"list"` + // The identifier of the project where an asset published in an Amazon DataZone // catalog exists. OwningProjectId *string `locationName:"owningProjectId" type:"string"` @@ -12171,6 +12629,12 @@ func (s *AssetListing) SetGlossaryTerms(v []*DetailedGlossaryTerm) *AssetListing return s } +// SetLatestTimeSeriesDataPointForms sets the LatestTimeSeriesDataPointForms field's value. +func (s *AssetListing) SetLatestTimeSeriesDataPointForms(v []*TimeSeriesDataPointSummaryFormOutput_) *AssetListing { + s.LatestTimeSeriesDataPointForms = v + return s +} + // SetOwningProjectId sets the OwningProjectId field's value. func (s *AssetListing) SetOwningProjectId(v string) *AssetListing { s.OwningProjectId = &v @@ -12377,6 +12841,10 @@ type AssetListingItemAdditionalAttributes struct { // The metadata forms that form additional attributes of the metadata asset. Forms *string `locationName:"forms" type:"string"` + + // The latest time series data points forms included in the additional attributes + // of an asset. + LatestTimeSeriesDataPointForms []*TimeSeriesDataPointSummaryFormOutput_ `locationName:"latestTimeSeriesDataPointForms" type:"list"` } // String returns the string representation. @@ -12403,6 +12871,12 @@ func (s *AssetListingItemAdditionalAttributes) SetForms(v string) *AssetListingI return s } +// SetLatestTimeSeriesDataPointForms sets the LatestTimeSeriesDataPointForms field's value. 
+func (s *AssetListingItemAdditionalAttributes) SetLatestTimeSeriesDataPointForms(v []*TimeSeriesDataPointSummaryFormOutput_) *AssetListingItemAdditionalAttributes { + s.LatestTimeSeriesDataPointForms = v + return s +} + // The revision of an inventory asset. type AssetRevision struct { _ struct{} `type:"structure"` @@ -13449,6 +13923,10 @@ type CreateAssetOutput struct { // Id is a required field Id *string `locationName:"id" type:"string" required:"true"` + // The latest data point that was imported into the time series form for the + // asset. + LatestTimeSeriesDataPointFormsOutput []*TimeSeriesDataPointSummaryFormOutput_ `locationName:"latestTimeSeriesDataPointFormsOutput" type:"list"` + // The details of an asset published in an Amazon DataZone catalog. Listing *AssetListingDetails `locationName:"listing" type:"structure"` @@ -13567,6 +14045,12 @@ func (s *CreateAssetOutput) SetId(v string) *CreateAssetOutput { return s } +// SetLatestTimeSeriesDataPointFormsOutput sets the LatestTimeSeriesDataPointFormsOutput field's value. +func (s *CreateAssetOutput) SetLatestTimeSeriesDataPointFormsOutput(v []*TimeSeriesDataPointSummaryFormOutput_) *CreateAssetOutput { + s.LatestTimeSeriesDataPointFormsOutput = v + return s +} + // SetListing sets the Listing field's value. func (s *CreateAssetOutput) SetListing(v *AssetListingDetails) *CreateAssetOutput { s.Listing = v @@ -13832,6 +14316,10 @@ type CreateAssetRevisionOutput struct { // Id is a required field Id *string `locationName:"id" type:"string" required:"true"` + // The latest data point that was imported into the time series form for the + // asset. + LatestTimeSeriesDataPointFormsOutput []*TimeSeriesDataPointSummaryFormOutput_ `locationName:"latestTimeSeriesDataPointFormsOutput" type:"list"` + // The details of an asset published in an Amazon DataZone catalog. Listing *AssetListingDetails `locationName:"listing" type:"structure"` @@ -13951,6 +14439,12 @@ func (s *CreateAssetRevisionOutput) SetId(v string) *CreateAssetRevisionOutput { return s } +// SetLatestTimeSeriesDataPointFormsOutput sets the LatestTimeSeriesDataPointFormsOutput field's value. +func (s *CreateAssetRevisionOutput) SetLatestTimeSeriesDataPointFormsOutput(v []*TimeSeriesDataPointSummaryFormOutput_) *CreateAssetRevisionOutput { + s.LatestTimeSeriesDataPointFormsOutput = v + return s +} + // SetListing sets the Listing field's value. func (s *CreateAssetRevisionOutput) SetListing(v *AssetListingDetails) *CreateAssetRevisionOutput { s.Listing = v @@ -20654,6 +21148,142 @@ func (s DeleteSubscriptionTargetOutput) GoString() string { return s.String() } +type DeleteTimeSeriesDataPointsInput struct { + _ struct{} `type:"structure" nopayload:"true"` + + // A unique, case-sensitive identifier to ensure idempotency of the request. + // This field is automatically populated if not provided. + ClientToken *string `location:"querystring" locationName:"clientToken" min:"1" type:"string" idempotencyToken:"true"` + + // The ID of the Amazon DataZone domain that houses the asset for which you + // want to delete a time series form. + // + // DomainIdentifier is a required field + DomainIdentifier *string `location:"uri" locationName:"domainIdentifier" type:"string" required:"true"` + + // The ID of the asset for which you want to delete a time series form. + // + // EntityIdentifier is a required field + EntityIdentifier *string `location:"uri" locationName:"entityIdentifier" type:"string" required:"true"` + + // The type of the asset for which you want to delete a time series form. 
+ // + // EntityType is a required field + EntityType *string `location:"uri" locationName:"entityType" type:"string" required:"true" enum:"TimeSeriesEntityType"` + + // The name of the time series form that you want to delete. + // + // FormName is a required field + FormName *string `location:"querystring" locationName:"formName" min:"1" type:"string" required:"true"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s DeleteTimeSeriesDataPointsInput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s DeleteTimeSeriesDataPointsInput) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *DeleteTimeSeriesDataPointsInput) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "DeleteTimeSeriesDataPointsInput"} + if s.ClientToken != nil && len(*s.ClientToken) < 1 { + invalidParams.Add(request.NewErrParamMinLen("ClientToken", 1)) + } + if s.DomainIdentifier == nil { + invalidParams.Add(request.NewErrParamRequired("DomainIdentifier")) + } + if s.DomainIdentifier != nil && len(*s.DomainIdentifier) < 1 { + invalidParams.Add(request.NewErrParamMinLen("DomainIdentifier", 1)) + } + if s.EntityIdentifier == nil { + invalidParams.Add(request.NewErrParamRequired("EntityIdentifier")) + } + if s.EntityIdentifier != nil && len(*s.EntityIdentifier) < 1 { + invalidParams.Add(request.NewErrParamMinLen("EntityIdentifier", 1)) + } + if s.EntityType == nil { + invalidParams.Add(request.NewErrParamRequired("EntityType")) + } + if s.EntityType != nil && len(*s.EntityType) < 1 { + invalidParams.Add(request.NewErrParamMinLen("EntityType", 1)) + } + if s.FormName == nil { + invalidParams.Add(request.NewErrParamRequired("FormName")) + } + if s.FormName != nil && len(*s.FormName) < 1 { + invalidParams.Add(request.NewErrParamMinLen("FormName", 1)) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetClientToken sets the ClientToken field's value. +func (s *DeleteTimeSeriesDataPointsInput) SetClientToken(v string) *DeleteTimeSeriesDataPointsInput { + s.ClientToken = &v + return s +} + +// SetDomainIdentifier sets the DomainIdentifier field's value. +func (s *DeleteTimeSeriesDataPointsInput) SetDomainIdentifier(v string) *DeleteTimeSeriesDataPointsInput { + s.DomainIdentifier = &v + return s +} + +// SetEntityIdentifier sets the EntityIdentifier field's value. +func (s *DeleteTimeSeriesDataPointsInput) SetEntityIdentifier(v string) *DeleteTimeSeriesDataPointsInput { + s.EntityIdentifier = &v + return s +} + +// SetEntityType sets the EntityType field's value. +func (s *DeleteTimeSeriesDataPointsInput) SetEntityType(v string) *DeleteTimeSeriesDataPointsInput { + s.EntityType = &v + return s +} + +// SetFormName sets the FormName field's value. +func (s *DeleteTimeSeriesDataPointsInput) SetFormName(v string) *DeleteTimeSeriesDataPointsInput { + s.FormName = &v + return s +} + +type DeleteTimeSeriesDataPointsOutput struct { + _ struct{} `type:"structure"` +} + +// String returns the string representation. 
+// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s DeleteTimeSeriesDataPointsOutput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s DeleteTimeSeriesDataPointsOutput) GoString() string { + return s.String() +} + // The details of the last deployment of the environment. type Deployment struct { _ struct{} `type:"structure"` @@ -22344,6 +22974,10 @@ type GetAssetOutput struct { // Id is a required field Id *string `locationName:"id" type:"string" required:"true"` + // The latest data point that was imported into the time series form for the + // asset. + LatestTimeSeriesDataPointFormsOutput []*TimeSeriesDataPointSummaryFormOutput_ `locationName:"latestTimeSeriesDataPointFormsOutput" type:"list"` + // The listing of the asset. Listing *AssetListingDetails `locationName:"listing" type:"structure"` @@ -22458,6 +23092,12 @@ func (s *GetAssetOutput) SetId(v string) *GetAssetOutput { return s } +// SetLatestTimeSeriesDataPointFormsOutput sets the LatestTimeSeriesDataPointFormsOutput field's value. +func (s *GetAssetOutput) SetLatestTimeSeriesDataPointFormsOutput(v []*TimeSeriesDataPointSummaryFormOutput_) *GetAssetOutput { + s.LatestTimeSeriesDataPointFormsOutput = v + return s +} + // SetListing sets the Listing field's value. func (s *GetAssetOutput) SetListing(v *AssetListingDetails) *GetAssetOutput { s.Listing = v @@ -26640,6 +27280,194 @@ func (s *GetSubscriptionTargetOutput) SetUpdatedBy(v string) *GetSubscriptionTar return s } +type GetTimeSeriesDataPointInput struct { + _ struct{} `type:"structure" nopayload:"true"` + + // The ID of the Amazon DataZone domain that houses the asset for which you + // want to get the data point. + // + // DomainIdentifier is a required field + DomainIdentifier *string `location:"uri" locationName:"domainIdentifier" type:"string" required:"true"` + + // The ID of the asset for which you want to get the data point. + // + // EntityIdentifier is a required field + EntityIdentifier *string `location:"uri" locationName:"entityIdentifier" type:"string" required:"true"` + + // The type of the asset for which you want to get the data point. + // + // EntityType is a required field + EntityType *string `location:"uri" locationName:"entityType" type:"string" required:"true" enum:"TimeSeriesEntityType"` + + // The name of the time series form that houses the data point that you want + // to get. + // + // FormName is a required field + FormName *string `location:"querystring" locationName:"formName" min:"1" type:"string" required:"true"` + + // The ID of the data point that you want to get. + // + // Identifier is a required field + Identifier *string `location:"uri" locationName:"identifier" type:"string" required:"true"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s GetTimeSeriesDataPointInput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. 
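A minimal sketch of the new DeleteTimeSeriesDataPoints operation, using only the input fields shown above and assuming a *datazone.DataZone client (svc) built as in the earlier sketch:

// deleteTimeSeriesFormSketch removes the data points in one time series form
// attached to an asset. ClientToken is omitted; the SDK populates it because
// the field is marked as an idempotency token.
func deleteTimeSeriesFormSketch(svc *datazone.DataZone, domainID, assetID, formName string) error {
    _, err := svc.DeleteTimeSeriesDataPoints(&datazone.DeleteTimeSeriesDataPointsInput{
        DomainIdentifier: aws.String(domainID),
        EntityIdentifier: aws.String(assetID),
        EntityType:       aws.String(datazone.TimeSeriesEntityTypeAsset),
        FormName:         aws.String(formName),
    })
    return err
}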
+// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s GetTimeSeriesDataPointInput) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *GetTimeSeriesDataPointInput) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "GetTimeSeriesDataPointInput"} + if s.DomainIdentifier == nil { + invalidParams.Add(request.NewErrParamRequired("DomainIdentifier")) + } + if s.DomainIdentifier != nil && len(*s.DomainIdentifier) < 1 { + invalidParams.Add(request.NewErrParamMinLen("DomainIdentifier", 1)) + } + if s.EntityIdentifier == nil { + invalidParams.Add(request.NewErrParamRequired("EntityIdentifier")) + } + if s.EntityIdentifier != nil && len(*s.EntityIdentifier) < 1 { + invalidParams.Add(request.NewErrParamMinLen("EntityIdentifier", 1)) + } + if s.EntityType == nil { + invalidParams.Add(request.NewErrParamRequired("EntityType")) + } + if s.EntityType != nil && len(*s.EntityType) < 1 { + invalidParams.Add(request.NewErrParamMinLen("EntityType", 1)) + } + if s.FormName == nil { + invalidParams.Add(request.NewErrParamRequired("FormName")) + } + if s.FormName != nil && len(*s.FormName) < 1 { + invalidParams.Add(request.NewErrParamMinLen("FormName", 1)) + } + if s.Identifier == nil { + invalidParams.Add(request.NewErrParamRequired("Identifier")) + } + if s.Identifier != nil && len(*s.Identifier) < 1 { + invalidParams.Add(request.NewErrParamMinLen("Identifier", 1)) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetDomainIdentifier sets the DomainIdentifier field's value. +func (s *GetTimeSeriesDataPointInput) SetDomainIdentifier(v string) *GetTimeSeriesDataPointInput { + s.DomainIdentifier = &v + return s +} + +// SetEntityIdentifier sets the EntityIdentifier field's value. +func (s *GetTimeSeriesDataPointInput) SetEntityIdentifier(v string) *GetTimeSeriesDataPointInput { + s.EntityIdentifier = &v + return s +} + +// SetEntityType sets the EntityType field's value. +func (s *GetTimeSeriesDataPointInput) SetEntityType(v string) *GetTimeSeriesDataPointInput { + s.EntityType = &v + return s +} + +// SetFormName sets the FormName field's value. +func (s *GetTimeSeriesDataPointInput) SetFormName(v string) *GetTimeSeriesDataPointInput { + s.FormName = &v + return s +} + +// SetIdentifier sets the Identifier field's value. +func (s *GetTimeSeriesDataPointInput) SetIdentifier(v string) *GetTimeSeriesDataPointInput { + s.Identifier = &v + return s +} + +type GetTimeSeriesDataPointOutput struct { + _ struct{} `type:"structure"` + + // The ID of the Amazon DataZone domain that houses the asset data point that + // you want to get. + DomainId *string `locationName:"domainId" type:"string"` + + // The ID of the asset for which you want to get the data point. + EntityId *string `locationName:"entityId" type:"string"` + + // The type of the asset for which you want to get the data point. + EntityType *string `locationName:"entityType" type:"string" enum:"TimeSeriesEntityType"` + + // The time series form that houses the data point that you want to get. + Form *TimeSeriesDataPointFormOutput_ `locationName:"form" type:"structure"` + + // The name of the time series form that houses the data point that you want + // to get. 
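Reading a single data point back uses GetTimeSeriesDataPointInput as defined above; a sketch under the same client assumptions (fmt and the aws helpers are assumed to be imported):

// getTimeSeriesDataPointSketch fetches one data point by ID and prints its
// form name, timestamp, and raw content.
func getTimeSeriesDataPointSketch(svc *datazone.DataZone, domainID, assetID, formName, dataPointID string) error {
    out, err := svc.GetTimeSeriesDataPoint(&datazone.GetTimeSeriesDataPointInput{
        DomainIdentifier: aws.String(domainID),
        EntityIdentifier: aws.String(assetID),
        EntityType:       aws.String(datazone.TimeSeriesEntityTypeAsset),
        FormName:         aws.String(formName),
        Identifier:       aws.String(dataPointID),
    })
    if err != nil {
        return err
    }
    if out.Form != nil {
        fmt.Printf("%s @ %v: %s\n",
            aws.StringValue(out.Form.FormName),
            aws.TimeValue(out.Form.Timestamp),
            aws.StringValue(out.Form.Content))
    }
    return nil
}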
+ FormName *string `locationName:"formName" min:"1" type:"string"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s GetTimeSeriesDataPointOutput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s GetTimeSeriesDataPointOutput) GoString() string { + return s.String() +} + +// SetDomainId sets the DomainId field's value. +func (s *GetTimeSeriesDataPointOutput) SetDomainId(v string) *GetTimeSeriesDataPointOutput { + s.DomainId = &v + return s +} + +// SetEntityId sets the EntityId field's value. +func (s *GetTimeSeriesDataPointOutput) SetEntityId(v string) *GetTimeSeriesDataPointOutput { + s.EntityId = &v + return s +} + +// SetEntityType sets the EntityType field's value. +func (s *GetTimeSeriesDataPointOutput) SetEntityType(v string) *GetTimeSeriesDataPointOutput { + s.EntityType = &v + return s +} + +// SetForm sets the Form field's value. +func (s *GetTimeSeriesDataPointOutput) SetForm(v *TimeSeriesDataPointFormOutput_) *GetTimeSeriesDataPointOutput { + s.Form = v + return s +} + +// SetFormName sets the FormName field's value. +func (s *GetTimeSeriesDataPointOutput) SetFormName(v string) *GetTimeSeriesDataPointOutput { + s.FormName = &v + return s +} + type GetUserProfileInput struct { _ struct{} `type:"structure" nopayload:"true"` @@ -27074,6 +27902,10 @@ func (s *GlossaryTermItem) SetUpdatedBy(v string) *GlossaryTermItem { type GlueRunConfigurationInput_ struct { _ struct{} `type:"structure"` + // Specifies whether to automatically import data quality metrics as part of + // the data source run. + AutoImportDataQualityResult *bool `locationName:"autoImportDataQualityResult" type:"boolean"` + // The data access role included in the configuration details of the Amazon // Web Services Glue data source. DataAccessRole *string `locationName:"dataAccessRole" type:"string"` @@ -27126,6 +27958,12 @@ func (s *GlueRunConfigurationInput_) Validate() error { return nil } +// SetAutoImportDataQualityResult sets the AutoImportDataQualityResult field's value. +func (s *GlueRunConfigurationInput_) SetAutoImportDataQualityResult(v bool) *GlueRunConfigurationInput_ { + s.AutoImportDataQualityResult = &v + return s +} + // SetDataAccessRole sets the DataAccessRole field's value. func (s *GlueRunConfigurationInput_) SetDataAccessRole(v string) *GlueRunConfigurationInput_ { s.DataAccessRole = &v @@ -27146,6 +27984,10 @@ type GlueRunConfigurationOutput_ struct { // of the Amazon Web Services Glue data source. AccountId *string `locationName:"accountId" min:"12" type:"string"` + // Specifies whether to automatically import data quality metrics as part of + // the data source run. + AutoImportDataQualityResult *bool `locationName:"autoImportDataQualityResult" type:"boolean"` + // The data access role included in the configuration details of the Amazon // Web Services Glue data source. 
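The new AutoImportDataQualityResult flag is set on the Glue run configuration input; a small fragment showing the shape, with the assumption that the value is then nested into a data source create or update request as before:

// newGlueRunConfig returns a Glue run configuration that auto-imports data
// quality results; only fields shown in this patch are populated.
func newGlueRunConfig(dataAccessRoleARN string) *datazone.GlueRunConfigurationInput_ {
    return &datazone.GlueRunConfigurationInput_{
        AutoImportDataQualityResult: aws.Bool(true),
        DataAccessRole:              aws.String(dataAccessRoleARN),
    }
}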
DataAccessRole *string `locationName:"dataAccessRole" type:"string"` @@ -27185,6 +28027,12 @@ func (s *GlueRunConfigurationOutput_) SetAccountId(v string) *GlueRunConfigurati return s } +// SetAutoImportDataQualityResult sets the AutoImportDataQualityResult field's value. +func (s *GlueRunConfigurationOutput_) SetAutoImportDataQualityResult(v bool) *GlueRunConfigurationOutput_ { + s.AutoImportDataQualityResult = &v + return s +} + // SetDataAccessRole sets the DataAccessRole field's value. func (s *GlueRunConfigurationOutput_) SetDataAccessRole(v string) *GlueRunConfigurationOutput_ { s.DataAccessRole = &v @@ -30376,6 +31224,201 @@ func (s *ListTagsForResourceOutput) SetTags(v map[string]*string) *ListTagsForRe return s } +type ListTimeSeriesDataPointsInput struct { + _ struct{} `type:"structure" nopayload:"true"` + + // The ID of the Amazon DataZone domain that houses the assets for which you + // want to list time series data points. + // + // DomainIdentifier is a required field + DomainIdentifier *string `location:"uri" locationName:"domainIdentifier" type:"string" required:"true"` + + // The timestamp at which the data points that you wanted to list ended. + EndedAt *time.Time `location:"querystring" locationName:"endedAt" type:"timestamp"` + + // The ID of the asset for which you want to list data points. + // + // EntityIdentifier is a required field + EntityIdentifier *string `location:"uri" locationName:"entityIdentifier" type:"string" required:"true"` + + // The type of the asset for which you want to list data points. + // + // EntityType is a required field + EntityType *string `location:"uri" locationName:"entityType" type:"string" required:"true" enum:"TimeSeriesEntityType"` + + // The name of the time series data points form. + // + // FormName is a required field + FormName *string `location:"querystring" locationName:"formName" min:"1" type:"string" required:"true"` + + // The maximum number of data points to return in a single call to ListTimeSeriesDataPoints. + // When the number of data points to be listed is greater than the value of + // MaxResults, the response contains a NextToken value that you can use in a + // subsequent call to ListTimeSeriesDataPoints to list the next set of data + // points. + MaxResults *int64 `location:"querystring" locationName:"maxResults" min:"1" type:"integer"` + + // When the number of data points is greater than the default value for the + // MaxResults parameter, or if you explicitly specify a value for MaxResults + // that is less than the number of data points, the response includes a pagination + // token named NextToken. You can specify this NextToken value in a subsequent + // call to ListTimeSeriesDataPoints to list the next set of data points. + NextToken *string `location:"querystring" locationName:"nextToken" min:"1" type:"string"` + + // The timestamp at which the data points that you want to list started. + StartedAt *time.Time `location:"querystring" locationName:"startedAt" type:"timestamp"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s ListTimeSeriesDataPointsInput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. 
The member name will be present, but the +// value will be replaced with "sensitive". +func (s ListTimeSeriesDataPointsInput) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *ListTimeSeriesDataPointsInput) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "ListTimeSeriesDataPointsInput"} + if s.DomainIdentifier == nil { + invalidParams.Add(request.NewErrParamRequired("DomainIdentifier")) + } + if s.DomainIdentifier != nil && len(*s.DomainIdentifier) < 1 { + invalidParams.Add(request.NewErrParamMinLen("DomainIdentifier", 1)) + } + if s.EntityIdentifier == nil { + invalidParams.Add(request.NewErrParamRequired("EntityIdentifier")) + } + if s.EntityIdentifier != nil && len(*s.EntityIdentifier) < 1 { + invalidParams.Add(request.NewErrParamMinLen("EntityIdentifier", 1)) + } + if s.EntityType == nil { + invalidParams.Add(request.NewErrParamRequired("EntityType")) + } + if s.EntityType != nil && len(*s.EntityType) < 1 { + invalidParams.Add(request.NewErrParamMinLen("EntityType", 1)) + } + if s.FormName == nil { + invalidParams.Add(request.NewErrParamRequired("FormName")) + } + if s.FormName != nil && len(*s.FormName) < 1 { + invalidParams.Add(request.NewErrParamMinLen("FormName", 1)) + } + if s.MaxResults != nil && *s.MaxResults < 1 { + invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) + } + if s.NextToken != nil && len(*s.NextToken) < 1 { + invalidParams.Add(request.NewErrParamMinLen("NextToken", 1)) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetDomainIdentifier sets the DomainIdentifier field's value. +func (s *ListTimeSeriesDataPointsInput) SetDomainIdentifier(v string) *ListTimeSeriesDataPointsInput { + s.DomainIdentifier = &v + return s +} + +// SetEndedAt sets the EndedAt field's value. +func (s *ListTimeSeriesDataPointsInput) SetEndedAt(v time.Time) *ListTimeSeriesDataPointsInput { + s.EndedAt = &v + return s +} + +// SetEntityIdentifier sets the EntityIdentifier field's value. +func (s *ListTimeSeriesDataPointsInput) SetEntityIdentifier(v string) *ListTimeSeriesDataPointsInput { + s.EntityIdentifier = &v + return s +} + +// SetEntityType sets the EntityType field's value. +func (s *ListTimeSeriesDataPointsInput) SetEntityType(v string) *ListTimeSeriesDataPointsInput { + s.EntityType = &v + return s +} + +// SetFormName sets the FormName field's value. +func (s *ListTimeSeriesDataPointsInput) SetFormName(v string) *ListTimeSeriesDataPointsInput { + s.FormName = &v + return s +} + +// SetMaxResults sets the MaxResults field's value. +func (s *ListTimeSeriesDataPointsInput) SetMaxResults(v int64) *ListTimeSeriesDataPointsInput { + s.MaxResults = &v + return s +} + +// SetNextToken sets the NextToken field's value. +func (s *ListTimeSeriesDataPointsInput) SetNextToken(v string) *ListTimeSeriesDataPointsInput { + s.NextToken = &v + return s +} + +// SetStartedAt sets the StartedAt field's value. +func (s *ListTimeSeriesDataPointsInput) SetStartedAt(v time.Time) *ListTimeSeriesDataPointsInput { + s.StartedAt = &v + return s +} + +type ListTimeSeriesDataPointsOutput struct { + _ struct{} `type:"structure"` + + // The results of the ListTimeSeriesDataPoints action. 
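Listing data points over a time window pairs naturally with the ListTimeSeriesDataPointsPages paginator declared in datazoneiface further below; a sketch using the input fields shown above (the time package is assumed to be imported):

// listTimeSeriesDataPointsSketch walks every page of data points recorded for
// a form between start and end, counting them.
func listTimeSeriesDataPointsSketch(svc *datazone.DataZone, domainID, assetID, formName string, start, end time.Time) (int, error) {
    total := 0
    err := svc.ListTimeSeriesDataPointsPages(&datazone.ListTimeSeriesDataPointsInput{
        DomainIdentifier: aws.String(domainID),
        EntityIdentifier: aws.String(assetID),
        EntityType:       aws.String(datazone.TimeSeriesEntityTypeAsset),
        FormName:         aws.String(formName),
        StartedAt:        aws.Time(start),
        EndedAt:          aws.Time(end),
        MaxResults:       aws.Int64(50),
    }, func(page *datazone.ListTimeSeriesDataPointsOutput, lastPage bool) bool {
        total += len(page.Items)
        return !lastPage // keep paging until the service reports the last page
    })
    return total, err
}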
+ Items []*TimeSeriesDataPointSummaryFormOutput_ `locationName:"items" type:"list"` + + // When the number of data points is greater than the default value for the + // MaxResults parameter, or if you explicitly specify a value for MaxResults + // that is less than the number of data points, the response includes a pagination + // token named NextToken. You can specify this NextToken value in a subsequent + // call to ListTimeSeriesDataPoints to list the next set of data points. + NextToken *string `locationName:"nextToken" min:"1" type:"string"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s ListTimeSeriesDataPointsOutput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s ListTimeSeriesDataPointsOutput) GoString() string { + return s.String() +} + +// SetItems sets the Items field's value. +func (s *ListTimeSeriesDataPointsOutput) SetItems(v []*TimeSeriesDataPointSummaryFormOutput_) *ListTimeSeriesDataPointsOutput { + s.Items = v + return s +} + +// SetNextToken sets the NextToken field's value. +func (s *ListTimeSeriesDataPointsOutput) SetNextToken(v string) *ListTimeSeriesDataPointsOutput { + s.NextToken = &v + return s +} + // The details of a listing (aka asset published in a Amazon DataZone catalog). type ListingItem struct { _ struct{} `type:"structure"` @@ -31028,6 +32071,186 @@ func (s *NotificationResource) SetType(v string) *NotificationResource { return s } +type PostTimeSeriesDataPointsInput struct { + _ struct{} `type:"structure"` + + // A unique, case-sensitive identifier that is provided to ensure the idempotency + // of the request. + ClientToken *string `locationName:"clientToken" min:"1" type:"string" idempotencyToken:"true"` + + // The ID of the Amazon DataZone domain in which you want to post time series + // data points. + // + // DomainIdentifier is a required field + DomainIdentifier *string `location:"uri" locationName:"domainIdentifier" type:"string" required:"true"` + + // The ID of the asset for which you want to post time series data points. + // + // EntityIdentifier is a required field + EntityIdentifier *string `location:"uri" locationName:"entityIdentifier" type:"string" required:"true"` + + // The type of the asset for which you want to post data points. + // + // EntityType is a required field + EntityType *string `location:"uri" locationName:"entityType" type:"string" required:"true" enum:"TimeSeriesEntityType"` + + // The forms that contain the data points that you want to post. + // + // Forms is a required field + Forms []*TimeSeriesDataPointFormInput_ `locationName:"forms" type:"list" required:"true"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s PostTimeSeriesDataPointsInput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. 
+// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s PostTimeSeriesDataPointsInput) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *PostTimeSeriesDataPointsInput) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "PostTimeSeriesDataPointsInput"} + if s.ClientToken != nil && len(*s.ClientToken) < 1 { + invalidParams.Add(request.NewErrParamMinLen("ClientToken", 1)) + } + if s.DomainIdentifier == nil { + invalidParams.Add(request.NewErrParamRequired("DomainIdentifier")) + } + if s.DomainIdentifier != nil && len(*s.DomainIdentifier) < 1 { + invalidParams.Add(request.NewErrParamMinLen("DomainIdentifier", 1)) + } + if s.EntityIdentifier == nil { + invalidParams.Add(request.NewErrParamRequired("EntityIdentifier")) + } + if s.EntityIdentifier != nil && len(*s.EntityIdentifier) < 1 { + invalidParams.Add(request.NewErrParamMinLen("EntityIdentifier", 1)) + } + if s.EntityType == nil { + invalidParams.Add(request.NewErrParamRequired("EntityType")) + } + if s.EntityType != nil && len(*s.EntityType) < 1 { + invalidParams.Add(request.NewErrParamMinLen("EntityType", 1)) + } + if s.Forms == nil { + invalidParams.Add(request.NewErrParamRequired("Forms")) + } + if s.Forms != nil { + for i, v := range s.Forms { + if v == nil { + continue + } + if err := v.Validate(); err != nil { + invalidParams.AddNested(fmt.Sprintf("%s[%v]", "Forms", i), err.(request.ErrInvalidParams)) + } + } + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetClientToken sets the ClientToken field's value. +func (s *PostTimeSeriesDataPointsInput) SetClientToken(v string) *PostTimeSeriesDataPointsInput { + s.ClientToken = &v + return s +} + +// SetDomainIdentifier sets the DomainIdentifier field's value. +func (s *PostTimeSeriesDataPointsInput) SetDomainIdentifier(v string) *PostTimeSeriesDataPointsInput { + s.DomainIdentifier = &v + return s +} + +// SetEntityIdentifier sets the EntityIdentifier field's value. +func (s *PostTimeSeriesDataPointsInput) SetEntityIdentifier(v string) *PostTimeSeriesDataPointsInput { + s.EntityIdentifier = &v + return s +} + +// SetEntityType sets the EntityType field's value. +func (s *PostTimeSeriesDataPointsInput) SetEntityType(v string) *PostTimeSeriesDataPointsInput { + s.EntityType = &v + return s +} + +// SetForms sets the Forms field's value. +func (s *PostTimeSeriesDataPointsInput) SetForms(v []*TimeSeriesDataPointFormInput_) *PostTimeSeriesDataPointsInput { + s.Forms = v + return s +} + +type PostTimeSeriesDataPointsOutput struct { + _ struct{} `type:"structure"` + + // The ID of the Amazon DataZone domain in which you want to post time series + // data points. + DomainId *string `locationName:"domainId" type:"string"` + + // The ID of the asset for which you want to post time series data points. + EntityId *string `locationName:"entityId" type:"string"` + + // The type of the asset for which you want to post data points. + EntityType *string `locationName:"entityType" type:"string" enum:"TimeSeriesEntityType"` + + // The forms that contain the data points that you have posted. + Forms []*TimeSeriesDataPointFormOutput_ `locationName:"forms" type:"list"` +} + +// String returns the string representation. 
+// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s PostTimeSeriesDataPointsOutput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s PostTimeSeriesDataPointsOutput) GoString() string { + return s.String() +} + +// SetDomainId sets the DomainId field's value. +func (s *PostTimeSeriesDataPointsOutput) SetDomainId(v string) *PostTimeSeriesDataPointsOutput { + s.DomainId = &v + return s +} + +// SetEntityId sets the EntityId field's value. +func (s *PostTimeSeriesDataPointsOutput) SetEntityId(v string) *PostTimeSeriesDataPointsOutput { + s.EntityId = &v + return s +} + +// SetEntityType sets the EntityType field's value. +func (s *PostTimeSeriesDataPointsOutput) SetEntityType(v string) *PostTimeSeriesDataPointsOutput { + s.EntityType = &v + return s +} + +// SetForms sets the Forms field's value. +func (s *PostTimeSeriesDataPointsOutput) SetForms(v []*TimeSeriesDataPointFormOutput_) *PostTimeSeriesDataPointsOutput { + s.Forms = v + return s +} + // The configuration of the prediction. type PredictionConfiguration struct { _ struct{} `type:"structure"` @@ -36187,6 +37410,274 @@ func (s *ThrottlingException) RequestID() string { return s.RespMetadata.RequestID } +// The time series data points form. +type TimeSeriesDataPointFormInput_ struct { + _ struct{} `type:"structure"` + + // The content of the time series data points form. + Content *string `locationName:"content" type:"string"` + + // The name of the time series data points form. + // + // FormName is a required field + FormName *string `locationName:"formName" min:"1" type:"string" required:"true"` + + // The timestamp of the time series data points form. + // + // Timestamp is a required field + Timestamp *time.Time `locationName:"timestamp" type:"timestamp" required:"true"` + + // The ID of the type of the time series data points form. + // + // TypeIdentifier is a required field + TypeIdentifier *string `locationName:"typeIdentifier" min:"1" type:"string" required:"true"` + + // The revision type of the time series data points form. + TypeRevision *string `locationName:"typeRevision" min:"1" type:"string"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s TimeSeriesDataPointFormInput_) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s TimeSeriesDataPointFormInput_) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. 
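Posting data points combines PostTimeSeriesDataPointsInput with the TimeSeriesDataPointFormInput_ shape above; a sketch under the same client assumptions, where the form name, type identifier, and JSON content are placeholders:

// postTimeSeriesDataPointSketch writes one data point into a time series form.
// Content is free-form here; the service defines what a given form type accepts.
func postTimeSeriesDataPointSketch(svc *datazone.DataZone, domainID, assetID string) error {
    _, err := svc.PostTimeSeriesDataPoints(&datazone.PostTimeSeriesDataPointsInput{
        DomainIdentifier: aws.String(domainID),
        EntityIdentifier: aws.String(assetID),
        EntityType:       aws.String(datazone.TimeSeriesEntityTypeAsset),
        Forms: []*datazone.TimeSeriesDataPointFormInput_{{
            FormName:       aws.String("exampleDataQualityForm"),          // placeholder form name
            TypeIdentifier: aws.String("example.timeseries.formType"),     // placeholder type ID
            Timestamp:      aws.Time(time.Now()),
            Content:        aws.String(`{"rowCount": 42}`),                // placeholder content
        }},
    })
    return err
}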
+func (s *TimeSeriesDataPointFormInput_) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "TimeSeriesDataPointFormInput_"} + if s.FormName == nil { + invalidParams.Add(request.NewErrParamRequired("FormName")) + } + if s.FormName != nil && len(*s.FormName) < 1 { + invalidParams.Add(request.NewErrParamMinLen("FormName", 1)) + } + if s.Timestamp == nil { + invalidParams.Add(request.NewErrParamRequired("Timestamp")) + } + if s.TypeIdentifier == nil { + invalidParams.Add(request.NewErrParamRequired("TypeIdentifier")) + } + if s.TypeIdentifier != nil && len(*s.TypeIdentifier) < 1 { + invalidParams.Add(request.NewErrParamMinLen("TypeIdentifier", 1)) + } + if s.TypeRevision != nil && len(*s.TypeRevision) < 1 { + invalidParams.Add(request.NewErrParamMinLen("TypeRevision", 1)) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetContent sets the Content field's value. +func (s *TimeSeriesDataPointFormInput_) SetContent(v string) *TimeSeriesDataPointFormInput_ { + s.Content = &v + return s +} + +// SetFormName sets the FormName field's value. +func (s *TimeSeriesDataPointFormInput_) SetFormName(v string) *TimeSeriesDataPointFormInput_ { + s.FormName = &v + return s +} + +// SetTimestamp sets the Timestamp field's value. +func (s *TimeSeriesDataPointFormInput_) SetTimestamp(v time.Time) *TimeSeriesDataPointFormInput_ { + s.Timestamp = &v + return s +} + +// SetTypeIdentifier sets the TypeIdentifier field's value. +func (s *TimeSeriesDataPointFormInput_) SetTypeIdentifier(v string) *TimeSeriesDataPointFormInput_ { + s.TypeIdentifier = &v + return s +} + +// SetTypeRevision sets the TypeRevision field's value. +func (s *TimeSeriesDataPointFormInput_) SetTypeRevision(v string) *TimeSeriesDataPointFormInput_ { + s.TypeRevision = &v + return s +} + +// The time series data points form. +type TimeSeriesDataPointFormOutput_ struct { + _ struct{} `type:"structure"` + + // The content of the time series data points form. + Content *string `locationName:"content" type:"string"` + + // The name of the time series data points form. + // + // FormName is a required field + FormName *string `locationName:"formName" min:"1" type:"string" required:"true"` + + // The ID of the time series data points form. + Id *string `locationName:"id" type:"string"` + + // The timestamp of the time series data points form. + // + // Timestamp is a required field + Timestamp *time.Time `locationName:"timestamp" type:"timestamp" required:"true"` + + // The ID of the type of the time series data points form. + // + // TypeIdentifier is a required field + TypeIdentifier *string `locationName:"typeIdentifier" min:"1" type:"string" required:"true"` + + // The revision type of the time series data points form. + TypeRevision *string `locationName:"typeRevision" min:"1" type:"string"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s TimeSeriesDataPointFormOutput_) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". 
+func (s TimeSeriesDataPointFormOutput_) GoString() string { + return s.String() +} + +// SetContent sets the Content field's value. +func (s *TimeSeriesDataPointFormOutput_) SetContent(v string) *TimeSeriesDataPointFormOutput_ { + s.Content = &v + return s +} + +// SetFormName sets the FormName field's value. +func (s *TimeSeriesDataPointFormOutput_) SetFormName(v string) *TimeSeriesDataPointFormOutput_ { + s.FormName = &v + return s +} + +// SetId sets the Id field's value. +func (s *TimeSeriesDataPointFormOutput_) SetId(v string) *TimeSeriesDataPointFormOutput_ { + s.Id = &v + return s +} + +// SetTimestamp sets the Timestamp field's value. +func (s *TimeSeriesDataPointFormOutput_) SetTimestamp(v time.Time) *TimeSeriesDataPointFormOutput_ { + s.Timestamp = &v + return s +} + +// SetTypeIdentifier sets the TypeIdentifier field's value. +func (s *TimeSeriesDataPointFormOutput_) SetTypeIdentifier(v string) *TimeSeriesDataPointFormOutput_ { + s.TypeIdentifier = &v + return s +} + +// SetTypeRevision sets the TypeRevision field's value. +func (s *TimeSeriesDataPointFormOutput_) SetTypeRevision(v string) *TimeSeriesDataPointFormOutput_ { + s.TypeRevision = &v + return s +} + +// The summary of the time series data points form. +type TimeSeriesDataPointSummaryFormOutput_ struct { + _ struct{} `type:"structure"` + + // The content of the summary of the time series data points form. + ContentSummary *string `locationName:"contentSummary" type:"string"` + + // The name of the time series data points summary form. + // + // FormName is a required field + FormName *string `locationName:"formName" min:"1" type:"string" required:"true"` + + // The ID of the time series data points summary form. + Id *string `locationName:"id" type:"string"` + + // The timestamp of the time series data points summary form. + // + // Timestamp is a required field + Timestamp *time.Time `locationName:"timestamp" type:"timestamp" required:"true"` + + // The type ID of the time series data points summary form. + // + // TypeIdentifier is a required field + TypeIdentifier *string `locationName:"typeIdentifier" min:"1" type:"string" required:"true"` + + // The type revision of the time series data points summary form. + TypeRevision *string `locationName:"typeRevision" min:"1" type:"string"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s TimeSeriesDataPointSummaryFormOutput_) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s TimeSeriesDataPointSummaryFormOutput_) GoString() string { + return s.String() +} + +// SetContentSummary sets the ContentSummary field's value. +func (s *TimeSeriesDataPointSummaryFormOutput_) SetContentSummary(v string) *TimeSeriesDataPointSummaryFormOutput_ { + s.ContentSummary = &v + return s +} + +// SetFormName sets the FormName field's value. +func (s *TimeSeriesDataPointSummaryFormOutput_) SetFormName(v string) *TimeSeriesDataPointSummaryFormOutput_ { + s.FormName = &v + return s +} + +// SetId sets the Id field's value. 
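This summary form is what asset reads return; given a GetAsset response, the most recent data point for each attached time series form can be inspected like this:

// printLatestTimeSeriesForms prints the most recent data point recorded for
// each time series form attached to an asset.
func printLatestTimeSeriesForms(out *datazone.GetAssetOutput) {
    for _, f := range out.LatestTimeSeriesDataPointFormsOutput {
        fmt.Printf("form %s (type %s) latest point at %v: %s\n",
            aws.StringValue(f.FormName),
            aws.StringValue(f.TypeIdentifier),
            aws.TimeValue(f.Timestamp),
            aws.StringValue(f.ContentSummary))
    }
}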
+func (s *TimeSeriesDataPointSummaryFormOutput_) SetId(v string) *TimeSeriesDataPointSummaryFormOutput_ { + s.Id = &v + return s +} + +// SetTimestamp sets the Timestamp field's value. +func (s *TimeSeriesDataPointSummaryFormOutput_) SetTimestamp(v time.Time) *TimeSeriesDataPointSummaryFormOutput_ { + s.Timestamp = &v + return s +} + +// SetTypeIdentifier sets the TypeIdentifier field's value. +func (s *TimeSeriesDataPointSummaryFormOutput_) SetTypeIdentifier(v string) *TimeSeriesDataPointSummaryFormOutput_ { + s.TypeIdentifier = &v + return s +} + +// SetTypeRevision sets the TypeRevision field's value. +func (s *TimeSeriesDataPointSummaryFormOutput_) SetTypeRevision(v string) *TimeSeriesDataPointSummaryFormOutput_ { + s.TypeRevision = &v + return s +} + // The topic of the notification. type Topic struct { _ struct{} `type:"structure"` @@ -40319,12 +41810,16 @@ func RejectRuleBehavior_Values() []string { const ( // SearchOutputAdditionalAttributeForms is a SearchOutputAdditionalAttribute enum value SearchOutputAdditionalAttributeForms = "FORMS" + + // SearchOutputAdditionalAttributeTimeSeriesDataPointForms is a SearchOutputAdditionalAttribute enum value + SearchOutputAdditionalAttributeTimeSeriesDataPointForms = "TIME_SERIES_DATA_POINT_FORMS" ) // SearchOutputAdditionalAttribute_Values returns all elements of the SearchOutputAdditionalAttribute enum func SearchOutputAdditionalAttribute_Values() []string { return []string{ SearchOutputAdditionalAttributeForms, + SearchOutputAdditionalAttributeTimeSeriesDataPointForms, } } @@ -40504,6 +41999,22 @@ func TaskStatus_Values() []string { } } +const ( + // TimeSeriesEntityTypeAsset is a TimeSeriesEntityType enum value + TimeSeriesEntityTypeAsset = "ASSET" + + // TimeSeriesEntityTypeListing is a TimeSeriesEntityType enum value + TimeSeriesEntityTypeListing = "LISTING" +) + +// TimeSeriesEntityType_Values returns all elements of the TimeSeriesEntityType enum +func TimeSeriesEntityType_Values() []string { + return []string{ + TimeSeriesEntityTypeAsset, + TimeSeriesEntityTypeListing, + } +} + const ( // TimezoneUtc is a Timezone enum value TimezoneUtc = "UTC" diff --git a/service/datazone/datazoneiface/interface.go b/service/datazone/datazoneiface/interface.go index 7230c310587..d9268fca8a9 100644 --- a/service/datazone/datazoneiface/interface.go +++ b/service/datazone/datazoneiface/interface.go @@ -212,6 +212,10 @@ type DataZoneAPI interface { DeleteSubscriptionTargetWithContext(aws.Context, *datazone.DeleteSubscriptionTargetInput, ...request.Option) (*datazone.DeleteSubscriptionTargetOutput, error) DeleteSubscriptionTargetRequest(*datazone.DeleteSubscriptionTargetInput) (*request.Request, *datazone.DeleteSubscriptionTargetOutput) + DeleteTimeSeriesDataPoints(*datazone.DeleteTimeSeriesDataPointsInput) (*datazone.DeleteTimeSeriesDataPointsOutput, error) + DeleteTimeSeriesDataPointsWithContext(aws.Context, *datazone.DeleteTimeSeriesDataPointsInput, ...request.Option) (*datazone.DeleteTimeSeriesDataPointsOutput, error) + DeleteTimeSeriesDataPointsRequest(*datazone.DeleteTimeSeriesDataPointsInput) (*request.Request, *datazone.DeleteTimeSeriesDataPointsOutput) + GetAsset(*datazone.GetAssetInput) (*datazone.GetAssetOutput, error) GetAssetWithContext(aws.Context, *datazone.GetAssetInput, ...request.Option) (*datazone.GetAssetOutput, error) GetAssetRequest(*datazone.GetAssetInput) (*request.Request, *datazone.GetAssetOutput) @@ -296,6 +300,10 @@ type DataZoneAPI interface { GetSubscriptionTargetWithContext(aws.Context, *datazone.GetSubscriptionTargetInput, 
...request.Option) (*datazone.GetSubscriptionTargetOutput, error) GetSubscriptionTargetRequest(*datazone.GetSubscriptionTargetInput) (*request.Request, *datazone.GetSubscriptionTargetOutput) + GetTimeSeriesDataPoint(*datazone.GetTimeSeriesDataPointInput) (*datazone.GetTimeSeriesDataPointOutput, error) + GetTimeSeriesDataPointWithContext(aws.Context, *datazone.GetTimeSeriesDataPointInput, ...request.Option) (*datazone.GetTimeSeriesDataPointOutput, error) + GetTimeSeriesDataPointRequest(*datazone.GetTimeSeriesDataPointInput) (*request.Request, *datazone.GetTimeSeriesDataPointOutput) + GetUserProfile(*datazone.GetUserProfileInput) (*datazone.GetUserProfileOutput, error) GetUserProfileWithContext(aws.Context, *datazone.GetUserProfileInput, ...request.Option) (*datazone.GetUserProfileOutput, error) GetUserProfileRequest(*datazone.GetUserProfileInput) (*request.Request, *datazone.GetUserProfileOutput) @@ -423,6 +431,17 @@ type DataZoneAPI interface { ListTagsForResourceWithContext(aws.Context, *datazone.ListTagsForResourceInput, ...request.Option) (*datazone.ListTagsForResourceOutput, error) ListTagsForResourceRequest(*datazone.ListTagsForResourceInput) (*request.Request, *datazone.ListTagsForResourceOutput) + ListTimeSeriesDataPoints(*datazone.ListTimeSeriesDataPointsInput) (*datazone.ListTimeSeriesDataPointsOutput, error) + ListTimeSeriesDataPointsWithContext(aws.Context, *datazone.ListTimeSeriesDataPointsInput, ...request.Option) (*datazone.ListTimeSeriesDataPointsOutput, error) + ListTimeSeriesDataPointsRequest(*datazone.ListTimeSeriesDataPointsInput) (*request.Request, *datazone.ListTimeSeriesDataPointsOutput) + + ListTimeSeriesDataPointsPages(*datazone.ListTimeSeriesDataPointsInput, func(*datazone.ListTimeSeriesDataPointsOutput, bool) bool) error + ListTimeSeriesDataPointsPagesWithContext(aws.Context, *datazone.ListTimeSeriesDataPointsInput, func(*datazone.ListTimeSeriesDataPointsOutput, bool) bool, ...request.Option) error + + PostTimeSeriesDataPoints(*datazone.PostTimeSeriesDataPointsInput) (*datazone.PostTimeSeriesDataPointsOutput, error) + PostTimeSeriesDataPointsWithContext(aws.Context, *datazone.PostTimeSeriesDataPointsInput, ...request.Option) (*datazone.PostTimeSeriesDataPointsOutput, error) + PostTimeSeriesDataPointsRequest(*datazone.PostTimeSeriesDataPointsInput) (*request.Request, *datazone.PostTimeSeriesDataPointsOutput) + PutEnvironmentBlueprintConfiguration(*datazone.PutEnvironmentBlueprintConfigurationInput) (*datazone.PutEnvironmentBlueprintConfigurationOutput, error) PutEnvironmentBlueprintConfigurationWithContext(aws.Context, *datazone.PutEnvironmentBlueprintConfigurationInput, ...request.Option) (*datazone.PutEnvironmentBlueprintConfigurationOutput, error) PutEnvironmentBlueprintConfigurationRequest(*datazone.PutEnvironmentBlueprintConfigurationInput) (*request.Request, *datazone.PutEnvironmentBlueprintConfigurationOutput) diff --git a/service/docdb/api.go b/service/docdb/api.go index 980e769f7ed..14982c4c05b 100644 --- a/service/docdb/api.go +++ b/service/docdb/api.go @@ -5603,6 +5603,95 @@ func (c *DocDB) StopDBClusterWithContext(ctx aws.Context, input *StopDBClusterIn return out, req.Send() } +const opSwitchoverGlobalCluster = "SwitchoverGlobalCluster" + +// SwitchoverGlobalClusterRequest generates a "aws/request.Request" representing the +// client's request for the SwitchoverGlobalCluster operation. The "output" return +// value will be populated with the request's response once the request completes +// successfully. 
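Because the time series operations are also added to datazoneiface.DataZoneAPI above, code written against the interface can stub only the methods it exercises; a small test-double sketch (imports datazone and datazoneiface; any method left unimplemented panics on use because the embedded interface is nil):

// stubDataZone records posted time series forms for assertions in unit tests.
type stubDataZone struct {
    datazoneiface.DataZoneAPI
    posted []*datazone.PostTimeSeriesDataPointsInput
}

func (s *stubDataZone) PostTimeSeriesDataPoints(in *datazone.PostTimeSeriesDataPointsInput) (*datazone.PostTimeSeriesDataPointsOutput, error) {
    s.posted = append(s.posted, in)
    return &datazone.PostTimeSeriesDataPointsOutput{}, nil
}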
+// +// Use "Send" method on the returned Request to send the API call to the service. +// the "output" return value is not valid until after Send returns without error. +// +// See SwitchoverGlobalCluster for more information on using the SwitchoverGlobalCluster +// API call, and error handling. +// +// This method is useful when you want to inject custom logic or configuration +// into the SDK's request lifecycle. Such as custom headers, or retry logic. +// +// // Example sending a request using the SwitchoverGlobalClusterRequest method. +// req, resp := client.SwitchoverGlobalClusterRequest(params) +// +// err := req.Send() +// if err == nil { // resp is now filled +// fmt.Println(resp) +// } +// +// See also, https://docs.aws.amazon.com/goto/WebAPI/docdb-2014-10-31/SwitchoverGlobalCluster +func (c *DocDB) SwitchoverGlobalClusterRequest(input *SwitchoverGlobalClusterInput) (req *request.Request, output *SwitchoverGlobalClusterOutput) { + op := &request.Operation{ + Name: opSwitchoverGlobalCluster, + HTTPMethod: "POST", + HTTPPath: "/", + } + + if input == nil { + input = &SwitchoverGlobalClusterInput{} + } + + output = &SwitchoverGlobalClusterOutput{} + req = c.newRequest(op, input, output) + return +} + +// SwitchoverGlobalCluster API operation for Amazon DocumentDB with MongoDB compatibility. +// +// Switches over the specified secondary Amazon DocumentDB cluster to be the +// new primary Amazon DocumentDB cluster in the global database cluster. +// +// Returns awserr.Error for service API and SDK errors. Use runtime type assertions +// with awserr.Error's Code and Message methods to get detailed information about +// the error. +// +// See the AWS API reference guide for Amazon DocumentDB with MongoDB compatibility's +// API operation SwitchoverGlobalCluster for usage and error information. +// +// Returned Error Codes: +// +// - ErrCodeGlobalClusterNotFoundFault "GlobalClusterNotFoundFault" +// The GlobalClusterIdentifier doesn't refer to an existing global cluster. +// +// - ErrCodeInvalidGlobalClusterStateFault "InvalidGlobalClusterStateFault" +// The requested operation can't be performed while the cluster is in this state. +// +// - ErrCodeDBClusterNotFoundFault "DBClusterNotFoundFault" +// DBClusterIdentifier doesn't refer to an existing cluster. +// +// - ErrCodeInvalidDBClusterStateFault "InvalidDBClusterStateFault" +// The cluster isn't in a valid state. +// +// See also, https://docs.aws.amazon.com/goto/WebAPI/docdb-2014-10-31/SwitchoverGlobalCluster +func (c *DocDB) SwitchoverGlobalCluster(input *SwitchoverGlobalClusterInput) (*SwitchoverGlobalClusterOutput, error) { + req, out := c.SwitchoverGlobalClusterRequest(input) + return out, req.Send() +} + +// SwitchoverGlobalClusterWithContext is the same as SwitchoverGlobalCluster with the addition of +// the ability to pass a context and additional request options. +// +// See SwitchoverGlobalCluster for details on how to use this API operation. +// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. +func (c *DocDB) SwitchoverGlobalClusterWithContext(ctx aws.Context, input *SwitchoverGlobalClusterInput, opts ...request.Option) (*SwitchoverGlobalClusterOutput, error) { + req, out := c.SwitchoverGlobalClusterRequest(input) + req.SetContext(ctx) + req.ApplyOptions(opts...) 
+ return out, req.Send() +} + // Represents the input to AddSourceIdentifierToSubscription. type AddSourceIdentifierToSubscriptionInput struct { _ struct{} `type:"structure"` @@ -15743,6 +15832,123 @@ func (s *Subnet) SetSubnetStatus(v string) *Subnet { return s } +type SwitchoverGlobalClusterInput struct { + _ struct{} `type:"structure"` + + // The identifier of the Amazon DocumentDB global database cluster to switch + // over. The identifier is the unique key assigned by the user when the cluster + // is created. In other words, it's the name of the global cluster. This parameter + // isn’t case-sensitive. + // + // Constraints: + // + // * Must match the identifier of an existing global cluster (Amazon DocumentDB + // global database). + // + // * Minimum length of 1. Maximum length of 255. + // + // Pattern: [A-Za-z][0-9A-Za-z-:._]* + // + // GlobalClusterIdentifier is a required field + GlobalClusterIdentifier *string `min:"1" type:"string" required:"true"` + + // The identifier of the secondary Amazon DocumentDB cluster to promote to the + // new primary for the global database cluster. Use the Amazon Resource Name + // (ARN) for the identifier so that Amazon DocumentDB can locate the cluster + // in its Amazon Web Services region. + // + // Constraints: + // + // * Must match the identifier of an existing secondary cluster. + // + // * Minimum length of 1. Maximum length of 255. + // + // Pattern: [A-Za-z][0-9A-Za-z-:._]* + // + // TargetDbClusterIdentifier is a required field + TargetDbClusterIdentifier *string `type:"string" required:"true"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s SwitchoverGlobalClusterInput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s SwitchoverGlobalClusterInput) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *SwitchoverGlobalClusterInput) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "SwitchoverGlobalClusterInput"} + if s.GlobalClusterIdentifier == nil { + invalidParams.Add(request.NewErrParamRequired("GlobalClusterIdentifier")) + } + if s.GlobalClusterIdentifier != nil && len(*s.GlobalClusterIdentifier) < 1 { + invalidParams.Add(request.NewErrParamMinLen("GlobalClusterIdentifier", 1)) + } + if s.TargetDbClusterIdentifier == nil { + invalidParams.Add(request.NewErrParamRequired("TargetDbClusterIdentifier")) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetGlobalClusterIdentifier sets the GlobalClusterIdentifier field's value. +func (s *SwitchoverGlobalClusterInput) SetGlobalClusterIdentifier(v string) *SwitchoverGlobalClusterInput { + s.GlobalClusterIdentifier = &v + return s +} + +// SetTargetDbClusterIdentifier sets the TargetDbClusterIdentifier field's value. 
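A sketch of invoking the switchover with the two fields shown above; the target is identified by its cluster ARN so DocumentDB can locate it in its Region, and the client setup mirrors the earlier examples with the docdb service package:

import (
    "github.com/aws/aws-sdk-go/aws"
    "github.com/aws/aws-sdk-go/aws/session"
    "github.com/aws/aws-sdk-go/service/docdb"
)

// switchoverGlobalClusterSketch promotes a secondary cluster to primary for a
// DocumentDB global cluster and returns the updated global cluster state.
func switchoverGlobalClusterSketch(globalClusterID, targetClusterARN string) (*docdb.GlobalCluster, error) {
    svc := docdb.New(session.Must(session.NewSession()))
    out, err := svc.SwitchoverGlobalClusterWithContext(aws.BackgroundContext(), &docdb.SwitchoverGlobalClusterInput{
        GlobalClusterIdentifier:   aws.String(globalClusterID),
        TargetDbClusterIdentifier: aws.String(targetClusterARN),
    })
    if err != nil {
        return nil, err
    }
    return out.GlobalCluster, nil
}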
+func (s *SwitchoverGlobalClusterInput) SetTargetDbClusterIdentifier(v string) *SwitchoverGlobalClusterInput { + s.TargetDbClusterIdentifier = &v + return s +} + +type SwitchoverGlobalClusterOutput struct { + _ struct{} `type:"structure"` + + // A data type representing an Amazon DocumentDB global cluster. + GlobalCluster *GlobalCluster `type:"structure"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s SwitchoverGlobalClusterOutput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s SwitchoverGlobalClusterOutput) GoString() string { + return s.String() +} + +// SetGlobalCluster sets the GlobalCluster field's value. +func (s *SwitchoverGlobalClusterOutput) SetGlobalCluster(v *GlobalCluster) *SwitchoverGlobalClusterOutput { + s.GlobalCluster = v + return s +} + // Metadata assigned to an Amazon DocumentDB resource consisting of a key-value // pair. type Tag struct { diff --git a/service/docdb/docdbiface/interface.go b/service/docdb/docdbiface/interface.go index 414f394cac1..ac794da6405 100644 --- a/service/docdb/docdbiface/interface.go +++ b/service/docdb/docdbiface/interface.go @@ -311,6 +311,10 @@ type DocDBAPI interface { StopDBClusterWithContext(aws.Context, *docdb.StopDBClusterInput, ...request.Option) (*docdb.StopDBClusterOutput, error) StopDBClusterRequest(*docdb.StopDBClusterInput) (*request.Request, *docdb.StopDBClusterOutput) + SwitchoverGlobalCluster(*docdb.SwitchoverGlobalClusterInput) (*docdb.SwitchoverGlobalClusterOutput, error) + SwitchoverGlobalClusterWithContext(aws.Context, *docdb.SwitchoverGlobalClusterInput, ...request.Option) (*docdb.SwitchoverGlobalClusterOutput, error) + SwitchoverGlobalClusterRequest(*docdb.SwitchoverGlobalClusterInput) (*request.Request, *docdb.SwitchoverGlobalClusterOutput) + WaitUntilDBInstanceAvailable(*docdb.DescribeDBInstancesInput) error WaitUntilDBInstanceAvailableWithContext(aws.Context, *docdb.DescribeDBInstancesInput, ...request.WaiterOption) error diff --git a/service/groundstation/api.go b/service/groundstation/api.go index a1498955885..abe0a36ddc1 100644 --- a/service/groundstation/api.go +++ b/service/groundstation/api.go @@ -4420,6 +4420,20 @@ type ContactData struct { // Tags assigned to a contact. Tags map[string]*string `locationName:"tags" type:"map"` + + // Projected time in UTC your satellite will set below the receive mask (https://docs.aws.amazon.com/ground-station/latest/ug/site-masks.html). + // This time is based on the satellite's current active ephemeris for future + // contacts and the ephemeris that was active during contact execution for completed + // contacts. This field is not present for contacts with a SCHEDULING or SCHEDULED + // status. + VisibilityEndTime *time.Time `locationName:"visibilityEndTime" type:"timestamp"` + + // Projected time in UTC your satellite will rise above the receive mask (https://docs.aws.amazon.com/ground-station/latest/ug/site-masks.html). 
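
The interface.go hunk above extends DocDBAPI with the new operation, which is what makes it easy to stub in unit tests. A sketch of that pattern, with a hypothetical mock type returning a canned response:

    package main

    import (
        "fmt"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/service/docdb"
        "github.com/aws/aws-sdk-go/service/docdb/docdbiface"
    )

    // mockDocDB embeds the interface so only the method under test needs a stub.
    type mockDocDB struct {
        docdbiface.DocDBAPI
    }

    func (m *mockDocDB) SwitchoverGlobalCluster(in *docdb.SwitchoverGlobalClusterInput) (*docdb.SwitchoverGlobalClusterOutput, error) {
        // Return a canned response instead of calling the service.
        return &docdb.SwitchoverGlobalClusterOutput{
            GlobalCluster: &docdb.GlobalCluster{
                GlobalClusterIdentifier: in.GlobalClusterIdentifier,
            },
        }, nil
    }

    func switchover(api docdbiface.DocDBAPI, id, target string) error {
        _, err := api.SwitchoverGlobalCluster(&docdb.SwitchoverGlobalClusterInput{
            GlobalClusterIdentifier:   aws.String(id),
            TargetDbClusterIdentifier: aws.String(target),
        })
        return err
    }

    func main() {
        err := switchover(&mockDocDB{}, "example-global-cluster",
            "arn:aws:rds:eu-west-1:111122223333:cluster:example-secondary")
        fmt.Println("switchover error:", err)
    }
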
+ // This time is based on the satellite's current active ephemeris for future + // contacts and the ephemeris that was active during contact execution for completed + // contacts. This field is not present for contacts with a SCHEDULING or SCHEDULED + // status. + VisibilityStartTime *time.Time `locationName:"visibilityStartTime" type:"timestamp"` } // String returns the string representation. @@ -4518,6 +4532,18 @@ func (s *ContactData) SetTags(v map[string]*string) *ContactData { return s } +// SetVisibilityEndTime sets the VisibilityEndTime field's value. +func (s *ContactData) SetVisibilityEndTime(v time.Time) *ContactData { + s.VisibilityEndTime = &v + return s +} + +// SetVisibilityStartTime sets the VisibilityStartTime field's value. +func (s *ContactData) SetVisibilityStartTime(v time.Time) *ContactData { + s.VisibilityStartTime = &v + return s +} + type CreateConfigInput struct { _ struct{} `type:"structure"` @@ -4942,12 +4968,12 @@ func (s *CreateEphemerisOutput) SetEphemerisId(v string) *CreateEphemerisOutput type CreateMissionProfileInput struct { _ struct{} `type:"structure"` - // Amount of time after a contact ends that you’d like to receive a CloudWatch - // event indicating the pass has finished. + // Amount of time after a contact ends that you’d like to receive a Ground + // Station Contact State Change event indicating the pass has finished. ContactPostPassDurationSeconds *int64 `locationName:"contactPostPassDurationSeconds" type:"integer"` - // Amount of time prior to contact start you’d like to receive a CloudWatch - // event indicating an upcoming pass. + // Amount of time prior to contact start you’d like to receive a Ground Station + // Contact State Change event indicating an upcoming pass. ContactPrePassDurationSeconds *int64 `locationName:"contactPrePassDurationSeconds" type:"integer"` // A list of lists of ARNs. Each list of ARNs is an edge, with a from Config @@ -6012,6 +6038,18 @@ type DescribeContactOutput struct { // Tags assigned to a contact. Tags map[string]*string `locationName:"tags" type:"map"` + + // Projected time in UTC your satellite will set below the receive mask (https://docs.aws.amazon.com/ground-station/latest/ug/site-masks.html). + // This time is based on the satellite's current active ephemeris for future + // contacts and the ephemeris that was active during contact execution for completed + // contacts. + VisibilityEndTime *time.Time `locationName:"visibilityEndTime" type:"timestamp"` + + // Projected time in UTC your satellite will rise above the receive mask (https://docs.aws.amazon.com/ground-station/latest/ug/site-masks.html). + // This time is based on the satellite's current active ephemeris for future + // contacts and the ephemeris that was active during contact execution for completed + // contacts. + VisibilityStartTime *time.Time `locationName:"visibilityStartTime" type:"timestamp"` } // String returns the string representation. @@ -6116,6 +6154,18 @@ func (s *DescribeContactOutput) SetTags(v map[string]*string) *DescribeContactOu return s } +// SetVisibilityEndTime sets the VisibilityEndTime field's value. +func (s *DescribeContactOutput) SetVisibilityEndTime(v time.Time) *DescribeContactOutput { + s.VisibilityEndTime = &v + return s +} + +// SetVisibilityStartTime sets the VisibilityStartTime field's value. 
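
The visibilityStartTime and visibilityEndTime fields added above expose the projected receive-mask window on contact data. A sketch of reading them from DescribeContact; the region and contact ID are placeholders, and both fields are checked for nil because they are not returned for every contact status:

    package main

    import (
        "fmt"
        "log"
        "time"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/aws/session"
        "github.com/aws/aws-sdk-go/service/groundstation"
    )

    func main() {
        sess := session.Must(session.NewSession(&aws.Config{Region: aws.String("us-east-2")}))
        svc := groundstation.New(sess)

        // The contact ID below is a placeholder UUID.
        out, err := svc.DescribeContact(&groundstation.DescribeContactInput{
            ContactId: aws.String("11111111-2222-3333-4444-555555555555"),
        })
        if err != nil {
            log.Fatal(err)
        }
        if out.VisibilityStartTime != nil && out.VisibilityEndTime != nil {
            fmt.Printf("receive-mask visibility: %s to %s\n",
                out.VisibilityStartTime.Format(time.RFC3339),
                out.VisibilityEndTime.Format(time.RFC3339))
        }
    }
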
+func (s *DescribeContactOutput) SetVisibilityStartTime(v time.Time) *DescribeContactOutput { + s.VisibilityStartTime = &v + return s +} + type DescribeEphemerisInput struct { _ struct{} `type:"structure" nopayload:"true"` @@ -10966,12 +11016,12 @@ func (s *UpdateEphemerisOutput) SetEphemerisId(v string) *UpdateEphemerisOutput type UpdateMissionProfileInput struct { _ struct{} `type:"structure"` - // Amount of time after a contact ends that you’d like to receive a CloudWatch - // event indicating the pass has finished. + // Amount of time after a contact ends that you’d like to receive a Ground + // Station Contact State Change event indicating the pass has finished. ContactPostPassDurationSeconds *int64 `locationName:"contactPostPassDurationSeconds" type:"integer"` - // Amount of time after a contact ends that you’d like to receive a CloudWatch - // event indicating the pass has finished. + // Amount of time after a contact ends that you’d like to receive a Ground + // Station Contact State Change event indicating the pass has finished. ContactPrePassDurationSeconds *int64 `locationName:"contactPrePassDurationSeconds" type:"integer"` // A list of lists of ARNs. Each list of ARNs is an edge, with a from Config diff --git a/service/lambda/api.go b/service/lambda/api.go index c29cb6b1d5c..783776150b2 100644 --- a/service/lambda/api.go +++ b/service/lambda/api.go @@ -9264,7 +9264,7 @@ type CreateFunctionInput struct { // (https://docs.aws.amazon.com/lambda/latest/dg/foundation-progmodel.html). Handler *string `type:"string"` - // Container image configuration values (https://docs.aws.amazon.com/lambda/latest/dg/configuration-images.html#configuration-images-settings) + // Container image configuration values (https://docs.aws.amazon.com/lambda/latest/dg/images-create.html#images-parms) // that override the values in the container image Dockerfile. ImageConfig *ImageConfig `type:"structure"` @@ -21951,7 +21951,7 @@ type UpdateFunctionConfigurationInput struct { // (https://docs.aws.amazon.com/lambda/latest/dg/foundation-progmodel.html). Handler *string `type:"string"` - // Container image configuration values (https://docs.aws.amazon.com/lambda/latest/dg/images-parms.html) + // Container image configuration values (https://docs.aws.amazon.com/lambda/latest/dg/images-create.html#images-parms) // that override the values in the container image Docker file. ImageConfig *ImageConfig `type:"structure"` @@ -23195,6 +23195,9 @@ const ( // RuntimeRuby32 is a Runtime enum value RuntimeRuby32 = "ruby3.2" + // RuntimeRuby33 is a Runtime enum value + RuntimeRuby33 = "ruby3.3" + // RuntimePython311 is a Runtime enum value RuntimePython311 = "python3.11" @@ -23246,6 +23249,7 @@ func Runtime_Values() []string { RuntimePython310, RuntimeJava17, RuntimeRuby32, + RuntimeRuby33, RuntimePython311, RuntimeNodejs20X, RuntimeProvidedAl2023, diff --git a/service/medialive/api.go b/service/medialive/api.go index 529c0bc58fb..592ceafc9eb 100644 --- a/service/medialive/api.go +++ b/service/medialive/api.go @@ -7381,6 +7381,10 @@ func (s *AudioCodecSettings) SetWavSettings(v *WavSettings) *AudioCodecSettings type AudioDescription struct { _ struct{} `type:"structure"` + // Identifies the DASH roles to assign to this audio output. Applies only when + // the audio output is configured for DVB DASH accessibility signaling. + AudioDashRoles []*string `locationName:"audioDashRoles" type:"list" enum:"DashRoleAudio"` + // Advanced audio normalization settings. 
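
The Lambda hunk earlier in this section adds the ruby3.3 runtime enum value. A sketch of creating a function with it; the function name, role ARN, bucket, and key are placeholders, and an existing Ruby deployment package is assumed:

    package main

    import (
        "fmt"
        "log"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/aws/session"
        "github.com/aws/aws-sdk-go/service/lambda"
    )

    func main() {
        sess := session.Must(session.NewSession(&aws.Config{Region: aws.String("us-east-1")}))
        svc := lambda.New(sess)

        // All names and ARNs below are placeholders.
        out, err := svc.CreateFunction(&lambda.CreateFunctionInput{
            FunctionName: aws.String("ruby33-demo"),
            Runtime:      aws.String(lambda.RuntimeRuby33), // enum value added in this release
            Handler:      aws.String("lambda_function.lambda_handler"),
            Role:         aws.String("arn:aws:iam::111122223333:role/lambda-exec-role"),
            Code: &lambda.FunctionCode{
                S3Bucket: aws.String("example-deployment-bucket"),
                S3Key:    aws.String("ruby33-demo.zip"),
            },
        })
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println("created:", aws.StringValue(out.FunctionArn))
    }
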
AudioNormalizationSettings *AudioNormalizationSettings `locationName:"audioNormalizationSettings" type:"structure"` @@ -7408,6 +7412,11 @@ type AudioDescription struct { // Audio codec settings. CodecSettings *AudioCodecSettings `locationName:"codecSettings" type:"structure"` + // Identifies DVB DASH accessibility signaling in this audio output. Used in + // Microsoft Smooth Streaming outputs to signal accessibility information to + // packagers. + DvbDashAccessibility *string `locationName:"dvbDashAccessibility" type:"string" enum:"DvbDashAccessibility"` + // RFC 5646 language code representing the language of the audio output track. // Only used if languageControlMode is useConfigured, or there is no ISO 639 // language code specified in the input. @@ -7487,6 +7496,12 @@ func (s *AudioDescription) Validate() error { return nil } +// SetAudioDashRoles sets the AudioDashRoles field's value. +func (s *AudioDescription) SetAudioDashRoles(v []*string) *AudioDescription { + s.AudioDashRoles = v + return s +} + // SetAudioNormalizationSettings sets the AudioNormalizationSettings field's value. func (s *AudioDescription) SetAudioNormalizationSettings(v *AudioNormalizationSettings) *AudioDescription { s.AudioNormalizationSettings = v @@ -7523,6 +7538,12 @@ func (s *AudioDescription) SetCodecSettings(v *AudioCodecSettings) *AudioDescrip return s } +// SetDvbDashAccessibility sets the DvbDashAccessibility field's value. +func (s *AudioDescription) SetDvbDashAccessibility(v string) *AudioDescription { + s.DvbDashAccessibility = &v + return s +} + // SetLanguageCode sets the LanguageCode field's value. func (s *AudioDescription) SetLanguageCode(v string) *AudioDescription { s.LanguageCode = &v @@ -9768,6 +9789,10 @@ type CaptionDescription struct { // is added to HLS output group and MediaPackage output group. Accessibility *string `locationName:"accessibility" type:"string" enum:"AccessibilityType"` + // Identifies the DASH roles to assign to this captions output. Applies only + // when the captions output is configured for DVB DASH accessibility signaling. + CaptionDashRoles []*string `locationName:"captionDashRoles" type:"list" enum:"DashRoleCaption"` + // Specifies which input caption selector to use as a caption source when generating // output captions. This field should match a captionSelector name. // @@ -9778,6 +9803,11 @@ type CaptionDescription struct { // type. DestinationSettings *CaptionDestinationSettings `locationName:"destinationSettings" type:"structure"` + // Identifies DVB DASH accessibility signaling in this captions output. Used + // in Microsoft Smooth Streaming outputs to signal accessibility information + // to packagers. + DvbDashAccessibility *string `locationName:"dvbDashAccessibility" type:"string" enum:"DvbDashAccessibility"` + // ISO 639-2 three-digit code: http://www.loc.gov/standards/iso639-2/ LanguageCode *string `locationName:"languageCode" type:"string"` @@ -9837,6 +9867,12 @@ func (s *CaptionDescription) SetAccessibility(v string) *CaptionDescription { return s } +// SetCaptionDashRoles sets the CaptionDashRoles field's value. +func (s *CaptionDescription) SetCaptionDashRoles(v []*string) *CaptionDescription { + s.CaptionDashRoles = v + return s +} + // SetCaptionSelectorName sets the CaptionSelectorName field's value. 
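
The audioDashRoles, captionDashRoles, and dvbDashAccessibility fields added around here let a channel signal DASH roles and DVB-DASH accessibility on its outputs. A sketch of populating them; the selector and description names are placeholders, and these structs would normally sit inside a channel's EncoderSettings:

    package main

    import (
        "fmt"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/service/medialive"
    )

    func main() {
        // Audio output tagged with a DASH role and DVB-DASH accessibility signaling.
        audio := &medialive.AudioDescription{
            Name:                 aws.String("audio_description_1"),
            AudioSelectorName:    aws.String("audio_selector_1"),
            AudioDashRoles:       aws.StringSlice([]string{medialive.DashRoleAudioMain}),
            DvbDashAccessibility: aws.String(medialive.DvbDashAccessibilityDvbdash2HardOfHearing),
        }

        // Captions output with equivalent signaling.
        captions := &medialive.CaptionDescription{
            Name:                 aws.String("captions_1"),
            CaptionSelectorName:  aws.String("caption_selector_1"),
            CaptionDashRoles:     aws.StringSlice([]string{medialive.DashRoleCaptionSubtitle}),
            DvbDashAccessibility: aws.String(medialive.DvbDashAccessibilityDvbdash2HardOfHearing),
        }

        if err := audio.Validate(); err != nil {
            fmt.Println("invalid audio description:", err)
        }
        if err := captions.Validate(); err != nil {
            fmt.Println("invalid caption description:", err)
        }
    }
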
func (s *CaptionDescription) SetCaptionSelectorName(v string) *CaptionDescription { s.CaptionSelectorName = &v @@ -9849,6 +9885,12 @@ func (s *CaptionDescription) SetDestinationSettings(v *CaptionDestinationSetting return s } +// SetDvbDashAccessibility sets the DvbDashAccessibility field's value. +func (s *CaptionDescription) SetDvbDashAccessibility(v string) *CaptionDescription { + s.DvbDashAccessibility = &v + return s +} + // SetLanguageCode sets the LanguageCode field's value. func (s *CaptionDescription) SetLanguageCode(v string) *CaptionDescription { s.LanguageCode = &v @@ -10900,6 +10942,139 @@ func (s ClaimDeviceOutput) GoString() string { return s.String() } +// Cmaf Ingest Group Settings +type CmafIngestGroupSettings struct { + _ struct{} `type:"structure"` + + // A HTTP destination for the tracks + // + // Destination is a required field + Destination *OutputLocationRef `locationName:"destination" type:"structure" required:"true"` + + // If set to passthrough, Nielsen inaudible tones for media tracking will be + // detected in the input audio and an equivalent ID3 tag will be inserted in + // the output. + NielsenId3Behavior *string `locationName:"nielsenId3Behavior" type:"string" enum:"CmafNielsenId3Behavior"` + + // Type of scte35 track to add. none or scte35WithoutSegmentation + Scte35Type *string `locationName:"scte35Type" type:"string" enum:"Scte35Type"` + + // The nominal duration of segments. The units are specified in SegmentLengthUnits. + // The segments will end on the next keyframe after the specified duration, + // so the actual segment length might be longer, and it might be a fraction + // of the units. + SegmentLength *int64 `locationName:"segmentLength" min:"1" type:"integer"` + + // Time unit for segment length parameter. + SegmentLengthUnits *string `locationName:"segmentLengthUnits" type:"string" enum:"CmafIngestSegmentLengthUnits"` + + // Number of milliseconds to delay the output from the second pipeline. + SendDelayMs *int64 `locationName:"sendDelayMs" type:"integer"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s CmafIngestGroupSettings) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s CmafIngestGroupSettings) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *CmafIngestGroupSettings) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "CmafIngestGroupSettings"} + if s.Destination == nil { + invalidParams.Add(request.NewErrParamRequired("Destination")) + } + if s.SegmentLength != nil && *s.SegmentLength < 1 { + invalidParams.Add(request.NewErrParamMinValue("SegmentLength", 1)) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetDestination sets the Destination field's value. +func (s *CmafIngestGroupSettings) SetDestination(v *OutputLocationRef) *CmafIngestGroupSettings { + s.Destination = v + return s +} + +// SetNielsenId3Behavior sets the NielsenId3Behavior field's value. 
+func (s *CmafIngestGroupSettings) SetNielsenId3Behavior(v string) *CmafIngestGroupSettings { + s.NielsenId3Behavior = &v + return s +} + +// SetScte35Type sets the Scte35Type field's value. +func (s *CmafIngestGroupSettings) SetScte35Type(v string) *CmafIngestGroupSettings { + s.Scte35Type = &v + return s +} + +// SetSegmentLength sets the SegmentLength field's value. +func (s *CmafIngestGroupSettings) SetSegmentLength(v int64) *CmafIngestGroupSettings { + s.SegmentLength = &v + return s +} + +// SetSegmentLengthUnits sets the SegmentLengthUnits field's value. +func (s *CmafIngestGroupSettings) SetSegmentLengthUnits(v string) *CmafIngestGroupSettings { + s.SegmentLengthUnits = &v + return s +} + +// SetSendDelayMs sets the SendDelayMs field's value. +func (s *CmafIngestGroupSettings) SetSendDelayMs(v int64) *CmafIngestGroupSettings { + s.SendDelayMs = &v + return s +} + +// Cmaf Ingest Output Settings +type CmafIngestOutputSettings struct { + _ struct{} `type:"structure"` + + // String concatenated to the end of the destination filename. Required for + // multiple outputs of the same type. + NameModifier *string `locationName:"nameModifier" type:"string"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s CmafIngestOutputSettings) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s CmafIngestOutputSettings) GoString() string { + return s.String() +} + +// SetNameModifier sets the NameModifier field's value. +func (s *CmafIngestOutputSettings) SetNameModifier(v string) *CmafIngestOutputSettings { + s.NameModifier = &v + return s +} + // Property of ColorCorrectionSettings. Used for custom color space conversion. // The object identifies one 3D LUT file and specifies the input/output color // space combination that the file will be used for. @@ -26969,6 +27144,9 @@ type OutputGroupSettings struct { // Archive Group Settings ArchiveGroupSettings *ArchiveGroupSettings `locationName:"archiveGroupSettings" type:"structure"` + // Cmaf Ingest Group Settings + CmafIngestGroupSettings *CmafIngestGroupSettings `locationName:"cmafIngestGroupSettings" type:"structure"` + // Frame Capture Group Settings FrameCaptureGroupSettings *FrameCaptureGroupSettings `locationName:"frameCaptureGroupSettings" type:"structure"` @@ -27017,6 +27195,11 @@ func (s *OutputGroupSettings) Validate() error { invalidParams.AddNested("ArchiveGroupSettings", err.(request.ErrInvalidParams)) } } + if s.CmafIngestGroupSettings != nil { + if err := s.CmafIngestGroupSettings.Validate(); err != nil { + invalidParams.AddNested("CmafIngestGroupSettings", err.(request.ErrInvalidParams)) + } + } if s.FrameCaptureGroupSettings != nil { if err := s.FrameCaptureGroupSettings.Validate(); err != nil { invalidParams.AddNested("FrameCaptureGroupSettings", err.(request.ErrInvalidParams)) @@ -27055,6 +27238,12 @@ func (s *OutputGroupSettings) SetArchiveGroupSettings(v *ArchiveGroupSettings) * return s } +// SetCmafIngestGroupSettings sets the CmafIngestGroupSettings field's value. 
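
Putting the new CMAF Ingest types together: a sketch of an output group that uses CmafIngestGroupSettings and CmafIngestOutputSettings. The destination reference ID and names are placeholders; the reference would have to match an output destination defined elsewhere in the channel:

    package main

    import (
        "fmt"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/service/medialive"
    )

    func main() {
        group := &medialive.OutputGroup{
            Name: aws.String("cmaf-ingest"),
            OutputGroupSettings: &medialive.OutputGroupSettings{
                CmafIngestGroupSettings: &medialive.CmafIngestGroupSettings{
                    // "destination1" is a placeholder destination reference ID.
                    Destination:        &medialive.OutputLocationRef{DestinationRefId: aws.String("destination1")},
                    SegmentLength:      aws.Int64(4),
                    SegmentLengthUnits: aws.String(medialive.CmafIngestSegmentLengthUnitsSeconds),
                    Scte35Type:         aws.String(medialive.Scte35TypeScte35WithoutSegmentation),
                    NielsenId3Behavior: aws.String(medialive.CmafNielsenId3BehaviorNoPassthrough),
                },
            },
            Outputs: []*medialive.Output{{
                OutputName: aws.String("cmaf-output"),
                OutputSettings: &medialive.OutputSettings{
                    CmafIngestOutputSettings: &medialive.CmafIngestOutputSettings{
                        NameModifier: aws.String("_cmaf"),
                    },
                },
            }},
        }

        // Validate catches the required Destination and the SegmentLength minimum.
        if err := group.Validate(); err != nil {
            fmt.Println("invalid output group:", err)
        }
    }
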
+func (s *OutputGroupSettings) SetCmafIngestGroupSettings(v *CmafIngestGroupSettings) *OutputGroupSettings { + s.CmafIngestGroupSettings = v + return s +} + // SetFrameCaptureGroupSettings sets the FrameCaptureGroupSettings field's value. func (s *OutputGroupSettings) SetFrameCaptureGroupSettings(v *FrameCaptureGroupSettings) *OutputGroupSettings { s.FrameCaptureGroupSettings = v @@ -27176,6 +27365,9 @@ type OutputSettings struct { // Archive Output Settings ArchiveOutputSettings *ArchiveOutputSettings `locationName:"archiveOutputSettings" type:"structure"` + // Cmaf Ingest Output Settings + CmafIngestOutputSettings *CmafIngestOutputSettings `locationName:"cmafIngestOutputSettings" type:"structure"` + // Frame Capture Output Settings FrameCaptureOutputSettings *FrameCaptureOutputSettings `locationName:"frameCaptureOutputSettings" type:"structure"` @@ -27257,6 +27449,12 @@ func (s *OutputSettings) SetArchiveOutputSettings(v *ArchiveOutputSettings) *Out return s } +// SetCmafIngestOutputSettings sets the CmafIngestOutputSettings field's value. +func (s *OutputSettings) SetCmafIngestOutputSettings(v *CmafIngestOutputSettings) *OutputSettings { + s.CmafIngestOutputSettings = v + return s +} + // SetFrameCaptureOutputSettings sets the FrameCaptureOutputSettings field's value. func (s *OutputSettings) SetFrameCaptureOutputSettings(v *FrameCaptureOutputSettings) *OutputSettings { s.FrameCaptureOutputSettings = v @@ -35196,6 +35394,40 @@ func ChannelState_Values() []string { } } +// Cmaf Ingest Segment Length Units +const ( + // CmafIngestSegmentLengthUnitsMilliseconds is a CmafIngestSegmentLengthUnits enum value + CmafIngestSegmentLengthUnitsMilliseconds = "MILLISECONDS" + + // CmafIngestSegmentLengthUnitsSeconds is a CmafIngestSegmentLengthUnits enum value + CmafIngestSegmentLengthUnitsSeconds = "SECONDS" +) + +// CmafIngestSegmentLengthUnits_Values returns all elements of the CmafIngestSegmentLengthUnits enum +func CmafIngestSegmentLengthUnits_Values() []string { + return []string{ + CmafIngestSegmentLengthUnitsMilliseconds, + CmafIngestSegmentLengthUnitsSeconds, + } +} + +// Cmaf Nielsen Id3 Behavior +const ( + // CmafNielsenId3BehaviorNoPassthrough is a CmafNielsenId3Behavior enum value + CmafNielsenId3BehaviorNoPassthrough = "NO_PASSTHROUGH" + + // CmafNielsenId3BehaviorPassthrough is a CmafNielsenId3Behavior enum value + CmafNielsenId3BehaviorPassthrough = "PASSTHROUGH" +) + +// CmafNielsenId3Behavior_Values returns all elements of the CmafNielsenId3Behavior enum +func CmafNielsenId3Behavior_Values() []string { + return []string{ + CmafNielsenId3BehaviorNoPassthrough, + CmafNielsenId3BehaviorPassthrough, + } +} + // Property of colorCorrections. When you are using 3D LUT files to perform // color conversion on video, these are the supported color spaces. 
const ( @@ -35234,6 +35466,112 @@ func ContentType_Values() []string { } } +// Dash Role Audio +const ( + // DashRoleAudioAlternate is a DashRoleAudio enum value + DashRoleAudioAlternate = "ALTERNATE" + + // DashRoleAudioCommentary is a DashRoleAudio enum value + DashRoleAudioCommentary = "COMMENTARY" + + // DashRoleAudioDescription is a DashRoleAudio enum value + DashRoleAudioDescription = "DESCRIPTION" + + // DashRoleAudioDub is a DashRoleAudio enum value + DashRoleAudioDub = "DUB" + + // DashRoleAudioEmergency is a DashRoleAudio enum value + DashRoleAudioEmergency = "EMERGENCY" + + // DashRoleAudioEnhancedAudioIntelligibility is a DashRoleAudio enum value + DashRoleAudioEnhancedAudioIntelligibility = "ENHANCED-AUDIO-INTELLIGIBILITY" + + // DashRoleAudioKaraoke is a DashRoleAudio enum value + DashRoleAudioKaraoke = "KARAOKE" + + // DashRoleAudioMain is a DashRoleAudio enum value + DashRoleAudioMain = "MAIN" + + // DashRoleAudioSupplementary is a DashRoleAudio enum value + DashRoleAudioSupplementary = "SUPPLEMENTARY" +) + +// DashRoleAudio_Values returns all elements of the DashRoleAudio enum +func DashRoleAudio_Values() []string { + return []string{ + DashRoleAudioAlternate, + DashRoleAudioCommentary, + DashRoleAudioDescription, + DashRoleAudioDub, + DashRoleAudioEmergency, + DashRoleAudioEnhancedAudioIntelligibility, + DashRoleAudioKaraoke, + DashRoleAudioMain, + DashRoleAudioSupplementary, + } +} + +// Dash Role Caption +const ( + // DashRoleCaptionAlternate is a DashRoleCaption enum value + DashRoleCaptionAlternate = "ALTERNATE" + + // DashRoleCaptionCaption is a DashRoleCaption enum value + DashRoleCaptionCaption = "CAPTION" + + // DashRoleCaptionCommentary is a DashRoleCaption enum value + DashRoleCaptionCommentary = "COMMENTARY" + + // DashRoleCaptionDescription is a DashRoleCaption enum value + DashRoleCaptionDescription = "DESCRIPTION" + + // DashRoleCaptionDub is a DashRoleCaption enum value + DashRoleCaptionDub = "DUB" + + // DashRoleCaptionEasyreader is a DashRoleCaption enum value + DashRoleCaptionEasyreader = "EASYREADER" + + // DashRoleCaptionEmergency is a DashRoleCaption enum value + DashRoleCaptionEmergency = "EMERGENCY" + + // DashRoleCaptionForcedSubtitle is a DashRoleCaption enum value + DashRoleCaptionForcedSubtitle = "FORCED-SUBTITLE" + + // DashRoleCaptionKaraoke is a DashRoleCaption enum value + DashRoleCaptionKaraoke = "KARAOKE" + + // DashRoleCaptionMain is a DashRoleCaption enum value + DashRoleCaptionMain = "MAIN" + + // DashRoleCaptionMetadata is a DashRoleCaption enum value + DashRoleCaptionMetadata = "METADATA" + + // DashRoleCaptionSubtitle is a DashRoleCaption enum value + DashRoleCaptionSubtitle = "SUBTITLE" + + // DashRoleCaptionSupplementary is a DashRoleCaption enum value + DashRoleCaptionSupplementary = "SUPPLEMENTARY" +) + +// DashRoleCaption_Values returns all elements of the DashRoleCaption enum +func DashRoleCaption_Values() []string { + return []string{ + DashRoleCaptionAlternate, + DashRoleCaptionCaption, + DashRoleCaptionCommentary, + DashRoleCaptionDescription, + DashRoleCaptionDub, + DashRoleCaptionEasyreader, + DashRoleCaptionEmergency, + DashRoleCaptionForcedSubtitle, + DashRoleCaptionKaraoke, + DashRoleCaptionMain, + DashRoleCaptionMetadata, + DashRoleCaptionSubtitle, + DashRoleCaptionSupplementary, + } +} + // The status of the action to synchronize the device configuration. If you // change the configuration of the input device (for example, the maximum bitrate), // MediaLive sends the new data to the device. 
The device might not update itself @@ -35321,6 +35659,43 @@ func DolbyEProgramSelection_Values() []string { } } +// Dvb Dash Accessibility +const ( + // DvbDashAccessibilityDvbdash1VisuallyImpaired is a DvbDashAccessibility enum value + DvbDashAccessibilityDvbdash1VisuallyImpaired = "DVBDASH_1_VISUALLY_IMPAIRED" + + // DvbDashAccessibilityDvbdash2HardOfHearing is a DvbDashAccessibility enum value + DvbDashAccessibilityDvbdash2HardOfHearing = "DVBDASH_2_HARD_OF_HEARING" + + // DvbDashAccessibilityDvbdash3SupplementalCommentary is a DvbDashAccessibility enum value + DvbDashAccessibilityDvbdash3SupplementalCommentary = "DVBDASH_3_SUPPLEMENTAL_COMMENTARY" + + // DvbDashAccessibilityDvbdash4DirectorsCommentary is a DvbDashAccessibility enum value + DvbDashAccessibilityDvbdash4DirectorsCommentary = "DVBDASH_4_DIRECTORS_COMMENTARY" + + // DvbDashAccessibilityDvbdash5EducationalNotes is a DvbDashAccessibility enum value + DvbDashAccessibilityDvbdash5EducationalNotes = "DVBDASH_5_EDUCATIONAL_NOTES" + + // DvbDashAccessibilityDvbdash6MainProgram is a DvbDashAccessibility enum value + DvbDashAccessibilityDvbdash6MainProgram = "DVBDASH_6_MAIN_PROGRAM" + + // DvbDashAccessibilityDvbdash7CleanFeed is a DvbDashAccessibility enum value + DvbDashAccessibilityDvbdash7CleanFeed = "DVBDASH_7_CLEAN_FEED" +) + +// DvbDashAccessibility_Values returns all elements of the DvbDashAccessibility enum +func DvbDashAccessibility_Values() []string { + return []string{ + DvbDashAccessibilityDvbdash1VisuallyImpaired, + DvbDashAccessibilityDvbdash2HardOfHearing, + DvbDashAccessibilityDvbdash3SupplementalCommentary, + DvbDashAccessibilityDvbdash4DirectorsCommentary, + DvbDashAccessibilityDvbdash5EducationalNotes, + DvbDashAccessibilityDvbdash6MainProgram, + DvbDashAccessibilityDvbdash7CleanFeed, + } +} + // Dvb Sdt Output Sdt const ( // DvbSdtOutputSdtSdtFollow is a DvbSdtOutputSdt enum value @@ -39716,6 +40091,23 @@ func Scte35SpliceInsertWebDeliveryAllowedBehavior_Values() []string { } } +// Scte35 Type +const ( + // Scte35TypeNone is a Scte35Type enum value + Scte35TypeNone = "NONE" + + // Scte35TypeScte35WithoutSegmentation is a Scte35Type enum value + Scte35TypeScte35WithoutSegmentation = "SCTE_35_WITHOUT_SEGMENTATION" +) + +// Scte35Type_Values returns all elements of the Scte35Type enum +func Scte35Type_Values() []string { + return []string{ + Scte35TypeNone, + Scte35TypeScte35WithoutSegmentation, + } +} + // Corresponds to the web_delivery_allowed_flag parameter. A value of WEB_DELIVERY_NOT_ALLOWED // corresponds to 0 (false) in the SCTE-35 specification. If you include one // of the "restriction" flags then you must include all four of them. diff --git a/service/medicalimaging/api.go b/service/medicalimaging/api.go index 14ebfbd2f1f..25b4d0790e3 100644 --- a/service/medicalimaging/api.go +++ b/service/medicalimaging/api.go @@ -445,6 +445,12 @@ func (c *MedicalImaging) GetDICOMImportJobRequest(input *GetDICOMImportJobInput) // // Get the import job properties to learn more about the job or job progress. // +// The jobStatus refers to the execution of the import job. Therefore, an import +// job can return a jobStatus as COMPLETED even if validation issues are discovered +// during the import process. If a jobStatus returns as COMPLETED, we still +// recommend you review the output manifests written to S3, as they provide +// details on the success or failure of individual P10 object imports. +// // Returns awserr.Error for service API and SDK errors. 
Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. @@ -3045,6 +3051,34 @@ type DICOMTags struct { // String and GoString methods. DICOMPatientSex *string `type:"string" sensitive:"true"` + // The DICOM provided identifier for the series Body Part Examined. + // + // DICOMSeriesBodyPart is a sensitive parameter and its value will be + // replaced with "sensitive" in string returned by DICOMTags's + // String and GoString methods. + DICOMSeriesBodyPart *string `type:"string" sensitive:"true"` + + // The DICOM provided identifier for the Series Instance UID. + // + // DICOMSeriesInstanceUID is a sensitive parameter and its value will be + // replaced with "sensitive" in string returned by DICOMTags's + // String and GoString methods. + DICOMSeriesInstanceUID *string `type:"string" sensitive:"true"` + + // The DICOM provided identifier for the series Modality. + // + // DICOMSeriesModality is a sensitive parameter and its value will be + // replaced with "sensitive" in string returned by DICOMTags's + // String and GoString methods. + DICOMSeriesModality *string `type:"string" sensitive:"true"` + + // The DICOM provided identifier for the Series Number. + // + // DICOMSeriesNumber is a sensitive parameter and its value will be + // replaced with "sensitive" in string returned by DICOMTags's + // String and GoString methods. + DICOMSeriesNumber *int64 `type:"integer" sensitive:"true"` + // The study date. // // DICOMStudyDate is a sensitive parameter and its value will be @@ -3052,21 +3086,21 @@ type DICOMTags struct { // String and GoString methods. DICOMStudyDate *string `type:"string" sensitive:"true"` - // The description of the study. + // The DICOM provided Study Description. // // DICOMStudyDescription is a sensitive parameter and its value will be // replaced with "sensitive" in string returned by DICOMTags's // String and GoString methods. DICOMStudyDescription *string `type:"string" sensitive:"true"` - // The DICOM provided studyId. + // The DICOM provided identifier for the Study ID. // // DICOMStudyId is a sensitive parameter and its value will be // replaced with "sensitive" in string returned by DICOMTags's // String and GoString methods. DICOMStudyId *string `type:"string" sensitive:"true"` - // The DICOM provided identifier for studyInstanceUid.> + // The DICOM provided identifier for the Study Instance UID. // // DICOMStudyInstanceUID is a sensitive parameter and its value will be // replaced with "sensitive" in string returned by DICOMTags's @@ -3141,6 +3175,30 @@ func (s *DICOMTags) SetDICOMPatientSex(v string) *DICOMTags { return s } +// SetDICOMSeriesBodyPart sets the DICOMSeriesBodyPart field's value. +func (s *DICOMTags) SetDICOMSeriesBodyPart(v string) *DICOMTags { + s.DICOMSeriesBodyPart = &v + return s +} + +// SetDICOMSeriesInstanceUID sets the DICOMSeriesInstanceUID field's value. +func (s *DICOMTags) SetDICOMSeriesInstanceUID(v string) *DICOMTags { + s.DICOMSeriesInstanceUID = &v + return s +} + +// SetDICOMSeriesModality sets the DICOMSeriesModality field's value. +func (s *DICOMTags) SetDICOMSeriesModality(v string) *DICOMTags { + s.DICOMSeriesModality = &v + return s +} + +// SetDICOMSeriesNumber sets the DICOMSeriesNumber field's value. +func (s *DICOMTags) SetDICOMSeriesNumber(v int64) *DICOMTags { + s.DICOMSeriesNumber = &v + return s +} + // SetDICOMStudyDate sets the DICOMStudyDate field's value. 
func (s *DICOMTags) SetDICOMStudyDate(v string) *DICOMTags { s.DICOMStudyDate = &v @@ -5137,6 +5195,13 @@ type SearchByAttributeValue struct { // String and GoString methods. DICOMPatientId *string `type:"string" sensitive:"true"` + // The Series Instance UID input for search. + // + // DICOMSeriesInstanceUID is a sensitive parameter and its value will be + // replaced with "sensitive" in string returned by SearchByAttributeValue's + // String and GoString methods. + DICOMSeriesInstanceUID *string `type:"string" sensitive:"true"` + // The aggregated structure containing DICOM study date and study time for search. DICOMStudyDateAndTime *DICOMStudyDateAndTime `type:"structure"` @@ -5153,6 +5218,9 @@ type SearchByAttributeValue struct { // replaced with "sensitive" in string returned by SearchByAttributeValue's // String and GoString methods. DICOMStudyInstanceUID *string `type:"string" sensitive:"true"` + + // The timestamp input for search. + UpdatedAt *time.Time `locationName:"updatedAt" type:"timestamp"` } // String returns the string representation. @@ -5206,6 +5274,12 @@ func (s *SearchByAttributeValue) SetDICOMPatientId(v string) *SearchByAttributeV return s } +// SetDICOMSeriesInstanceUID sets the DICOMSeriesInstanceUID field's value. +func (s *SearchByAttributeValue) SetDICOMSeriesInstanceUID(v string) *SearchByAttributeValue { + s.DICOMSeriesInstanceUID = &v + return s +} + // SetDICOMStudyDateAndTime sets the DICOMStudyDateAndTime field's value. func (s *SearchByAttributeValue) SetDICOMStudyDateAndTime(v *DICOMStudyDateAndTime) *SearchByAttributeValue { s.DICOMStudyDateAndTime = v @@ -5224,12 +5298,21 @@ func (s *SearchByAttributeValue) SetDICOMStudyInstanceUID(v string) *SearchByAtt return s } +// SetUpdatedAt sets the UpdatedAt field's value. +func (s *SearchByAttributeValue) SetUpdatedAt(v time.Time) *SearchByAttributeValue { + s.UpdatedAt = &v + return s +} + // The search criteria. type SearchCriteria struct { _ struct{} `type:"structure" sensitive:"true"` // The filters for the search criteria. Filters []*SearchFilter `locationName:"filters" min:"1" type:"list"` + + // The sort input for search criteria. + Sort *Sort `locationName:"sort" type:"structure"` } // String returns the string representation. @@ -5266,6 +5349,11 @@ func (s *SearchCriteria) Validate() error { } } } + if s.Sort != nil { + if err := s.Sort.Validate(); err != nil { + invalidParams.AddNested("Sort", err.(request.ErrInvalidParams)) + } + } if invalidParams.Len() > 0 { return invalidParams @@ -5279,6 +5367,12 @@ func (s *SearchCriteria) SetFilters(v []*SearchFilter) *SearchCriteria { return s } +// SetSort sets the Sort field's value. +func (s *SearchCriteria) SetSort(v *Sort) *SearchCriteria { + s.Sort = v + return s +} + // The search filter. type SearchFilter struct { _ struct{} `type:"structure"` @@ -5456,6 +5550,9 @@ type SearchImageSetsOutput struct { // The token for pagination results. NextToken *string `locationName:"nextToken" min:"1" type:"string"` + + // The sort order for image set search results. + Sort *Sort `locationName:"sort" type:"structure"` } // String returns the string representation. @@ -5488,6 +5585,12 @@ func (s *SearchImageSetsOutput) SetNextToken(v string) *SearchImageSetsOutput { return s } +// SetSort sets the Sort field's value. +func (s *SearchImageSetsOutput) SetSort(v *Sort) *SearchImageSetsOutput { + s.Sort = v + return s +} + // The request caused a service quota to be exceeded. 
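
The search additions above (a DICOMSeriesInstanceUID attribute and a Sort structure on the search criteria) allow series-level, sorted queries. A sketch of SearchImageSets using both; the datastore ID and series UID are placeholders, and NextToken pagination is omitted:

    package main

    import (
        "fmt"
        "log"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/aws/session"
        "github.com/aws/aws-sdk-go/service/medicalimaging"
    )

    func main() {
        sess := session.Must(session.NewSession(&aws.Config{Region: aws.String("us-east-1")}))
        svc := medicalimaging.New(sess)

        out, err := svc.SearchImageSets(&medicalimaging.SearchImageSetsInput{
            DatastoreId: aws.String("12345678901234567890123456789012"),
            SearchCriteria: &medicalimaging.SearchCriteria{
                // Filter on the new series-level attribute (placeholder UID).
                Filters: []*medicalimaging.SearchFilter{{
                    Operator: aws.String(medicalimaging.OperatorEqual),
                    Values: []*medicalimaging.SearchByAttributeValue{{
                        DICOMSeriesInstanceUID: aws.String("1.2.840.113619.2.315.3.1.2.0.0.0"),
                    }},
                }},
                // New in this release: server-side sorting of search results.
                Sort: &medicalimaging.Sort{
                    SortField: aws.String(medicalimaging.SortFieldUpdatedAt),
                    SortOrder: aws.String(medicalimaging.SortOrderDesc),
                },
            },
        })
        if err != nil {
            log.Fatal(err)
        }
        for _, s := range out.ImageSetsMetadataSummaries {
            fmt.Println(aws.StringValue(s.ImageSetId))
        }
    }
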
type ServiceQuotaExceededException struct { _ struct{} `type:"structure"` @@ -5552,6 +5655,67 @@ func (s *ServiceQuotaExceededException) RequestID() string { return s.RespMetadata.RequestID } +// Sort search results. +type Sort struct { + _ struct{} `type:"structure"` + + // The sort field for search criteria. + // + // SortField is a required field + SortField *string `locationName:"sortField" type:"string" required:"true" enum:"SortField"` + + // The sort order for search criteria. + // + // SortOrder is a required field + SortOrder *string `locationName:"sortOrder" type:"string" required:"true" enum:"SortOrder"` +} + +// String returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s Sort) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation. +// +// API parameter values that are decorated as "sensitive" in the API will not +// be included in the string output. The member name will be present, but the +// value will be replaced with "sensitive". +func (s Sort) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *Sort) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "Sort"} + if s.SortField == nil { + invalidParams.Add(request.NewErrParamRequired("SortField")) + } + if s.SortOrder == nil { + invalidParams.Add(request.NewErrParamRequired("SortOrder")) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetSortField sets the SortField field's value. +func (s *Sort) SetSortField(v string) *Sort { + s.SortField = &v + return s +} + +// SetSortOrder sets the SortOrder field's value. +func (s *Sort) SetSortOrder(v string) *Sort { + s.SortOrder = &v + return s +} + type StartDICOMImportJobInput struct { _ struct{} `type:"structure"` @@ -6381,3 +6545,39 @@ func Operator_Values() []string { OperatorBetween, } } + +const ( + // SortFieldUpdatedAt is a SortField enum value + SortFieldUpdatedAt = "updatedAt" + + // SortFieldCreatedAt is a SortField enum value + SortFieldCreatedAt = "createdAt" + + // SortFieldDicomstudyDateAndTime is a SortField enum value + SortFieldDicomstudyDateAndTime = "DICOMStudyDateAndTime" +) + +// SortField_Values returns all elements of the SortField enum +func SortField_Values() []string { + return []string{ + SortFieldUpdatedAt, + SortFieldCreatedAt, + SortFieldDicomstudyDateAndTime, + } +} + +const ( + // SortOrderAsc is a SortOrder enum value + SortOrderAsc = "ASC" + + // SortOrderDesc is a SortOrder enum value + SortOrderDesc = "DESC" +) + +// SortOrder_Values returns all elements of the SortOrder enum +func SortOrder_Values() []string { + return []string{ + SortOrderAsc, + SortOrderDesc, + } +} diff --git a/service/medicalimaging/doc.go b/service/medicalimaging/doc.go index e56bda9ae5e..9a2c82c4bac 100644 --- a/service/medicalimaging/doc.go +++ b/service/medicalimaging/doc.go @@ -3,24 +3,21 @@ // Package medicalimaging provides the client and types for making API // requests to AWS Health Imaging. // -// This is the AWS HealthImaging API Reference. AWS HealthImaging is a HIPAA-eligible -// service that helps health care providers and their medical imaging ISV partners -// store, transform, and apply machine learning to medical images. 
For an introduction -// to the service, see the AWS HealthImaging Developer Guide (https://docs.aws.amazon.com/healthimaging/latest/devguide/what-is.html). +// This is the AWS HealthImaging API Reference. AWS HealthImaging is a HIPAA +// eligible service that empowers healthcare providers, life science organizations, +// and their software partners to store, analyze, and share medical images in +// the cloud at petabyte scale. For an introduction to the service, see the +// AWS HealthImaging Developer Guide (https://docs.aws.amazon.com/healthimaging/latest/devguide/what-is.html). // // We recommend using one of the AWS Software Development Kits (SDKs) for your // programming language, as they take care of request authentication, serialization, // and connection management. For more information, see Tools to build on AWS // (http://aws.amazon.com/developer/tools). // -// For information about using HealthImaging API actions in one of the language-specific -// AWS SDKs, refer to the See Also link at the end of each section that describes -// an API action or data type. -// // The following sections list AWS HealthImaging API actions categorized according // to functionality. Links are provided to actions within this Reference, along // with links back to corresponding sections in the AWS HealthImaging Developer -// Guide where you can view console procedures and CLI/SDK code examples. +// Guide where you can view tested code examples. // // Data store actions // @@ -78,16 +75,13 @@ // Tagging actions // // - TagResource (https://docs.aws.amazon.com/healthimaging/latest/APIReference/API_TagResource.html) -// – See Tagging a data store (https://docs.aws.amazon.com/healthimaging/latest/devguide/tag-list-untag-data-store.html) -// and Tagging an image set (https://docs.aws.amazon.com/healthimaging/latest/devguide/tag-list-untag-image-set.html). +// – See Tagging a resource (https://docs.aws.amazon.com/healthimaging/latest/devguide/tag-resource.html). // // - ListTagsForResource (https://docs.aws.amazon.com/healthimaging/latest/APIReference/API_ListTagsForResource.html) -// – See Tagging a data store (https://docs.aws.amazon.com/healthimaging/latest/devguide/tag-list-untag-data-store.html) -// and Tagging an image set (https://docs.aws.amazon.com/healthimaging/latest/devguide/tag-list-untag-image-set.html). +// – See Listing tags for a resource (https://docs.aws.amazon.com/healthimaging/latest/devguide/list-tag-resource.html). // // - UntagResource (https://docs.aws.amazon.com/healthimaging/latest/APIReference/API_UntagResource.html) -// – See Tagging a data store (https://docs.aws.amazon.com/healthimaging/latest/devguide/tag-list-untag-data-store.html) -// and Tagging an image set (https://docs.aws.amazon.com/healthimaging/latest/devguide/tag-list-untag-image-set.html). +// – See Untagging a resource (https://docs.aws.amazon.com/healthimaging/latest/devguide/untag-resource.html). // // See https://docs.aws.amazon.com/goto/WebAPI/medical-imaging-2023-07-19 for more information on this service. // diff --git a/service/transfer/api.go b/service/transfer/api.go index 00e2a30eb16..33c6af459f1 100644 --- a/service/transfer/api.go +++ b/service/transfer/api.go @@ -2331,10 +2331,11 @@ func (c *Transfer) DescribeSecurityPolicyRequest(input *DescribeSecurityPolicyIn // DescribeSecurityPolicy API operation for AWS Transfer Family. // -// Describes the security policy that is attached to your file transfer protocol-enabled -// server. The response contains a description of the security policy's properties. 
+// Describes the security policy that is attached to your server or SFTP connector. +// The response contains a description of the security policy's properties. // For more information about security policies, see Working with security policies -// (https://docs.aws.amazon.com/transfer/latest/userguide/security-policies.html). +// for servers (https://docs.aws.amazon.com/transfer/latest/userguide/security-policies.html) +// or Working with security policies for SFTP connectors (https://docs.aws.amazon.com/transfer/latest/userguide/security-policies-connectors.html). // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about @@ -4006,8 +4007,10 @@ func (c *Transfer) ListSecurityPoliciesRequest(input *ListSecurityPoliciesInput) // ListSecurityPolicies API operation for AWS Transfer Family. // -// Lists the security policies that are attached to your file transfer protocol-enabled -// servers. +// Lists the security policies that are attached to your servers and SFTP connectors. +// For more information about security policies, see Working with security policies +// for servers (https://docs.aws.amazon.com/transfer/latest/userguide/security-policies.html) +// or Working with security policies for SFTP connectors (https://docs.aws.amazon.com/transfer/latest/userguide/security-policies-connectors.html). // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about @@ -7250,6 +7253,9 @@ type CreateConnectorInput struct { // events. When set, you can view connector activity in your CloudWatch logs. LoggingRole *string `min:"20" type:"string"` + // Specifies the name of the security policy for the connector. + SecurityPolicyName *string `type:"string"` + // A structure that contains the parameters for an SFTP connector object. SftpConfig *SftpConnectorConfig `type:"structure"` @@ -7344,6 +7350,12 @@ func (s *CreateConnectorInput) SetLoggingRole(v string) *CreateConnectorInput { return s } +// SetSecurityPolicyName sets the SecurityPolicyName field's value. +func (s *CreateConnectorInput) SetSecurityPolicyName(v string) *CreateConnectorInput { + s.SecurityPolicyName = &v + return s +} + // SetSftpConfig sets the SftpConfig field's value. func (s *CreateConnectorInput) SetSftpConfig(v *SftpConnectorConfig) *CreateConnectorInput { s.SftpConfig = v @@ -7751,7 +7763,7 @@ type CreateServerInput struct { // Type to FILE if you want a mapping to have a file target. S3StorageOptions *S3StorageOptions `type:"structure"` - // Specifies the name of the security policy that is attached to the server. + // Specifies the name of the security policy for the server. SecurityPolicyName *string `type:"string"` // Specifies the log groups to which your server logs are sent. @@ -10209,7 +10221,7 @@ func (s *DescribeProfileOutput) SetProfile(v *DescribedProfile) *DescribeProfile type DescribeSecurityPolicyInput struct { _ struct{} `type:"structure"` - // Specifies the name of the security policy that is attached to the server. + // Specify the text name of the security policy for which you want the details. // // SecurityPolicyName is a required field SecurityPolicyName *string `type:"string" required:"true"` @@ -11062,6 +11074,9 @@ type DescribedConnector struct { // events. When set, you can view connector activity in your CloudWatch logs. 
LoggingRole *string `min:"20" type:"string"` + // The text name of the security policy for the specified connector. + SecurityPolicyName *string `type:"string"` + // The list of egress IP addresses of this connector. These IP addresses are // assigned automatically when you create the connector. ServiceManagedEgressIpAddresses []*string `type:"list"` @@ -11124,6 +11139,12 @@ func (s *DescribedConnector) SetLoggingRole(v string) *DescribedConnector { return s } +// SetSecurityPolicyName sets the SecurityPolicyName field's value. +func (s *DescribedConnector) SetSecurityPolicyName(v string) *DescribedConnector { + s.SecurityPolicyName = &v + return s +} + // SetServiceManagedEgressIpAddresses sets the ServiceManagedEgressIpAddresses field's value. func (s *DescribedConnector) SetServiceManagedEgressIpAddresses(v []*string) *DescribedConnector { s.ServiceManagedEgressIpAddresses = v @@ -11437,35 +11458,54 @@ func (s *DescribedProfile) SetTags(v []*Tag) *DescribedProfile { return s } -// Describes the properties of a security policy that was specified. For more -// information about security policies, see Working with security policies (https://docs.aws.amazon.com/transfer/latest/userguide/security-policies.html). +// Describes the properties of a security policy that you specify. For more +// information about security policies, see Working with security policies for +// servers (https://docs.aws.amazon.com/transfer/latest/userguide/security-policies.html) +// or Working with security policies for SFTP connectors (https://docs.aws.amazon.com/transfer/latest/userguide/security-policies-connectors.html). type DescribedSecurityPolicy struct { _ struct{} `type:"structure"` // Specifies whether this policy enables Federal Information Processing Standards - // (FIPS). + // (FIPS). This parameter applies to both server and connector security policies. Fips *bool `type:"boolean"` - // Specifies the name of the security policy that is attached to the server. + // Lists the file transfer protocols that the security policy applies to. + Protocols []*string `min:"1" type:"list" enum:"SecurityPolicyProtocol"` + + // The text name of the specified security policy. // // SecurityPolicyName is a required field SecurityPolicyName *string `type:"string" required:"true"` - // Specifies the enabled Secure Shell (SSH) cipher encryption algorithms in - // the security policy that is attached to the server. + // Lists the enabled Secure Shell (SSH) cipher encryption algorithms in the + // security policy that is attached to the server or connector. This parameter + // applies to both server and connector security policies. SshCiphers []*string `type:"list"` - // Specifies the enabled SSH key exchange (KEX) encryption algorithms in the - // security policy that is attached to the server. + // Lists the host key algorithms for the security policy. + // + // This parameter only applies to security policies for connectors. + SshHostKeyAlgorithms []*string `type:"list"` + + // Lists the enabled SSH key exchange (KEX) encryption algorithms in the security + // policy that is attached to the server or connector. This parameter applies + // to both server and connector security policies. SshKexs []*string `type:"list"` - // Specifies the enabled SSH message authentication code (MAC) encryption algorithms - // in the security policy that is attached to the server. + // Lists the enabled SSH message authentication code (MAC) encryption algorithms + // in the security policy that is attached to the server or connector. 
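
With SecurityPolicyName now accepted on the connector input shapes, a security policy can be pinned when an SFTP connector is created. A sketch; the URL, role ARN, secret ARN, host key, and policy name are placeholders, and in practice the policy name would be taken from ListSecurityPolicies:

    package main

    import (
        "fmt"
        "log"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/aws/session"
        "github.com/aws/aws-sdk-go/service/transfer"
    )

    func main() {
        sess := session.Must(session.NewSession(&aws.Config{Region: aws.String("us-east-1")}))
        svc := transfer.New(sess)

        out, err := svc.CreateConnector(&transfer.CreateConnectorInput{
            Url:        aws.String("sftp://partner.example.com"),
            AccessRole: aws.String("arn:aws:iam::111122223333:role/transfer-connector-access"),
            SftpConfig: &transfer.SftpConnectorConfig{
                UserSecretId:    aws.String("arn:aws:secretsmanager:us-east-1:111122223333:secret:sftp-user-abc123"),
                TrustedHostKeys: aws.StringSlice([]string{"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPLACEHOLDERKEY"}),
            },
            // New in this release: attach a connector security policy at creation
            // time. The name below is an example placeholder.
            SecurityPolicyName: aws.String("TransferSFTPConnectorSecurityPolicy-2024-03"),
        })
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println("connector:", aws.StringValue(out.ConnectorId))
    }

UpdateConnector accepts the same field, so an existing connector can be moved to a different policy without recreating it.
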
This + // parameter applies to both server and connector security policies. SshMacs []*string `type:"list"` - // Specifies the enabled Transport Layer Security (TLS) cipher encryption algorithms + // Lists the enabled Transport Layer Security (TLS) cipher encryption algorithms // in the security policy that is attached to the server. + // + // This parameter only applies to security policies for servers. TlsCiphers []*string `type:"list"` + + // The resource type to which the security policy applies, either server or + // connector. + Type *string `type:"string" enum:"SecurityPolicyResourceType"` } // String returns the string representation. @@ -11492,6 +11532,12 @@ func (s *DescribedSecurityPolicy) SetFips(v bool) *DescribedSecurityPolicy { return s } +// SetProtocols sets the Protocols field's value. +func (s *DescribedSecurityPolicy) SetProtocols(v []*string) *DescribedSecurityPolicy { + s.Protocols = v + return s +} + // SetSecurityPolicyName sets the SecurityPolicyName field's value. func (s *DescribedSecurityPolicy) SetSecurityPolicyName(v string) *DescribedSecurityPolicy { s.SecurityPolicyName = &v @@ -11504,6 +11550,12 @@ func (s *DescribedSecurityPolicy) SetSshCiphers(v []*string) *DescribedSecurityP return s } +// SetSshHostKeyAlgorithms sets the SshHostKeyAlgorithms field's value. +func (s *DescribedSecurityPolicy) SetSshHostKeyAlgorithms(v []*string) *DescribedSecurityPolicy { + s.SshHostKeyAlgorithms = v + return s +} + // SetSshKexs sets the SshKexs field's value. func (s *DescribedSecurityPolicy) SetSshKexs(v []*string) *DescribedSecurityPolicy { s.SshKexs = v @@ -11522,6 +11574,12 @@ func (s *DescribedSecurityPolicy) SetTlsCiphers(v []*string) *DescribedSecurityP return s } +// SetType sets the Type field's value. +func (s *DescribedSecurityPolicy) SetType(v string) *DescribedSecurityPolicy { + s.Type = &v + return s +} + // Describes the properties of a file transfer protocol-enabled server that // was specified. type DescribedServer struct { @@ -11676,7 +11734,7 @@ type DescribedServer struct { // Type to FILE if you want a mapping to have a file target. S3StorageOptions *S3StorageOptions `type:"structure"` - // Specifies the name of the security policy that is attached to the server. + // Specifies the name of the security policy for the server. SecurityPolicyName *string `type:"string"` // Specifies the unique system-assigned identifier for a server that you instantiate. @@ -18304,6 +18362,9 @@ type UpdateConnectorInput struct { // events. When set, you can view connector activity in your CloudWatch logs. LoggingRole *string `min:"20" type:"string"` + // Specifies the name of the security policy for the connector. + SecurityPolicyName *string `type:"string"` + // A structure that contains the parameters for an SFTP connector object. SftpConfig *SftpConnectorConfig `type:"structure"` @@ -18385,6 +18446,12 @@ func (s *UpdateConnectorInput) SetLoggingRole(v string) *UpdateConnectorInput { return s } +// SetSecurityPolicyName sets the SecurityPolicyName field's value. +func (s *UpdateConnectorInput) SetSecurityPolicyName(v string) *UpdateConnectorInput { + s.SecurityPolicyName = &v + return s +} + // SetSftpConfig sets the SftpConfig field's value. func (s *UpdateConnectorInput) SetSftpConfig(v *SftpConnectorConfig) *UpdateConnectorInput { s.SftpConfig = v @@ -18835,7 +18902,7 @@ type UpdateServerInput struct { // Type to FILE if you want a mapping to have a file target. 
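
The Type, Protocols, and SshHostKeyAlgorithms fields added to DescribedSecurityPolicy make it possible to tell server policies from connector policies when enumerating them. A sketch, without pagination:

    package main

    import (
        "fmt"
        "log"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/aws/session"
        "github.com/aws/aws-sdk-go/service/transfer"
    )

    func main() {
        sess := session.Must(session.NewSession(&aws.Config{Region: aws.String("us-east-1")}))
        svc := transfer.New(sess)

        // ListSecurityPolicies now returns both server and connector policies; the
        // Type field on each described policy tells them apart.
        list, err := svc.ListSecurityPolicies(&transfer.ListSecurityPoliciesInput{})
        if err != nil {
            log.Fatal(err)
        }
        for _, name := range list.SecurityPolicyNames {
            desc, err := svc.DescribeSecurityPolicy(&transfer.DescribeSecurityPolicyInput{
                SecurityPolicyName: name,
            })
            if err != nil {
                log.Fatal(err)
            }
            p := desc.SecurityPolicy
            fmt.Printf("%s type=%s protocols=%v host-key-algs=%v\n",
                aws.StringValue(p.SecurityPolicyName),
                aws.StringValue(p.Type),
                aws.StringValueSlice(p.Protocols),
                aws.StringValueSlice(p.SshHostKeyAlgorithms))
        }
    }
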
S3StorageOptions *S3StorageOptions `type:"structure"` - // Specifies the name of the security policy that is attached to the server. + // Specifies the name of the security policy for the server. SecurityPolicyName *string `type:"string"` // A system-assigned unique identifier for a server instance that the Transfer @@ -20118,6 +20185,38 @@ func Protocol_Values() []string { } } +const ( + // SecurityPolicyProtocolSftp is a SecurityPolicyProtocol enum value + SecurityPolicyProtocolSftp = "SFTP" + + // SecurityPolicyProtocolFtps is a SecurityPolicyProtocol enum value + SecurityPolicyProtocolFtps = "FTPS" +) + +// SecurityPolicyProtocol_Values returns all elements of the SecurityPolicyProtocol enum +func SecurityPolicyProtocol_Values() []string { + return []string{ + SecurityPolicyProtocolSftp, + SecurityPolicyProtocolFtps, + } +} + +const ( + // SecurityPolicyResourceTypeServer is a SecurityPolicyResourceType enum value + SecurityPolicyResourceTypeServer = "SERVER" + + // SecurityPolicyResourceTypeConnector is a SecurityPolicyResourceType enum value + SecurityPolicyResourceTypeConnector = "CONNECTOR" +) + +// SecurityPolicyResourceType_Values returns all elements of the SecurityPolicyResourceType enum +func SecurityPolicyResourceType_Values() []string { + return []string{ + SecurityPolicyResourceTypeServer, + SecurityPolicyResourceTypeConnector, + } +} + const ( // SetStatOptionDefault is a SetStatOption enum value SetStatOptionDefault = "DEFAULT"