/
api_op_CreateDatasetExportJob.go
158 lines (140 loc) · 5.06 KB
/
api_op_CreateDatasetExportJob.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
// Code generated by smithy-go-codegen DO NOT EDIT.
package personalize
import (
"context"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/personalize/types"
"github.com/aws/smithy-go/middleware"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// Creates a job that exports data from your dataset to an Amazon S3 bucket. To
// allow Amazon Personalize to export the training data, you must specify a
// service-linked IAM role that gives Amazon Personalize PutObject permissions for
// your Amazon S3 bucket. For information, see Exporting a dataset
// (https://docs.aws.amazon.com/personalize/latest/dg/export-data.html) in the
// Amazon Personalize developer guide. Status A dataset export job can be in one of
// the following states:
//
// * CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or-
// CREATE FAILED
//
// To get the status of the export job, call
// DescribeDatasetExportJob, and specify the Amazon Resource Name (ARN) of the
// dataset export job. The dataset export is complete when the status shows as
// ACTIVE. If the status shows as CREATE FAILED, the response includes a
// failureReason key, which describes why the job failed.
func (c *Client) CreateDatasetExportJob(ctx context.Context, params *CreateDatasetExportJobInput, optFns ...func(*Options)) (*CreateDatasetExportJobOutput, error) {
	// Guard against a nil input pointer so the middleware pipeline always
	// receives a non-nil params struct.
	if params == nil {
		params = &CreateDatasetExportJobInput{}
	}
	result, metadata, err := c.invokeOperation(ctx, "CreateDatasetExportJob", params, optFns, c.addOperationCreateDatasetExportJobMiddlewares)
	if err != nil {
		return nil, err
	}
	// invokeOperation returns the deserialized response as interface{};
	// assert to the concrete output type and attach operation metadata.
	out := result.(*CreateDatasetExportJobOutput)
	out.ResultMetadata = metadata
	return out, nil
}
// CreateDatasetExportJobInput is the request payload for the
// CreateDatasetExportJob operation.
type CreateDatasetExportJobInput struct {
	// The Amazon Resource Name (ARN) of the dataset that contains the data to export.
	//
	// This member is required.
	DatasetArn *string
	// The name for the dataset export job.
	//
	// This member is required.
	JobName *string
	// The path to the Amazon S3 bucket where the job's output is stored.
	//
	// This member is required.
	JobOutput *types.DatasetExportJobOutput
	// The Amazon Resource Name (ARN) of the IAM service role that has permissions to
	// add data to your output Amazon S3 bucket.
	//
	// This member is required.
	RoleArn *string
	// The data to export, based on how you imported the data. You can choose to export
	// only BULK data that you imported using a dataset import job, only PUT data that
	// you imported incrementally (using the console, PutEvents, PutUsers and PutItems
	// operations), or ALL for both types. The default value is PUT.
	IngestionMode types.IngestionMode
	noSmithyDocumentSerde
}
// CreateDatasetExportJobOutput is the response payload returned by the
// CreateDatasetExportJob operation.
type CreateDatasetExportJobOutput struct {
	// The Amazon Resource Name (ARN) of the dataset export job.
	DatasetExportJobArn *string
	// Metadata pertaining to the operation's result.
	ResultMetadata middleware.Metadata
	noSmithyDocumentSerde
}
// addOperationCreateDatasetExportJobMiddlewares registers the full middleware
// stack for the CreateDatasetExportJob operation: serialization,
// deserialization, logging, content-length computation, endpoint resolution,
// SigV4 signing, retries, input validation, and response/error handling.
// Registration order is significant in smithy-go stacks; each registration is
// checked and the first failure aborts the wiring.
func (c *Client) addOperationCreateDatasetExportJobMiddlewares(stack *middleware.Stack, options Options) (err error) {
	// AWS JSON 1.1 protocol serializer/deserializer for this operation.
	err = stack.Serialize.Add(&awsAwsjson11_serializeOpCreateDatasetExportJob{}, middleware.After)
	if err != nil {
		return err
	}
	err = stack.Deserialize.Add(&awsAwsjson11_deserializeOpCreateDatasetExportJob{}, middleware.After)
	if err != nil {
		return err
	}
	if err = addSetLoggerMiddleware(stack, options); err != nil {
		return err
	}
	if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
		return err
	}
	if err = addResolveEndpointMiddleware(stack, options); err != nil {
		return err
	}
	// SigV4 signing requires the payload SHA-256 to be computed first.
	if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
		return err
	}
	if err = addRetryMiddlewares(stack, options); err != nil {
		return err
	}
	if err = addHTTPSignerV4Middleware(stack, options); err != nil {
		return err
	}
	if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
		return err
	}
	if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
		return err
	}
	if err = addClientUserAgent(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	// Validates the required input members (DatasetArn, JobName, JobOutput,
	// RoleArn) before the request is serialized.
	if err = addOpCreateDatasetExportJobValidationMiddleware(stack); err != nil {
		return err
	}
	if err = stack.Initialize.Add(newServiceMetadataMiddleware_opCreateDatasetExportJob(options.Region), middleware.Before); err != nil {
		return err
	}
	if err = addRequestIDRetrieverMiddleware(stack); err != nil {
		return err
	}
	if err = addResponseErrorMiddleware(stack); err != nil {
		return err
	}
	if err = addRequestResponseLogging(stack, options); err != nil {
		return err
	}
	return nil
}
// newServiceMetadataMiddleware_opCreateDatasetExportJob builds the service
// metadata middleware that records the region, service identifiers, and
// operation name for the CreateDatasetExportJob call.
func newServiceMetadataMiddleware_opCreateDatasetExportJob(region string) *awsmiddleware.RegisterServiceMetadata {
	metadata := awsmiddleware.RegisterServiceMetadata{
		Region:        region,
		ServiceID:     ServiceID,
		SigningName:   "personalize",
		OperationName: "CreateDatasetExportJob",
	}
	return &metadata
}