/
api_op_CreateStreamProcessor.go
222 lines (199 loc) · 9.05 KB
/
api_op_CreateStreamProcessor.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
// Code generated by smithy-go-codegen DO NOT EDIT.
package rekognition
import (
"context"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/rekognition/types"
"github.com/aws/smithy-go/middleware"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// CreateStreamProcessor creates an Amazon Rekognition stream processor that you
// can use to detect and recognize faces or to detect labels in a streaming
// video. Amazon Rekognition Video is a consumer of live video from Amazon
// Kinesis Video Streams. There are two different settings for stream processors:
//
//   - Face detection: provide a Kinesis video stream (Input) and a Kinesis data
//     stream (Output), and specify the face recognition criteria in Settings —
//     for example, the collection containing faces that you want to recognize.
//     After you have finished analyzing a streaming video, use
//     StopStreamProcessor to stop processing.
//
//   - Label detection: provide a Kinesis video stream (Input), Amazon S3 bucket
//     information (Output), and an Amazon SNS topic ARN (NotificationChannel).
//     You can also provide a KMS key ID to encrypt the data sent to your Amazon
//     S3 bucket. You specify what you want to detect in ConnectedHomeSettings,
//     such as people, packages and people, or pets, people, and packages, and
//     where in the frame to monitor with RegionsOfInterest. When you run
//     StartStreamProcessor on a label detection stream processor, you input
//     start and stop information to determine the length of the processing time.
//
// Use Name to assign an identifier for the stream processor; you use Name to
// manage the stream processor, for example by calling StartStreamProcessor with
// the Name field. This operation requires permissions to perform the
// rekognition:CreateStreamProcessor action, and rekognition:TagResource if you
// want to tag your stream processor.
func (c *Client) CreateStreamProcessor(ctx context.Context, params *CreateStreamProcessorInput, optFns ...func(*Options)) (*CreateStreamProcessorOutput, error) {
	// Middleware always receives a concrete input struct, never nil.
	if params == nil {
		params = &CreateStreamProcessorInput{}
	}

	res, md, err := c.invokeOperation(ctx, "CreateStreamProcessor", params, optFns, c.addOperationCreateStreamProcessorMiddlewares)
	if err != nil {
		return nil, err
	}

	output := res.(*CreateStreamProcessorOutput)
	output.ResultMetadata = md
	return output, nil
}
// CreateStreamProcessorInput carries the parameters for the
// CreateStreamProcessor operation.
type CreateStreamProcessorInput struct {
// Kinesis video stream that provides the source streaming video. If you are
// using the AWS CLI, the parameter name is StreamProcessorInput. This is required
// for both face search and label detection stream processors.
//
// This member is required.
Input *types.StreamProcessorInput
// An identifier you assign to the stream processor. You can use Name to manage the
// stream processor. For example, you can get the current status of the stream
// processor by calling DescribeStreamProcessor. Name is idempotent. This is
// required for both face search and label detection stream processors.
//
// This member is required.
Name *string
// Kinesis data stream or Amazon S3 bucket location to which Amazon
// Rekognition Video puts the analysis results. If you are using the AWS CLI, the
// parameter name is StreamProcessorOutput. This must be a S3Destination of an
// Amazon S3 bucket that you own for a label detection stream processor or a
// Kinesis data stream ARN for a face search stream processor.
//
// This member is required.
Output *types.StreamProcessorOutput
// The Amazon Resource Name (ARN) of the IAM role that allows access to the
// stream processor. The IAM role provides Rekognition read permissions for a
// Kinesis stream. It also provides write permissions to an Amazon S3 bucket and
// Amazon Simple Notification Service topic for a label detection stream processor.
// This is required for both face search and label detection stream processors.
//
// This member is required.
RoleArn *string
// Input parameters used in a streaming video analyzed by a stream processor. You
// can use FaceSearch to recognize faces in a streaming video, or you can use
// ConnectedHome to detect labels.
//
// This member is required.
Settings *types.StreamProcessorSettings
// Shows whether you are sharing data with Rekognition to improve model
// performance. You can choose this option at the account level or on a per-stream
// basis. Note that if you opt out at the account level this setting is ignored on
// individual streams.
DataSharingPreference *types.StreamProcessorDataSharingPreference
// The identifier for your AWS Key Management Service key (AWS KMS key). This is an
// optional parameter for label detection stream processors and should not be used
// to create a face search stream processor. You can supply the Amazon Resource
// Name (ARN) of your KMS key, the ID of your KMS key, an alias for your KMS key,
// or an alias ARN. The key is used to encrypt results and data published to your
// Amazon S3 bucket, which includes image frames and hero images. Your source
// images are unaffected.
KmsKeyId *string
// The Amazon Simple Notification Service topic to which Amazon Rekognition
// publishes the object detection results and completion status of a video analysis
// operation. Amazon Rekognition publishes a notification the first time an object
// of interest or a person is detected in the video stream. For example, if Amazon
// Rekognition detects a person at second 2, a pet at second 4, and a person again
// at second 5, Amazon Rekognition sends 2 object class detected notifications, one
// for a person at second 2 and one for a pet at second 4. Amazon Rekognition also
// publishes an end-of-session notification with a summary when the stream
// processing session is complete.
NotificationChannel *types.StreamProcessorNotificationChannel
// Specifies locations in the frames where Amazon Rekognition checks for objects or
// people. You can specify up to 10 regions of interest, and each region has either
// a polygon or a bounding box. This is an optional parameter for label detection
// stream processors and should not be used to create a face search stream
// processor.
RegionsOfInterest []types.RegionOfInterest
// A set of tags (key-value pairs) that you want to attach to the stream processor.
Tags map[string]string
noSmithyDocumentSerde
}
// CreateStreamProcessorOutput is the response from the CreateStreamProcessor
// operation.
type CreateStreamProcessorOutput struct {
// Amazon Resource Number for the newly created stream processor.
// NOTE(review): AWS documentation usually spells this "Amazon Resource Name
// (ARN)"; the generated wording above is preserved as-is.
StreamProcessorArn *string
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
noSmithyDocumentSerde
}
// addOperationCreateStreamProcessorMiddlewares registers every middleware the
// CreateStreamProcessor operation needs on the given stack: serialization,
// deserialization, endpoint resolution, retries, SigV4 signing, input
// validation, and logging/metadata helpers. The registration order below is
// significant — each call inserts into a specific stack phase — so it must not
// be rearranged.
func (c *Client) addOperationCreateStreamProcessorMiddlewares(stack *middleware.Stack, options Options) (err error) {
// Marshal the input into the awsjson1.1 wire format.
err = stack.Serialize.Add(&awsAwsjson11_serializeOpCreateStreamProcessor{}, middleware.After)
if err != nil {
return err
}
// Unmarshal the awsjson1.1 response into the output struct.
err = stack.Deserialize.Add(&awsAwsjson11_deserializeOpCreateStreamProcessor{}, middleware.After)
if err != nil {
return err
}
if err = addSetLoggerMiddleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
return err
}
if err = addResolveEndpointMiddleware(stack, options); err != nil {
return err
}
// Payload hash is required for SigV4 signing, added next.
if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
return err
}
if err = addRetryMiddlewares(stack, options); err != nil {
return err
}
if err = addHTTPSignerV4Middleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
return err
}
if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
return err
}
if err = addClientUserAgent(stack); err != nil {
return err
}
if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
return err
}
// Reject requests with missing required members before they are sent.
if err = addOpCreateStreamProcessorValidationMiddleware(stack); err != nil {
return err
}
if err = stack.Initialize.Add(newServiceMetadataMiddleware_opCreateStreamProcessor(options.Region), middleware.Before); err != nil {
return err
}
if err = addRequestIDRetrieverMiddleware(stack); err != nil {
return err
}
if err = addResponseErrorMiddleware(stack); err != nil {
return err
}
if err = addRequestResponseLogging(stack, options); err != nil {
return err
}
return nil
}
// newServiceMetadataMiddleware_opCreateStreamProcessor builds the middleware
// that tags requests with the service and operation metadata used for signing
// and endpoint resolution.
func newServiceMetadataMiddleware_opCreateStreamProcessor(region string) *awsmiddleware.RegisterServiceMetadata {
	metadata := awsmiddleware.RegisterServiceMetadata{
		Region:        region,
		ServiceID:     ServiceID,
		SigningName:   "rekognition",
		OperationName: "CreateStreamProcessor",
	}
	return &metadata
}