// api_op_RecognizeCelebrities.go
// Code generated by smithy-go-codegen DO NOT EDIT.
package rekognition
import (
"context"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/rekognition/types"
"github.com/aws/smithy-go/middleware"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// RecognizeCelebrities returns an array of celebrities recognized in the input
// image. For more information, see Recognizing Celebrities in the Amazon
// Rekognition Developer Guide. RecognizeCelebrities returns the 64 largest
// faces in the image. It lists the recognized celebrities in the
// CelebrityFaces array and any unrecognized faces in the UnrecognizedFaces
// array. Celebrities whose faces aren't among the largest 64 faces in the
// image are not returned. For each celebrity recognized, a Celebrity object is
// returned containing the celebrity name, ID, URL links to additional
// information, match confidence, and a ComparedFace object that you can use to
// locate the celebrity's face on the image. Amazon Rekognition doesn't retain
// information about which images a celebrity has been recognized in; your
// application must store this information and use the Celebrity ID property as
// a unique identifier for the celebrity. If you don't store the celebrity name
// or additional information URLs, you will need the ID to identify the
// celebrity in a call to the GetCelebrityInfo operation. You pass the input
// image either as base64-encoded image bytes or as a reference to an image in
// an Amazon S3 bucket. If you use the AWS CLI to call Amazon Rekognition
// operations, passing image bytes is not supported. The image must be either a
// PNG or JPEG formatted file. This operation requires permissions to perform
// the rekognition:RecognizeCelebrities action.
func (c *Client) RecognizeCelebrities(ctx context.Context, params *RecognizeCelebritiesInput, optFns ...func(*Options)) (*RecognizeCelebritiesOutput, error) {
	// Treat a nil params as an empty input so the pipeline always gets a value.
	input := params
	if input == nil {
		input = &RecognizeCelebritiesInput{}
	}

	result, metadata, err := c.invokeOperation(ctx, "RecognizeCelebrities", input, optFns, c.addOperationRecognizeCelebritiesMiddlewares)
	if err != nil {
		return nil, err
	}

	output := result.(*RecognizeCelebritiesOutput)
	output.ResultMetadata = metadata
	return output, nil
}
// RecognizeCelebritiesInput is the request parameters for the
// RecognizeCelebrities operation.
type RecognizeCelebritiesInput struct {
// The input image as base64-encoded bytes or an S3 object. If you use the AWS CLI
// to call Amazon Rekognition operations, passing base64-encoded image bytes is not
// supported. If you are using an AWS SDK to call Amazon Rekognition, you might not
// need to base64-encode image bytes passed using the Bytes field. For more
// information, see Images in the Amazon Rekognition developer guide.
//
// This member is required.
Image *types.Image
noSmithyDocumentSerde
}
// RecognizeCelebritiesOutput is the response from the RecognizeCelebrities
// operation.
type RecognizeCelebritiesOutput struct {
// Details about each celebrity found in the image. Amazon Rekognition can detect a
// maximum of 64 celebrities in an image. Each celebrity object includes the
// following attributes: Face, Confidence, Emotions, Landmarks, Pose, Quality,
// Smile, Id, KnownGender, MatchConfidence, Name, Urls.
CelebrityFaces []types.Celebrity
// Support for estimating image orientation using the the OrientationCorrection
// field has ceased as of August 2021. Any returned values for this field included
// in an API response will always be NULL. The orientation of the input image
// (counterclockwise direction). If your application displays the image, you can
// use this value to correct the orientation. The bounding box coordinates returned
// in CelebrityFaces and UnrecognizedFaces represent face locations before the
// image orientation is corrected. If the input image is in .jpeg format, it might
// contain exchangeable image (Exif) metadata that includes the image's
// orientation. If so, and the Exif metadata for the input image populates the
// orientation field, the value of OrientationCorrection is null. The
// CelebrityFaces and UnrecognizedFaces bounding box coordinates represent face
// locations after Exif metadata is used to correct the image orientation. Images
// in .png format don't contain Exif metadata.
OrientationCorrection types.OrientationCorrection
// Details about each unrecognized face in the image.
UnrecognizedFaces []types.ComparedFace
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
noSmithyDocumentSerde
}
// addOperationRecognizeCelebritiesMiddlewares registers the full middleware
// stack for the RecognizeCelebrities operation: serialization/deserialization,
// endpoint resolution, signing, retries, validation, logging, and response
// handling. Registration order matches the generated original exactly; the
// first registration failure aborts setup and is returned to the caller.
func (c *Client) addOperationRecognizeCelebritiesMiddlewares(stack *middleware.Stack, options Options) (err error) {
	// Each step performs exactly one registration on the stack.
	steps := []func() error{
		func() error {
			return stack.Serialize.Add(&awsAwsjson11_serializeOpRecognizeCelebrities{}, middleware.After)
		},
		func() error {
			return stack.Deserialize.Add(&awsAwsjson11_deserializeOpRecognizeCelebrities{}, middleware.After)
		},
		func() error { return addSetLoggerMiddleware(stack, options) },
		func() error { return awsmiddleware.AddClientRequestIDMiddleware(stack) },
		func() error { return smithyhttp.AddComputeContentLengthMiddleware(stack) },
		func() error { return addResolveEndpointMiddleware(stack, options) },
		func() error { return v4.AddComputePayloadSHA256Middleware(stack) },
		func() error { return addRetryMiddlewares(stack, options) },
		func() error { return addHTTPSignerV4Middleware(stack, options) },
		func() error { return awsmiddleware.AddRawResponseToMetadata(stack) },
		func() error { return awsmiddleware.AddRecordResponseTiming(stack) },
		func() error { return addClientUserAgent(stack) },
		func() error { return smithyhttp.AddErrorCloseResponseBodyMiddleware(stack) },
		func() error { return smithyhttp.AddCloseResponseBodyMiddleware(stack) },
		func() error { return addOpRecognizeCelebritiesValidationMiddleware(stack) },
		func() error {
			return stack.Initialize.Add(newServiceMetadataMiddleware_opRecognizeCelebrities(options.Region), middleware.Before)
		},
		func() error { return addRequestIDRetrieverMiddleware(stack) },
		func() error { return addResponseErrorMiddleware(stack) },
		func() error { return addRequestResponseLogging(stack, options) },
	}
	for _, register := range steps {
		if err = register(); err != nil {
			return err
		}
	}
	return nil
}
// newServiceMetadataMiddleware_opRecognizeCelebrities builds the service
// metadata middleware that tags requests with the region, service identifiers,
// and operation name for RecognizeCelebrities.
func newServiceMetadataMiddleware_opRecognizeCelebrities(region string) *awsmiddleware.RegisterServiceMetadata {
	metadata := awsmiddleware.RegisterServiceMetadata{
		Region:        region,
		ServiceID:     ServiceID,
		SigningName:   "rekognition",
		OperationName: "RecognizeCelebrities",
	}
	return &metadata
}