-
Notifications
You must be signed in to change notification settings - Fork 271
/
create_inference_server.go
106 lines (97 loc) · 3.81 KB
/
create_inference_server.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
package mts
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)
// CreateInferenceServer invokes the mts.CreateInferenceServer API synchronously.
// It always returns a non-nil response object; on failure the response is
// partially populated and err describes what went wrong.
func (client *Client) CreateInferenceServer(request *CreateInferenceServerRequest) (response *CreateInferenceServerResponse, err error) {
	resp := CreateCreateInferenceServerResponse()
	// DoAction performs the RPC call and unmarshals the result into resp.
	callErr := client.DoAction(request, resp)
	return resp, callErr
}
// CreateInferenceServerWithChan invokes the mts.CreateInferenceServer API asynchronously.
// It returns a response channel and an error channel (each buffered with capacity 1);
// exactly one of them receives a value, and both are closed when the call completes.
func (client *Client) CreateInferenceServerWithChan(request *CreateInferenceServerRequest) (<-chan *CreateInferenceServerResponse, <-chan error) {
	respChan := make(chan *CreateInferenceServerResponse, 1)
	errChan := make(chan error, 1)
	// Schedule the synchronous call on the client's async task pool.
	submitErr := client.AddAsyncTask(func() {
		defer close(respChan)
		defer close(errChan)
		if resp, callErr := client.CreateInferenceServer(request); callErr != nil {
			errChan <- callErr
		} else {
			respChan <- resp
		}
	})
	// If the task could not even be queued, report that error immediately
	// and close both channels ourselves (the task func will never run).
	if submitErr != nil {
		errChan <- submitErr
		close(respChan)
		close(errChan)
	}
	return respChan, errChan
}
// CreateInferenceServerWithCallback invokes the mts.CreateInferenceServer API asynchronously
// and delivers the outcome via callback. The returned channel (buffered, capacity 1)
// receives 1 when the call ran, or 0 when the task could not be scheduled, and is
// then closed.
func (client *Client) CreateInferenceServerWithCallback(request *CreateInferenceServerRequest, callback func(response *CreateInferenceServerResponse, err error)) <-chan int {
	done := make(chan int, 1)
	submitErr := client.AddAsyncTask(func() {
		defer close(done)
		resp, callErr := client.CreateInferenceServer(request)
		callback(resp, callErr)
		done <- 1
	})
	// Scheduling failed: the task func will never run, so signal the caller here.
	if submitErr != nil {
		callback(nil, submitErr)
		done <- 0
		close(done)
	}
	return done
}
// CreateInferenceServerRequest is the request struct for api CreateInferenceServer.
// All fields are sent as query parameters of the underlying RPC request.
type CreateInferenceServerRequest struct {
	*requests.RpcRequest
	// PipelineId identifies the MTS pipeline to run the server in — TODO confirm semantics against API docs.
	PipelineId           string           `position:"Query" name:"PipelineId"`
	// UserData is opaque caller-supplied data echoed back by the service — presumably; verify.
	UserData             string           `position:"Query" name:"UserData"`
	// ModelType selects the inference model type expected by the service.
	ModelType            string           `position:"Query" name:"ModelType"`
	FunctionName         string           `position:"Query" name:"FunctionName"`
	TestId               string           `position:"Query" name:"TestId"`
	// ModelPath is the location of the model artifact — likely an OSS path; confirm.
	ModelPath            string           `position:"Query" name:"ModelPath"`
}
// CreateInferenceServerResponse is the response struct for api CreateInferenceServer.
// Fields are unmarshaled from the service reply (JSON or XML, per the tags).
type CreateInferenceServerResponse struct {
	*responses.BaseResponse
	// Message is the human-readable status/error message returned by the service.
	Message   string `json:"Message" xml:"Message"`
	// RequestId uniquely identifies this API call for troubleshooting.
	RequestId string `json:"RequestId" xml:"RequestId"`
	// Code is the service-level result code.
	Code      string `json:"Code" xml:"Code"`
}
// CreateCreateInferenceServerRequest creates a request to invoke CreateInferenceServer API.
// The request is pre-configured with the product, version, action and endpoint
// metadata and uses HTTP POST.
func CreateCreateInferenceServerRequest() (request *CreateInferenceServerRequest) {
	req := &CreateInferenceServerRequest{RpcRequest: &requests.RpcRequest{}}
	req.InitWithApiInfo("Mts", "2014-06-18", "CreateInferenceServer", "mts", "openAPI")
	req.Method = requests.POST
	return req
}
// CreateCreateInferenceServerResponse creates a response to parse from CreateInferenceServer response.
// The embedded BaseResponse is pre-allocated so DoAction can unmarshal into it.
func CreateCreateInferenceServerResponse() (response *CreateInferenceServerResponse) {
	resp := &CreateInferenceServerResponse{BaseResponse: &responses.BaseResponse{}}
	return resp
}