forked from galaxydi/go-loghub
-
Notifications
You must be signed in to change notification settings - Fork 110
/
client_job.go
290 lines (260 loc) · 8.28 KB
/
client_job.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
package sls
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/url"
)
// Constants enumerating the ingestion data source types and the
// per-source format/position options accepted by the SLS jobs API.
const (
// Data source types for ingestion jobs.
DataSourceOSS DataSourceType = "AliyunOSS"
DataSourceBSS DataSourceType = "AliyunBSS"
DataSourceMaxCompute DataSourceType = "AliyunMaxCompute"
DataSourceJDBC DataSourceType = "JDBC"
DataSourceKafka DataSourceType = "Kafka"
DataSourceCMS DataSourceType = "AliyunCloudMonitor"
DataSourceGeneral DataSourceType = "General"
// Formats supported when ingesting objects from OSS.
OSSDataFormatTypeLine OSSDataFormatType = "Line"
OSSDataFormatTypeMultiline OSSDataFormatType = "Multiline"
OSSDataFormatTypeJSON OSSDataFormatType = "JSON"
OSSDataFormatTypeParquet OSSDataFormatType = "Parquet"
OSSDataFormatTypeDelimitedText OSSDataFormatType = "DelimitedText"
// How Kafka record values are interpreted.
KafkaValueTypeText KafkaValueType = "Text"
KafkaValueTypeJSON KafkaValueType = "JSON"
// Where Kafka consumption starts from.
KafkaPositionGroupOffsets KafkaPosition = "GROUP_OFFSETS"
KafkaPositionEarliest KafkaPosition = "EARLIEST"
KafkaPositionLatest KafkaPosition = "LATEST"
KafkaPositionTimeStamp KafkaPosition = "TIMESTAMP"
)
type (
// BaseJob holds the fields common to every SLS job resource.
BaseJob struct {
Name string `json:"name"`
DisplayName string `json:"displayName,omitempty"`
Description string `json:"description,omitempty"`
Type JobType `json:"type"`
Recyclable bool `json:"recyclable"`
CreateTime int64 `json:"createTime"`
LastModifyTime int64 `json:"lastModifyTime"`
}
// ScheduledJob is a job that runs on a schedule managed by the service.
ScheduledJob struct {
BaseJob
Status string `json:"status"`
Schedule *Schedule `json:"schedule"`
ScheduleId string `json:"scheduleId"`
}
// Ingestion is a scheduled job that imports data from an external
// source into a logstore.
Ingestion struct {
ScheduledJob
IngestionConfiguration *IngestionConfiguration `json:"configuration"`
}
// IngestionConfiguration describes the target logstore and the data
// source of an ingestion job. DataSource is one of the *Source structs
// below, selected by its embedded DataSourceType.
IngestionConfiguration struct {
Version string `json:"version"`
LogStore string `json:"logstore"`
NumberOfInstance int32 `json:"numberOfInstance"`
DataSource interface{} `json:"source"`
}
// DataSourceType discriminates the concrete source struct (see the
// DataSource* constants).
DataSourceType string
// DataSource is the common header embedded by every concrete source.
DataSource struct {
DataSourceType DataSourceType `json:"type"`
}
// >>> ingestion oss source
// OSSDataFormatType selects how OSS objects are parsed (see the
// OSSDataFormatType* constants).
OSSDataFormatType string
// AliyunOSSSource configures ingestion from an Aliyun OSS bucket.
AliyunOSSSource struct {
DataSource
Bucket string `json:"bucket"`
Endpoint string `json:"endpoint"`
RoleArn string `json:"roleARN"`
Prefix string `json:"prefix,omitempty"`
Pattern string `json:"pattern,omitempty"`
CompressionCodec string `json:"compressionCodec,omitempty"`
Encoding string `json:"encoding,omitempty"`
Format interface{} `json:"format,omitempty"`
RestoreObjectEnable bool `json:"restoreObjectEnable"`
LastModifyTimeAsLogTime bool `json:"lastModifyTimeAsLogTime"`
}
// OSSDataFormat is the common header embedded by the concrete OSS
// format structs below.
OSSDataFormat struct {
Type OSSDataFormatType `json:"type"`
TimeFormat string `json:"timeFormat"`
TimeZone string `json:"timeZone"`
}
// LineFormat parses each OSS object line as one log entry.
LineFormat struct {
OSSDataFormat
TimePattern string `json:"timePattern"`
}
// MultiLineFormat groups consecutive lines into one log entry using
// the given match/pattern rules.
MultiLineFormat struct {
LineFormat
MaxLines int64 `json:"maxLines,omitempty"`
Negate bool `json:"negate"`
Match string `json:"match"`
Pattern string `json:"pattern"`
FlushPattern string `json:"flushPattern"`
}
// StructureDataFormat is the common header for structured formats
// (JSON, Parquet, delimited text).
StructureDataFormat struct {
OSSDataFormat
TimeField string `json:"timeField"`
}
// JSONFormat parses OSS objects as JSON records.
JSONFormat struct {
StructureDataFormat
SkipInvalidRows bool `json:"skipInvalidRows"`
}
// ParquetFormat parses OSS objects as Parquet files.
ParquetFormat struct {
StructureDataFormat
}
// DelimitedTextFormat parses OSS objects as CSV-like delimited text.
DelimitedTextFormat struct {
StructureDataFormat
FieldNames []string `json:"fieldNames"`
FieldDelimiter string `json:"fieldDelimiter"`
QuoteChar string `json:"quoteChar"`
EscapeChar string `json:"escapeChar"`
SkipLeadingRows int64 `json:"skipLeadingRows"`
MaxLines int64 `json:"maxLines"`
FirstRowAsHeader bool `json:"firstRowAsHeader"`
}
// ingestion maxcompute source >>>
// AliyunMaxComputeSource configures ingestion from a MaxCompute table.
AliyunMaxComputeSource struct {
DataSource
AccessKeyID string `json:"accessKeyID"`
AccessKeySecret string `json:"accessKeySecret"`
Endpoint string `json:"endpoint"`
TunnelEndpoint string `json:"tunnelEndpoint,omitempty"`
Project string `json:"project"`
Table string `json:"table"`
PartitionSpec string `json:"partitionSpec"`
TimeField string `json:"timeField"`
TimeFormat string `json:"timeFormat"`
TimeZone string `json:"timeZone"`
}
// ingestion cloud monitor source
// AliyunCloudMonitorSource configures ingestion from Aliyun CloudMonitor.
AliyunCloudMonitorSource struct {
DataSource
AccessKeyID string `json:"accessKeyID"`
AccessKeySecret string `json:"accessKeySecret"`
StartTime int64 `json:"startTime"`
Namespaces []string `json:"namespaces"`
OutputType string `json:"outputType"`
DelayTime int64 `json:"delayTime"`
}
// ingestion kafka source
// KafkaValueType selects how Kafka record values are decoded.
KafkaValueType string
// KafkaPosition selects the starting offset strategy.
KafkaPosition string
// KafkaSource configures ingestion from a Kafka cluster.
KafkaSource struct {
DataSource
Topics string `json:"topics"`
BootStrapServers string `json:"bootstrapServers"`
ValueType KafkaValueType `json:"valueType"`
FromPosition KafkaPosition `json:"fromPosition"`
FromTimeStamp int64 `json:"fromTimestamp"`
ToTimeStamp int64 `json:"toTimestamp"`
TimeField string `json:"timeField"`
TimePattern string `json:"timePattern"`
TimeFormat string `json:"timeFormat"`
TimeZone string `json:"timeZone"`
AdditionalProps map[string]string `json:"additionalProps"`
}
// ingestion JDBC source
// AliyunBssSource configures ingestion of billing (BSS) data.
// NOTE(review): the comment above says "JDBC source" but the struct is
// the BSS source — confirm against the service API.
AliyunBssSource struct {
DataSource
RoleArn string `json:"roleARN"`
HistoryMonth int64 `json:"historyMonth"`
}
// ingestion general source
// IngestionGeneralSource carries free-form source fields; Fields has no
// json tag, so it marshals under the key "Fields".
IngestionGeneralSource struct {
DataSource
Fields map[string]interface{}
}
)
// CreateIngestion creates a new ingestion job in the given project by
// POSTing its JSON representation to the jobs endpoint.
func (c *Client) CreateIngestion(project string, ingestion *Ingestion) error {
	payload, err := json.Marshal(ingestion)
	if err != nil {
		return NewClientError(err)
	}
	headers := map[string]string{
		"x-log-bodyrawsize": fmt.Sprintf("%v", len(payload)),
		"Content-Type":      "application/json",
	}
	resp, err := c.request(project, "POST", "/jobs", headers, payload)
	if err != nil {
		return err
	}
	// The response body carries no data on success; release the connection.
	resp.Body.Close()
	return nil
}
// UpdateIngestion replaces the configuration of the ingestion job named
// ingestion.Name in the given project via a PUT request.
func (c *Client) UpdateIngestion(project string, ingestion *Ingestion) error {
	payload, err := json.Marshal(ingestion)
	if err != nil {
		return NewClientError(err)
	}
	headers := map[string]string{
		"x-log-bodyrawsize": fmt.Sprintf("%v", len(payload)),
		"Content-Type":      "application/json",
	}
	resp, err := c.request(project, "PUT", "/jobs/"+ingestion.Name, headers, payload)
	if err != nil {
		return err
	}
	// No useful payload on success; close to free the connection.
	resp.Body.Close()
	return nil
}
// GetIngestion fetches the ingestion job with the given name from the
// project and decodes the response body into an Ingestion.
// It returns a nil Ingestion whenever an error occurs.
func (c *Client) GetIngestion(project string, name string) (*Ingestion, error) {
	h := map[string]string{
		"x-log-bodyrawsize": "0",
		"Content-Type":      "application/json",
	}
	uri := "/jobs/" + name
	r, err := c.request(project, "GET", uri, h, nil)
	if err != nil {
		return nil, err
	}
	defer r.Body.Close()
	// Previously the ReadAll error was discarded; a truncated read would
	// surface as a confusing JSON error (or silently succeed on empty input).
	buf, err := ioutil.ReadAll(r.Body)
	if err != nil {
		return nil, NewClientError(err)
	}
	ingestion := &Ingestion{}
	if err := json.Unmarshal(buf, ingestion); err != nil {
		return nil, NewClientError(err)
	}
	return ingestion, nil
}
// ListIngestion pages through the ingestion jobs of a project, optionally
// filtered by logstore, job name, and display name. It returns the page of
// jobs plus the service-reported total and count.
func (c *Client) ListIngestion(project, logstore, name, displayName string, offset, size int) (ingestions []*Ingestion, total, count int, error error) {
	h := map[string]string{
		"x-log-bodyrawsize": "0",
		"Content-Type":      "application/json",
	}
	v := url.Values{}
	v.Add("logstore", logstore)
	v.Add("jobName", name)
	if displayName != "" {
		v.Add("displayName", displayName)
	}
	v.Add("jobType", "Ingestion")
	v.Add("offset", fmt.Sprintf("%d", offset))
	v.Add("size", fmt.Sprintf("%d", size))
	uri := "/jobs?" + v.Encode()
	r, err := c.request(project, "GET", uri, h, nil)
	if err != nil {
		return nil, 0, 0, err
	}
	defer r.Body.Close()
	// Envelope returned by the list endpoint.
	type ingestionList struct {
		Total   int          `json:"total"`
		Count   int          `json:"count"`
		Results []*Ingestion `json:"results"`
	}
	// Previously the ReadAll error was discarded; report it instead of
	// attempting to unmarshal a partial body.
	buf, err := ioutil.ReadAll(r.Body)
	if err != nil {
		return nil, 0, 0, NewClientError(err)
	}
	is := &ingestionList{}
	if err := json.Unmarshal(buf, is); err != nil {
		return nil, 0, 0, NewClientError(err)
	}
	return is.Results, is.Total, is.Count, nil
}
// DeleteIngestion removes the ingestion job with the given name from the
// project.
func (c *Client) DeleteIngestion(project string, name string) error {
	headers := map[string]string{
		"x-log-bodyrawsize": "0",
		"Content-Type":      "application/json",
	}
	resp, err := c.request(project, "DELETE", "/jobs/"+name, headers, nil)
	if err != nil {
		return err
	}
	// Nothing to read on success; close to release the connection.
	resp.Body.Close()
	return nil
}