provider.go
// Copyright (c) 2021 Terminus, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package analysis

import (
	"fmt"
	"sync/atomic"
	"time"

	"github.com/jinzhu/gorm"

	"github.com/erda-project/erda-infra/base/logs"
	"github.com/erda-project/erda-infra/base/servicehub"
	writer "github.com/erda-project/erda-infra/pkg/parallel-writer"
	"github.com/erda-project/erda-infra/providers/kafka"
	"github.com/erda-project/erda-infra/providers/mysql"
	"github.com/erda-project/erda/modules/extensions/loghub/metrics/rules/db"
)
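
// config describes this provider's file-based configuration.
// A minimal sketch of the corresponding config section, assuming YAML and
// using the `file:` tags below as keys (values are placeholders; the fields
// inside input and output.kafka are defined by the kafka provider and are
// omitted here):
//
//	filters:
//	  <tag-key>: "<tag-value>"
//	processors:
//	  scope: "<scope>"
//	  scope_id: "<scope-id>"
//	  scope_id_key: "<scope-id-key>"
//	  reload_interval: "3m"
//	input: {}       # kafka.ConsumerConfig
//	output:
//	  type: "kafka"
//	  kafka: {}     # kafka.ProducerConfig
//	  collector:
//	    url: "<collector-url>"
//	    username: "<username>"
//	    password: "<password>"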
type config struct {
	Filters    map[string]string `file:"filters"`
	Processors struct {
		Scope          string        `file:"scope"`
		ScopeID        string        `file:"scope_id"`
		ScopeIDKey     string        `file:"scope_id_key"`
		ReloadInterval time.Duration `file:"reload_interval" default:"3m"`
	} `file:"processors"`
	Input  kafka.ConsumerConfig `file:"input"`
	Output struct {
		Type      string               `file:"type"`
		Kafka     kafka.ProducerConfig `file:"kafka"`
		Collector struct {
			URL      string `file:"url"`
			UserName string `file:"username"`
			Password string `file:"password"`
		} `file:"collector"`
	} `file:"output"`
}
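
// provider wires the Kafka input, the MySQL-backed rule store, and the
// metric output together; processors holds the currently loaded rules and
// is swapped atomically on each reload.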
type provider struct {
	C          *config
	L          logs.Logger
	mysql      *gorm.DB
	kafka      kafka.Interface
	output     writer.Writer
	processors atomic.Value
	db         *db.DB
}
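
// Init resolves the mysql and kafka services from the hub, builds the rules
// DB wrapper, and creates the Kafka producer used as the output writer.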
func (p *provider) Init(ctx servicehub.Context) error {
	p.mysql = ctx.Service("mysql").(mysql.Interface).DB()
	p.db = db.New(p.mysql)
	p.kafka = ctx.Service("kafka").(kafka.Interface)
	w, err := p.kafka.NewProducer(&p.C.Output.Kafka)
	if err != nil {
		return fmt.Errorf("fail to create kafka producer: %s", err)
	}
	p.output = w
	return nil
}

// Start registers the Kafka consumer and starts a goroutine that reloads the
// processors from the database on every reload interval.
func (p *provider) Start() error {
	err := p.kafka.NewConsumer(&p.C.Input, p.invoke)
	if err != nil {
		return err
	}
	go func() {
		for {
			err := p.loadProcessors()
			if err != nil {
				p.L.Errorf("fail to load processors: %s", err)
			}
			time.Sleep(p.C.Processors.ReloadInterval)
		}
	}()
	return nil
}
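
// Close implements the provider lifecycle; there is nothing to release here.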
func (p *provider) Close() error { return nil }
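
// init registers the provider with servicehub under the name
// "logs-metrics-analysis", declaring kafka and mysql as dependencies so the
// hub can construct and configure it from the application's config file.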
func init() {
	servicehub.Register("logs-metrics-analysis", &servicehub.Spec{
		Services:     []string{"logs-metrics-analysis"},
		Dependencies: []string{"kafka", "mysql"},
		Description:  "parse logs to metrics",
		ConfigFunc:   func() interface{} { return &config{} },
		Creator: func() servicehub.Provider {
			return &provider{}
		},
	})
}