/
scrape.go
50 lines (43 loc) · 1.21 KB
/
scrape.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
package sync
import (
"time"
"github.com/bakito/adguardhome-sync/pkg/metrics"
"github.com/bakito/adguardhome-sync/pkg/types"
)
// startScraping initializes the metrics subsystem, applies defaults for
// unset (or invalid) metrics config values, and then scrapes all
// instances on a fixed interval for the lifetime of the process.
func (w *worker) startScraping() {
	metrics.Init()
	// Guard against zero *and* negative values from the config so the
	// ticker below can neither panic nor fire pathologically.
	if w.cfg.API.Metrics.ScrapeInterval <= 0 {
		w.cfg.API.Metrics.ScrapeInterval = 30 * time.Second
	}
	if w.cfg.API.Metrics.QueryLogLimit <= 0 {
		w.cfg.API.Metrics.QueryLogLimit = 10_000
	}
	l.With(
		"scrape-interval", w.cfg.API.Metrics.ScrapeInterval,
		"query-log-limit", w.cfg.API.Metrics.QueryLogLimit,
	).Info("setup metrics")
	// Scrape once immediately so metrics are available right away,
	// then again on every tick. time.NewTicker is preferred over
	// time.Tick (staticcheck SA1015); the ticker is intentionally
	// never stopped because this loop never returns.
	w.scrape()
	ticker := time.NewTicker(w.cfg.API.Metrics.ScrapeInterval)
	for range ticker.C {
		w.scrape()
	}
}
// scrape collects metrics from the origin and every replica instance
// and publishes them to the metrics package as one batch.
func (w *worker) scrape() {
	// Final length is known up front: one entry for the origin plus
	// one per replica — pre-size to avoid repeated append growth.
	ims := make([]metrics.InstanceMetrics, 0, 1+len(w.cfg.Replicas))
	ims = append(ims, w.getMetrics(w.cfg.Origin))
	for _, replica := range w.cfg.Replicas {
		ims = append(ims, w.getMetrics(replica))
	}
	metrics.Update(ims...)
}
// getMetrics fetches status, stats, and the query log from a single
// AdGuardHome instance. Collection is best-effort: a failed fetch
// leaves the corresponding field at its zero value so one unreachable
// instance does not abort the whole scrape.
func (w *worker) getMetrics(inst types.AdGuardInstance) (im metrics.InstanceMetrics) {
	client, err := w.createClient(inst)
	if err != nil {
		// Bug fix: log the URL of the instance we actually failed to
		// reach — the original always logged the origin's URL (and
		// said "origin") even when the failing instance was a replica.
		l.With("error", err, "url", inst.URL).Error("Error creating client")
		return
	}
	im.HostName = inst.Host
	// Errors deliberately ignored: metrics scraping is best-effort.
	im.Status, _ = client.Status()
	im.Stats, _ = client.Stats()
	im.QueryLog, _ = client.QueryLog(w.cfg.API.Metrics.QueryLogLimit)
	return
}