forked from pwillie/prometheus-es-adapter
/
read.go
116 lines (104 loc) · 3.04 KB
/
read.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
package elasticsearch
import (
"context"
"encoding/json"
"fmt"
elastic "github.com/olivere/elastic/v7"
"github.com/prometheus/prometheus/prompb"
"go.uber.org/zap"
)
// ReadService will proxy Prometheus queries to Elasticsearch
type ReadService struct {
	client *elastic.Client // Elasticsearch client used to execute searches
	config *ReadConfig     // index alias and result-size settings
	logger *zap.Logger     // structured logger for debug/panic output
}
// ReadConfig configures the ReadService
type ReadConfig struct {
	Alias   string // index alias prefix; searches target "<Alias>-*"
	MaxDocs int    // maximum number of documents returned per search
}
// NewReadService will create a new ReadService backed by the given
// Elasticsearch client and configuration.
func NewReadService(logger *zap.Logger, client *elastic.Client, config *ReadConfig) *ReadService {
	// TODO: add stats
	return &ReadService{
		client: client,
		config: config,
		logger: logger,
	}
}
// Read will perform one Elasticsearch query per incoming remote-read
// query and convert the hits into Prometheus query results, preserving
// the order of the request.
//
// It returns an error as soon as any query fails; no partial results
// are returned in that case.
func (svc *ReadService) Read(ctx context.Context, req []*prompb.Query) ([]*prompb.QueryResult, error) {
	results := make([]*prompb.QueryResult, 0, len(req))
	for _, q := range req {
		resp, err := svc.buildCommand(q).Do(ctx)
		if err != nil {
			// Wrap so callers can tell this layer's failure apart.
			return nil, fmt.Errorf("executing elasticsearch query: %w", err)
		}
		// Structured field instead of fmt.Sprintf: avoids formatting
		// work/allocation when debug logging is disabled.
		svc.logger.Debug("Query returned results", zap.Any("totalHits", resp.Hits.TotalHits))
		ts, err := svc.createTimeseries(resp.Hits)
		if err != nil {
			return nil, err
		}
		results = append(results, &prompb.QueryResult{Timeseries: ts})
	}
	return results, nil
}
// buildCommand translates a Prometheus remote-read query into an
// Elasticsearch search request: each label matcher becomes a term or
// regexp clause on the corresponding "label.<name>" field, and the
// query's time window becomes a range filter on "timestamp".
func (svc *ReadService) buildCommand(q *prompb.Query) *elastic.SearchService {
	boolQuery := elastic.NewBoolQuery()
	for _, matcher := range q.Matchers {
		field := "label." + matcher.Name
		switch matcher.Type {
		case prompb.LabelMatcher_EQ:
			boolQuery = boolQuery.Filter(elastic.NewTermQuery(field, matcher.Value))
		case prompb.LabelMatcher_NEQ:
			boolQuery = boolQuery.MustNot(elastic.NewTermQuery(field, matcher.Value))
		case prompb.LabelMatcher_RE:
			boolQuery = boolQuery.Filter(elastic.NewRegexpQuery(field, matcher.Value))
		case prompb.LabelMatcher_NRE:
			boolQuery = boolQuery.MustNot(elastic.NewRegexpQuery(field, matcher.Value))
		default:
			// An unrecognized matcher type is a programming error.
			svc.logger.Panic("unknown match", zap.String("type", matcher.Type.String()))
		}
	}
	// Restrict hits to the query's time window (inclusive bounds).
	boolQuery = boolQuery.Filter(elastic.NewRangeQuery("timestamp").Gte(q.StartTimestampMs).Lte(q.EndTimestampMs))
	return svc.client.Search().
		Index(svc.config.Alias + "-*").
		Type(sampleType).
		Query(boolQuery).
		Size(svc.config.MaxDocs).
		Sort("timestamp", true)
}
// createTimeseries converts Elasticsearch search hits into Prometheus
// time series, grouping samples by the fingerprint of their label set.
// Samples keep the order in which the hits arrived (sorted by timestamp
// by buildCommand).
func (svc *ReadService) createTimeseries(results *elastic.SearchHits) ([]*prompb.TimeSeries, error) {
	tsMap := make(map[string]*prompb.TimeSeries)
	for _, r := range results.Hits {
		var s prometheusSample
		// BUG FIX: the original decoded into &r (the hit itself), so s
		// stayed zero-valued — every sample got empty labels and a zero
		// value/timestamp, and all series collapsed into one fingerprint.
		if err := json.Unmarshal(r.Source, &s); err != nil {
			// Return the error instead of logger.Fatal: one malformed
			// document should fail this query, not kill the process.
			return nil, fmt.Errorf("unmarshalling sample: %w", err)
		}
		fingerprint := s.Labels.Fingerprint().String()
		ts, ok := tsMap[fingerprint]
		if !ok {
			labels := make([]*prompb.Label, 0, len(s.Labels))
			for k, v := range s.Labels {
				labels = append(labels, &prompb.Label{
					Name:  string(k),
					Value: string(v),
				})
			}
			ts = &prompb.TimeSeries{
				Labels: labels,
			}
			tsMap[fingerprint] = ts
		}
		ts.Samples = append(ts.Samples, prompb.Sample{
			Value:     s.Value,
			Timestamp: s.Timestamp,
		})
	}
	ret := make([]*prompb.TimeSeries, 0, len(tsMap))
	for _, s := range tsMap {
		ret = append(ret, s)
	}
	return ret, nil
}