Commit 3331edb: update README
f1yegor committed Oct 9, 2016

Showing 2 changed files with 254 additions and 0 deletions.

README.md: 21 additions & 0 deletions

# Clickhouse Exporter for Prometheus

This is a simple server that periodically scrapes [ClickHouse](https://clickhouse.yandex/) stats and exports them via HTTP for [Prometheus](https://prometheus.io/) consumption.

To run it:

```bash
./clickhouse_exporter [flags]
```

Help on flags:
```bash
./clickhouse_exporter --help
```
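
To see what the exporter exposes, you can scrape it yourself. The address and path below are the exporter's defaults (`:9116` and `/metrics`); the `grep` filter is only illustrative:

```bash
# Fetch the metrics in Prometheus text format and keep the ClickHouse ones.
curl -s http://localhost:9116/metrics | grep '^clickhouse_'
```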

## Using Docker

```bash
docker run -d -p 9116:9116 f1yegor/clickhouse-exporter -scrape_uri=http://clickhouse.service.consul:8123/
```

clickhouse_exporter.go: 233 additions & 0 deletions

package main

import (
	"crypto/tls"
	"flag"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"strconv"
	"strings"
	"sync"
	"unicode"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/log"
)

const (
	namespace = "clickhouse" // For Prometheus metrics.
)

var (
	listeningAddress    = flag.String("telemetry.address", ":9116", "Address on which to expose metrics.")
	metricsEndpoint     = flag.String("telemetry.endpoint", "/metrics", "Path under which to expose metrics.")
	clickhouseScrapeURI = flag.String("scrape_uri", "http://localhost:8123/", "URI to clickhouse http endpoint")
	insecure            = flag.Bool("insecure", true, "Ignore server certificate if using https")
)

// Exporter collects clickhouse stats from the given URI and exports them using
// the prometheus metrics package.
type Exporter struct {
	metricsURI string
	eventsURI  string
	mutex      sync.RWMutex
	client     *http.Client

	scrapeFailures prometheus.Counter

	gauges   []*prometheus.GaugeVec
	counters []*prometheus.CounterVec
}

// NewExporter returns an initialized Exporter.
func NewExporter(uri string) *Exporter {
	return &Exporter{
		metricsURI: uri + "?query=" + url.QueryEscape("select * from system.metrics"),
		eventsURI:  uri + "?query=" + url.QueryEscape("select * from system.events"),
		scrapeFailures: prometheus.NewCounter(prometheus.CounterOpts{
			Namespace: namespace,
			Name:      "exporter_scrape_failures_total",
			Help:      "Number of errors while scraping clickhouse.",
		}),
		gauges:   make([]*prometheus.GaugeVec, 0, 20),
		counters: make([]*prometheus.CounterVec, 0, 20),
		client: &http.Client{
			Transport: &http.Transport{
				TLSClientConfig: &tls.Config{InsecureSkipVerify: *insecure},
			},
		},
	}
}

// Describe describes all the metrics ever exported by the clickhouse exporter. It
// implements prometheus.Collector.
func (e *Exporter) Describe(ch chan<- *prometheus.Desc) {
	// We cannot know in advance what metrics the exporter will generate
	// from clickhouse. So we use the poor man's describe method: Run a collect
	// and send the descriptors of all the collected metrics.

	metricCh := make(chan prometheus.Metric)
	doneCh := make(chan struct{})

	go func() {
		for m := range metricCh {
			ch <- m.Desc()
		}
		close(doneCh)
	}()

	e.Collect(metricCh)
	close(metricCh)
	<-doneCh
}

func (e *Exporter) collect(ch chan<- prometheus.Metric) error {
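	// system.metrics reports point-in-time values and is exported here as gauges;
	// system.events reports cumulative counts and is exported as *_total counters.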

	metrics, err := e.parseResponse(e.metricsURI)
	if err != nil {
		return fmt.Errorf("Error scraping clickhouse url %v: %v", e.metricsURI, err)
	}

	for _, m := range metrics {
		newMetric := prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Namespace: namespace,
			Name:      toSnake(m.key),
			Help:      "Number of " + m.key + " currently processed",
		}, []string{}).WithLabelValues()
		newMetric.Set(float64(m.value))
		newMetric.Collect(ch)
		//e.gauges = append(e.gauges, newMetric)
	}

	events, err := e.parseResponse(e.eventsURI)
	if err != nil {
		return fmt.Errorf("Error scraping clickhouse url %v: %v", e.eventsURI, err)
	}

	for _, ev := range events {
		newMetric := prometheus.NewCounterVec(prometheus.CounterOpts{
			Namespace: namespace,
			Name:      toSnake(ev.key) + "_total",
			Help:      "Number of " + ev.key + " total processed",
		}, []string{}).WithLabelValues()
		newMetric.Set(float64(ev.value))
		newMetric.Collect(ch)
		//e.counters = append(e.counters, newMetric)
	}
	return nil
}

type lineResult struct {
	key   string
	value int
}

func (e *Exporter) parseResponse(uri string) ([]lineResult, error) {
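	// ClickHouse's HTTP interface answers in TabSeparated format by default,
	// so each non-empty line is expected to hold a metric name and an integer value.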
	resp, err := e.client.Get(uri)
	if err != nil {
		return nil, fmt.Errorf("Error scraping clickhouse: %v", err)
	}
	defer resp.Body.Close()

	data, err := ioutil.ReadAll(resp.Body)
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		if err != nil {
			data = []byte(err.Error())
		}
		return nil, fmt.Errorf("Status %s (%d): %s", resp.Status, resp.StatusCode, data)
	}

	// Parsing results
	lines := strings.Split(string(data), "\n")
	results := make([]lineResult, 0)

	for i, line := range lines {
		parts := strings.Fields(line)
		if len(parts) == 0 {
			continue
		}
		if len(parts) != 2 {
			return nil, fmt.Errorf("Unexpected %d line: %s", i, line)
		}
		k := strings.TrimSpace(parts[0])
		v, err := strconv.Atoi(strings.TrimSpace(parts[1]))
		if err != nil {
			return nil, err
		}
		results = append(results, lineResult{k, v})
	}
	return results, nil
}

// Collect fetches the stats from configured clickhouse location and delivers them
// as Prometheus metrics. It implements prometheus.Collector.
func (e *Exporter) Collect(ch chan<- prometheus.Metric) {
	e.mutex.Lock() // To protect metrics from concurrent collects.
	defer e.mutex.Unlock()
	if err := e.collect(ch); err != nil {
		log.Printf("Error scraping clickhouse: %s", err)
		e.scrapeFailures.Inc()
		e.scrapeFailures.Collect(ch)
	}
	// Reset metrics.
	for _, vec := range e.gauges {
		vec.Reset()
	}

	for _, vec := range e.counters {
		vec.Reset()
	}

	for _, vec := range e.gauges {
		vec.Collect(ch)
	}

	for _, vec := range e.counters {
		vec.Collect(ch)
	}

	return
}

// toSnake convert the given string to snake case following the Golang format:
// acronyms are converted to lower-case and preceded by an underscore.
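// For example (illustrative): "MemoryTracking" becomes "memory_tracking",
// "HTTPConnection" becomes "http_connection".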
func toSnake(in string) string {
	runes := []rune(in)
	length := len(runes)

	var out []rune
	for i := 0; i < length; i++ {
		if i > 0 && unicode.IsUpper(runes[i]) && ((i+1 < length && unicode.IsLower(runes[i+1])) || unicode.IsLower(runes[i-1])) {
			out = append(out, '_')
		}
		out = append(out, unicode.ToLower(runes[i]))
	}

	return string(out)
}

// Compile-time interface check (left disabled in the original):
// var _ prometheus.Collector = (*Exporter)(nil)

func main() {
	flag.Parse()

	exporter := NewExporter(*clickhouseScrapeURI)
	prometheus.MustRegister(exporter)

	log.Printf("Starting Server: %s", *listeningAddress)
	http.Handle(*metricsEndpoint, prometheus.Handler())
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte(`<html>
			<head><title>Clickhouse Exporter</title></head>
			<body>
			<h1>Clickhouse Exporter</h1>
			<p><a href="` + *metricsEndpoint + `">Metrics</a></p>
			</body>
			</html>`))
	})

	log.Fatal(http.ListenAndServe(*listeningAddress, nil))
}
