Skip to content

Commit

Permalink
Merge pull request ClickHouse#4 from bobrik/parts-stats
Browse files Browse the repository at this point in the history
Add stats for part counts and sizes.

@bobrik Thanks for the contribution!
  • Loading branch information
f1yegor committed Jul 10, 2017
2 parents 2dfd2ce + b608398 commit 255cc60
Showing 1 changed file with 92 additions and 10 deletions.
102 changes: 92 additions & 10 deletions clickhouse_exporter.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ type Exporter struct {
metricsURI string
asyncMetricsURI string
eventsURI string
partsURI string
mutex sync.RWMutex
client *http.Client

Expand All @@ -58,10 +59,15 @@ func NewExporter(uri url.URL) *Exporter {
q.Set("query", "select * from system.events")
eventsURI.RawQuery = q.Encode()

partsURI := uri
q.Set("query", "select database, table, sum(bytes) as bytes, count() as parts from system.parts where active = 1 group by database, table")
partsURI.RawQuery = q.Encode()

return &Exporter{
metricsURI: metricsURI.String(),
asyncMetricsURI: asyncMetricsURI.String(),
eventsURI: eventsURI.String(),
partsURI: partsURI.String(),
scrapeFailures: prometheus.NewCounter(prometheus.CounterOpts{
Namespace: namespace,
Name: "exporter_scrape_failures_total",
Expand Down Expand Up @@ -100,8 +106,7 @@ func (e *Exporter) Describe(ch chan<- *prometheus.Desc) {
}

func (e *Exporter) collect(ch chan<- prometheus.Metric) error {

metrics, err := e.parseResponse(e.metricsURI)
metrics, err := e.parseKeyValueResponse(e.metricsURI)
if err != nil {
return fmt.Errorf("Error scraping clickhouse url %v: %v", e.metricsURI, err)
}
Expand All @@ -116,7 +121,7 @@ func (e *Exporter) collect(ch chan<- prometheus.Metric) error {
newMetric.Collect(ch)
}

asyncMetrics, err := e.parseResponse(e.asyncMetricsURI)
asyncMetrics, err := e.parseKeyValueResponse(e.asyncMetricsURI)
if err != nil {
return fmt.Errorf("Error scraping clickhouse url %v: %v", e.asyncMetricsURI, err)
}
Expand All @@ -131,7 +136,7 @@ func (e *Exporter) collect(ch chan<- prometheus.Metric) error {
newMetric.Collect(ch)
}

events, err := e.parseResponse(e.eventsURI)
events, err := e.parseKeyValueResponse(e.eventsURI)
if err != nil {
return fmt.Errorf("Error scraping clickhouse url %v: %v", e.eventsURI, err)
}
Expand All @@ -144,15 +149,34 @@ func (e *Exporter) collect(ch chan<- prometheus.Metric) error {
prometheus.CounterValue, float64(ev.value))
ch <- newMetric
}
return nil
}

type lineResult struct {
key string
value int
parts, err := e.parsePartsResponse(e.partsURI)
if err != nil {
return fmt.Errorf("Error scraping clickhouse url %v: %v", e.partsURI, err)
}

for _, part := range parts {
newBytesMetric := prometheus.NewGaugeVec(prometheus.GaugeOpts{
Namespace: namespace,
Name: "table_parts_bytes",
Help: "Table size in bytes",
}, []string{"database", "table"}).WithLabelValues(part.database, part.table)
newBytesMetric.Set(float64(part.bytes))
newBytesMetric.Collect(ch)

newCountMetric := prometheus.NewGaugeVec(prometheus.GaugeOpts{
Namespace: namespace,
Name: "table_parts_count",
Help: "Number of parts of the table",
}, []string{"database", "table"}).WithLabelValues(part.database, part.table)
newCountMetric.Set(float64(part.parts))
newCountMetric.Collect(ch)
}

return nil
}

func (e *Exporter) parseResponse(uri string) ([]lineResult, error) {
func (e *Exporter) response(uri string) ([]byte, error) {
resp, err := e.client.Get(uri)
if err != nil {
return nil, fmt.Errorf("Error scraping clickhouse: %v", err)
Expand All @@ -167,6 +191,20 @@ func (e *Exporter) parseResponse(uri string) ([]lineResult, error) {
return nil, fmt.Errorf("Status %s (%d): %s", resp.Status, resp.StatusCode, data)
}

return data, nil
}

// lineResult is one row of a two-column ClickHouse TSV response
// (e.g. system.metrics / system.events): a metric name and its value.
type lineResult struct {
key string
value int
}

func (e *Exporter) parseKeyValueResponse(uri string) ([]lineResult, error) {
data, err := e.response(uri)
if err != nil {
return nil, err
}

// Parsing results
lines := strings.Split(string(data), "\n")
var results []lineResult = make([]lineResult, 0)
Expand All @@ -190,6 +228,50 @@ func (e *Exporter) parseResponse(uri string) ([]lineResult, error) {
return results, nil
}

// partsResult is one row of the system.parts aggregation query:
// per (database, table), the total on-disk size and active part count.
type partsResult struct {
database string
table string
bytes int
parts int
}

// parsePartsResponse fetches the system.parts aggregation query from uri and
// parses each tab/space-separated line into a partsResult. Each non-empty
// line must have exactly four fields: database, table, bytes, part count.
// It returns an error if the HTTP request fails, a line has an unexpected
// field count, or a numeric column cannot be parsed.
func (e *Exporter) parsePartsResponse(uri string) ([]partsResult, error) {
	data, err := e.response(uri)
	if err != nil {
		return nil, err
	}

	// Parsing results. Pre-size for the typical case of one result per line.
	lines := strings.Split(string(data), "\n")
	results := make([]partsResult, 0, len(lines))

	for i, line := range lines {
		fields := strings.Fields(line)
		if len(fields) == 0 {
			// Blank line (e.g. the trailing newline of the response body).
			continue
		}
		if len(fields) != 4 {
			return nil, fmt.Errorf("Unexpected line %d: %s", i, line)
		}

		// strings.Fields never leaves surrounding whitespace on its tokens,
		// so the values can be used directly without trimming.
		database := fields[0]
		table := fields[1]

		bytes, err := strconv.Atoi(fields[2])
		if err != nil {
			return nil, err
		}

		count, err := strconv.Atoi(fields[3])
		if err != nil {
			return nil, err
		}

		results = append(results, partsResult{database, table, bytes, count})
	}

	return results, nil
}

// Collect fetches the stats from configured clickhouse location and delivers them
// as Prometheus metrics. It implements prometheus.Collector.
func (e *Exporter) Collect(ch chan<- prometheus.Metric) {
Expand Down

0 comments on commit 255cc60

Please sign in to comment.