Commit

linter fixes
Signed-off-by: ChrsMark <chrismarkou92@gmail.com>
ChrsMark committed Dec 13, 2022
1 parent 9acd401 commit 82290f3
Showing 18 changed files with 157 additions and 165 deletions.
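Most of the diff below falls into three linter-driven patterns: the deprecated io/ioutil calls are replaced by their io and os equivalents, github.com/pkg/errors is dropped in favor of fmt.Errorf with the %w verb, and return values that were previously dropped silently are now assigned to the blank identifier. As a minimal standalone sketch of the first pattern (not code from this repository), reading a body with the post-Go-1.16 standard library looks like this:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
)

func main() {
	// io.NopCloser replaces the deprecated ioutil.NopCloser,
	// and io.ReadAll replaces ioutil.ReadAll (both moved into io in Go 1.16).
	body := io.NopCloser(bytes.NewBufferString("up 1\n"))
	defer body.Close()

	b, err := io.ReadAll(body)
	if err != nil {
		fmt.Println("read failed:", err)
		return
	}
	fmt.Printf("read %d bytes\n", len(b))
}
```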
29 changes: 16 additions & 13 deletions metricbeat/helper/openmetrics/openmetrics.go
@@ -21,13 +21,10 @@ import (
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"net/http"
"regexp"
"time"

"github.com/pkg/errors"

"github.com/elastic/beats/v7/metricbeat/helper"
"github.com/elastic/beats/v7/metricbeat/helper/prometheus"
"github.com/elastic/beats/v7/metricbeat/mb"
@@ -93,7 +90,7 @@ func (p *openmetrics) GetFamilies() ([]*prometheus.MetricFamily, error) {
}

if resp.StatusCode > 399 {
bodyBytes, err := ioutil.ReadAll(reader)
bodyBytes, err := io.ReadAll(reader)
if err == nil {
p.logger.Debug("error received from openmetrics endpoint: ", string(bodyBytes))
}
@@ -106,8 +103,14 @@ func (p *openmetrics) GetFamilies() ([]*prometheus.MetricFamily, error) {
}

appendTime := time.Now().Round(0)
b, err := ioutil.ReadAll(reader)
b, err := io.ReadAll(reader)
if err != nil {
return nil, fmt.Errorf("failed to read response: %w", err)
}
families, err := prometheus.ParseMetricFamilies(b, contentType, appendTime)
if err != nil {
return nil, fmt.Errorf("failed to parse families: %w", err)
}

return families, nil
}
@@ -169,14 +172,14 @@ func (p *openmetrics) ProcessMetrics(families []*prometheus.MetricFamily, mappin
for k, v := range allLabels {
if l, ok := mapping.Labels[k]; ok {
if l.IsKey() {
keyLabels.Put(l.GetField(), v)
_, _ = keyLabels.Put(l.GetField(), v)
} else {
labels.Put(l.GetField(), v)
_, _ = labels.Put(l.GetField(), v)
}
} else if storeAllLabels {
// if label for this metric is not found at the label mappings but
// it is configured to store any labels found, make it so
labels.Put(labelsLocation+"."+k, v)
_, _ = labels.Put(labelsLocation+"."+k, v)
}
}

@@ -186,7 +189,7 @@ func (p *openmetrics) ProcessMetrics(families []*prometheus.MetricFamily, mappin
// not considering these extra fields to be keylabels as that case
// have not appeared yet
for k, v := range extraFields {
labels.Put(k, v)
_, _ = labels.Put(k, v)
}

// Keep a info document if it's an infoMetric
@@ -202,7 +205,7 @@ func (p *openmetrics) ProcessMetrics(families []*prometheus.MetricFamily, mappin
if field != "" {
event := getEvent(eventsMap, keyLabels)
update := mapstr.M{}
update.Put(field, value)
_, _ = update.Put(field, value)
// value may be a mapstr (for histograms and summaries), do a deep update to avoid smashing existing fields
event.DeepUpdate(update)

@@ -260,7 +263,7 @@ type infoMetricData struct {
func (p *openmetrics) ReportProcessedMetrics(mapping *MetricsMapping, r mb.ReporterV2) error {
events, err := p.GetProcessedMetrics(mapping)
if err != nil {
return errors.Wrap(err, "error getting processed metrics")
return fmt.Errorf("error getting processed metrics: %w", err)
}
for _, event := range events {
r.Event(mb.Event{
@@ -286,7 +289,7 @@ func getLabels(metric *prometheus.OpenMetric) mapstr.M {
labels := mapstr.M{}
for _, label := range metric.GetLabel() {
if label.Name != "" && label.Value != "" {
labels.Put(label.Name, label.Value)
_, _ = labels.Put(label.Name, label.Value)
}
}
return labels
@@ -300,7 +303,7 @@ func CompilePatternList(patterns *[]string) ([]*regexp.Regexp, error) {
for _, pattern := range *patterns {
r, err := regexp.Compile(pattern)
if err != nil {
return nil, errors.Wrapf(err, "compiling pattern '%s'", pattern)
return nil, fmt.Errorf("failed to compile pattern '%s': %w", pattern, err)
}
compiledPatterns = append(compiledPatterns, r)
}
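The helper previously wrapped errors with github.com/pkg/errors; this commit switches to fmt.Errorf with %w, which keeps the wrapped cause inspectable via errors.Is and errors.As. A minimal sketch of that pattern, using a made-up errNotFound sentinel rather than anything from the Beats code:

```go
package main

import (
	"errors"
	"fmt"
)

// errNotFound stands in for any underlying error value a caller may want to test for.
var errNotFound = errors.New("not found")

// lookup wraps the cause with %w (the pattern this commit switches to)
// instead of github.com/pkg/errors.Wrap.
func lookup(key string) error {
	if key != "known" {
		return fmt.Errorf("looking up %q: %w", key, errNotFound)
	}
	return nil
}

func main() {
	err := lookup("missing")
	fmt.Println(err)                         // looking up "missing": not found
	fmt.Println(errors.Is(err, errNotFound)) // true: %w keeps the error chain intact
}
```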
10 changes: 5 additions & 5 deletions metricbeat/helper/openmetrics/openmetrics_test.go
@@ -20,7 +20,7 @@ package openmetrics
import (
"bytes"
"compress/gzip"
"io/ioutil"
"io"
"net/http"
"sort"
"testing"
@@ -186,7 +186,7 @@ var _ = httpfetcher(&mockFetcher{})
func (m mockFetcher) FetchResponse() (*http.Response, error) {
body := bytes.NewBuffer(nil)
writer := gzip.NewWriter(body)
writer.Write([]byte(m.response))
_, _ = writer.Write([]byte(m.response))
writer.Close()

return &http.Response{
@@ -195,7 +195,7 @@ func (m mockFetcher) FetchResponse() (*http.Response, error) {
"Content-Encoding": []string{"gzip"},
"Content-Type": []string{"application/openmetrics-text"},
},
Body: ioutil.NopCloser(body),
Body: io.NopCloser(body),
}, nil
}

@@ -576,7 +576,7 @@ func TestOpenMetrics(t *testing.T) {
for _, test := range tests {
t.Run(test.msg, func(t *testing.T) {
reporter := &mbtest.CapturingReporterV2{}
p.ReportProcessedMetrics(test.mapping, reporter)
_ = p.ReportProcessedMetrics(test.mapping, reporter)
assert.Nil(t, reporter.GetErrors(), test.msg)
// Sort slice to avoid randomness
res := reporter.GetEvents()
@@ -1062,7 +1062,7 @@ func TestOpenMetricsKeyLabels(t *testing.T) {
for _, tc := range testCases {
r := &mbtest.CapturingReporterV2{}
p := &openmetrics{mockFetcher{response: tc.openmetricsResponse}, logp.NewLogger("test")}
p.ReportProcessedMetrics(tc.mapping, r)
_ = p.ReportProcessedMetrics(tc.mapping, r)
if !assert.Nil(t, r.GetErrors(),
"error reporting/processing metrics, at %q", tc.testName) {
continue
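Several hunks above and below only prepend `_, _ =` to calls whose results were previously ignored implicitly; this is the conventional way to tell the errcheck linter that the discard is intentional. A standalone sketch in the spirit of the gzip writer used by these tests (not the test code itself):

```go
package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
)

func main() {
	body := bytes.NewBuffer(nil)
	writer := gzip.NewWriter(body)

	// Write returns (int, error); assigning both to the blank identifier
	// tells errcheck the results are discarded on purpose.
	_, _ = writer.Write([]byte("metric_total 1\n"))

	// Close returns a single error, so one blank assignment is enough.
	_ = writer.Close()

	fmt.Printf("compressed payload is %d bytes\n", body.Len())
}
```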
29 changes: 16 additions & 13 deletions metricbeat/helper/prometheus/prometheus.go
@@ -21,13 +21,10 @@ import (
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"net/http"
"regexp"
"time"

"github.com/pkg/errors"

"github.com/elastic/beats/v7/metricbeat/helper"
"github.com/elastic/beats/v7/metricbeat/mb"
"github.com/elastic/elastic-agent-libs/logp"
@@ -93,7 +90,7 @@ func (p *prometheus) GetFamilies() ([]*MetricFamily, error) {
}

if resp.StatusCode > 399 {
bodyBytes, err := ioutil.ReadAll(reader)
bodyBytes, err := io.ReadAll(reader)
if err == nil {
p.logger.Debug("error received from prometheus endpoint: ", string(bodyBytes))
}
@@ -129,8 +126,14 @@ func (p *prometheus) GetFamilies() ([]*MetricFamily, error) {
}

appendTime := time.Now().Round(0)
b, err := ioutil.ReadAll(reader)
b, err := io.ReadAll(reader)
if err != nil {
return nil, fmt.Errorf("failed to read response: %w", err)
}
families, err := ParseMetricFamilies(b, contentType, appendTime)
if err != nil {
return nil, fmt.Errorf("failed to parse families: %w", err)
}

return families, nil
}
@@ -192,15 +195,15 @@ func (p *prometheus) ProcessMetrics(families []*MetricFamily, mapping *MetricsMa
for k, v := range allLabels {
if l, ok := mapping.Labels[k]; ok {
if l.IsKey() {
keyLabels.Put(l.GetField(), v)
_, _ = keyLabels.Put(l.GetField(), v)
} else {
labels.Put(l.GetField(), v)
_, _ = labels.Put(l.GetField(), v)
}
} else if storeAllLabels {
// if label for this metric is not found at the label mappings but
// it is configured to store any labels found, make it so
// TODO dedot
labels.Put(labelsLocation+"."+k, v)
_, _ = labels.Put(labelsLocation+"."+k, v)
}
}

@@ -210,7 +213,7 @@ func (p *prometheus) ProcessMetrics(families []*MetricFamily, mapping *MetricsMa
// not considering these extra fields to be keylabels as that case
// have not appeared yet
for k, v := range extraFields {
labels.Put(k, v)
_, _ = labels.Put(k, v)
}

// Keep a info document if it's an infoMetric
@@ -226,7 +229,7 @@ func (p *prometheus) ProcessMetrics(families []*MetricFamily, mapping *MetricsMa
if field != "" {
event := getEvent(eventsMap, keyLabels)
update := mapstr.M{}
update.Put(field, value)
_, _ = update.Put(field, value)
// value may be a mapstr (for histograms and summaries), do a deep update to avoid smashing existing fields
event.DeepUpdate(update)

@@ -284,7 +287,7 @@ type infoMetricData struct {
func (p *prometheus) ReportProcessedMetrics(mapping *MetricsMapping, r mb.ReporterV2) error {
events, err := p.GetProcessedMetrics(mapping)
if err != nil {
return errors.Wrap(err, "error getting processed metrics")
return fmt.Errorf("error getting processed metrics: %w", err)
}
for _, event := range events {
r.Event(mb.Event{
@@ -310,7 +313,7 @@ func getLabels(metric *OpenMetric) mapstr.M {
labels := mapstr.M{}
for _, label := range metric.GetLabel() {
if label.Name != "" && label.Value != "" {
labels.Put(label.Name, label.Value)
_, _ = labels.Put(label.Name, label.Value)
}
}
return labels
@@ -324,7 +327,7 @@ func CompilePatternList(patterns *[]string) ([]*regexp.Regexp, error) {
for _, pattern := range *patterns {
r, err := regexp.Compile(pattern)
if err != nil {
return nil, errors.Wrapf(err, "compiling pattern '%s'", pattern)
return nil, fmt.Errorf("failed compiling pattern '%s': %w", pattern, err)
}
compiledPatterns = append(compiledPatterns, r)
}
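The new lines in GetFamilies check both the body read and the parse step and wrap each failure with %w, so the error message says which step failed. A simplified standalone sketch of the same read-then-parse shape, substituting encoding/json for the Prometheus parser:

```go
package main

import (
	"encoding/json"
	"fmt"
	"io"
	"strings"
)

type metricDoc struct {
	Name  string  `json:"name"`
	Value float64 `json:"value"`
}

// decodeBody reads everything from r and then parses it, checking and
// wrapping the error from each step, as the updated GetFamilies does.
func decodeBody(r io.Reader) (*metricDoc, error) {
	b, err := io.ReadAll(r)
	if err != nil {
		return nil, fmt.Errorf("failed to read response: %w", err)
	}
	var doc metricDoc
	if err := json.Unmarshal(b, &doc); err != nil {
		return nil, fmt.Errorf("failed to parse body: %w", err)
	}
	return &doc, nil
}

func main() {
	doc, err := decodeBody(strings.NewReader(`{"name":"up","value":1}`))
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("%s = %v\n", doc.Name, doc.Value)
}
```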
10 changes: 5 additions & 5 deletions metricbeat/helper/prometheus/prometheus_test.go
@@ -20,7 +20,7 @@ package prometheus
import (
"bytes"
"compress/gzip"
"io/ioutil"
"io"
"net/http"
"sort"
"testing"
@@ -188,15 +188,15 @@ var _ = httpfetcher(&mockFetcher{})
func (m mockFetcher) FetchResponse() (*http.Response, error) {
body := bytes.NewBuffer(nil)
writer := gzip.NewWriter(body)
writer.Write([]byte(m.response))
_, _ = writer.Write([]byte(m.response))
writer.Close()

return &http.Response{
StatusCode: 200,
Header: http.Header{
"Content-Encoding": []string{"gzip"},
},
Body: ioutil.NopCloser(body),
Body: io.NopCloser(body),
}, nil
}

@@ -514,7 +514,7 @@ func TestPrometheus(t *testing.T) {
for _, test := range tests {
t.Run(test.msg, func(t *testing.T) {
reporter := &mbtest.CapturingReporterV2{}
p.ReportProcessedMetrics(test.mapping, reporter)
_ = p.ReportProcessedMetrics(test.mapping, reporter)
assert.Nil(t, reporter.GetErrors(), test.msg)
// Sort slice to avoid randomness
res := reporter.GetEvents()
@@ -971,7 +971,7 @@ func TestPrometheusKeyLabels(t *testing.T) {
for _, tc := range testCases {
r := &mbtest.CapturingReporterV2{}
p := &prometheus{mockFetcher{response: tc.prometheusResponse}, logp.NewLogger("test")}
p.ReportProcessedMetrics(tc.mapping, r)
_ = p.ReportProcessedMetrics(tc.mapping, r)
if !assert.Nil(t, r.GetErrors(),
"error reporting/processing metrics, at %q", tc.testName) {
continue
10 changes: 5 additions & 5 deletions metricbeat/helper/prometheus/ptest/ptest.go
@@ -19,7 +19,7 @@ package ptest

import (
"encoding/json"
"io/ioutil"
"io"
"net/http"
"net/http/httptest"
"os"
@@ -54,13 +54,13 @@ func TestMetricSet(t *testing.T, module, metricset string, cases TestCases) {
file, err := os.Open(test.MetricsFile)
assert.NoError(t, err, "cannot open test file "+test.MetricsFile)

body, err := ioutil.ReadAll(file)
body, err := io.ReadAll(file)
assert.NoError(t, err, "cannot read test file "+test.MetricsFile)

server := httptest.NewUnstartedServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(200)
w.Header().Set("Content-Type", "text/plain; charset=ISO-8859-1")
w.Write([]byte(body))
_, _ = w.Write(body)
}))

server.Start()
@@ -83,12 +83,12 @@ func TestMetricSet(t *testing.T, module, metricset string, cases TestCases) {
return h1 < h2
})
eventsJSON, _ := json.MarshalIndent(events, "", "\t")
err = ioutil.WriteFile(test.ExpectedFile, eventsJSON, 0644)
err = os.WriteFile(test.ExpectedFile, eventsJSON, 0644)
assert.NoError(t, err)
}

// Read expected events from reference file
expected, err := ioutil.ReadFile(test.ExpectedFile)
expected, err := os.ReadFile(test.ExpectedFile)
if err != nil {
t.Fatal(err)
}
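ptest.go now writes and reads its expected-events files with os.WriteFile and os.ReadFile instead of the ioutil wrappers. A small standalone round trip in the same style (the file name and payload here are invented for the example):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir, err := os.MkdirTemp("", "golden")
	if err != nil {
		fmt.Println("mkdir failed:", err)
		return
	}
	defer os.RemoveAll(dir)

	expectedFile := filepath.Join(dir, "expected.json")

	// os.WriteFile replaces ioutil.WriteFile; 0644 matches the mode used in ptest.go.
	if err := os.WriteFile(expectedFile, []byte(`[{"metric":"up","value":1}]`), 0644); err != nil {
		fmt.Println("write failed:", err)
		return
	}

	// os.ReadFile replaces ioutil.ReadFile.
	expected, err := os.ReadFile(expectedFile)
	if err != nil {
		fmt.Println("read failed:", err)
		return
	}
	fmt.Println(string(expected))
}
```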
