Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion collector/remote_info.go
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,6 @@ func (ri *RemoteInfo) Collect(ch chan<- prometheus.Metric) {
)
return
}
ri.totalScrapes.Inc()

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why was this here? I see it at the top of the method as well. Have we been double counting?

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Apparently :D AI caught it

ri.up.Set(1)

// Remote Info
Expand Down
173 changes: 157 additions & 16 deletions collector/remote_info_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,38 +19,179 @@ import (
"net/http/httptest"
"net/url"
"os"
"strings"
"testing"

"github.com/prometheus/client_golang/prometheus/testutil"
"github.com/prometheus/common/promslog"
)

func TestRemoteInfoStats(t *testing.T) {
func TestRemoteInfo(t *testing.T) {
// Testcases created using:
// docker run -d -p 9200:9200 elasticsearch:VERSION-alpine
// curl http://localhost:9200/_cluster/settings/?include_defaults=true
files := []string{"../fixtures/settings-5.4.2.json", "../fixtures/settings-merge-5.4.2.json"}
for _, filename := range files {
f, _ := os.Open(filename)
defer f.Close()
for hn, handler := range map[string]http.Handler{
"plain": http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// curl http://localhost:9200/_remote/info

tests := []struct {
name string
file string
want string
}{
{
name: "7.15.0",
file: "../fixtures/remote_info/7.15.0.json",
want: `
# HELP elasticsearch_remote_info_max_connections_per_cluster Max connections per cluster
# TYPE elasticsearch_remote_info_max_connections_per_cluster gauge
elasticsearch_remote_info_max_connections_per_cluster{remote_cluster="cluster_remote_1"} 10
elasticsearch_remote_info_max_connections_per_cluster{remote_cluster="cluster_remote_2"} 5
# HELP elasticsearch_remote_info_num_nodes_connected Number of nodes connected
# TYPE elasticsearch_remote_info_num_nodes_connected gauge
elasticsearch_remote_info_num_nodes_connected{remote_cluster="cluster_remote_1"} 3
elasticsearch_remote_info_num_nodes_connected{remote_cluster="cluster_remote_2"} 0
# HELP elasticsearch_remote_info_num_proxy_sockets_connected Number of proxy sockets connected
# TYPE elasticsearch_remote_info_num_proxy_sockets_connected gauge
elasticsearch_remote_info_num_proxy_sockets_connected{remote_cluster="cluster_remote_1"} 5
elasticsearch_remote_info_num_proxy_sockets_connected{remote_cluster="cluster_remote_2"} 0
# HELP elasticsearch_remote_info_stats_json_parse_failures Number of errors while parsing JSON.
# TYPE elasticsearch_remote_info_stats_json_parse_failures counter
elasticsearch_remote_info_stats_json_parse_failures 0
# HELP elasticsearch_remote_info_stats_total_scrapes Current total ElasticSearch remote info scrapes.
# TYPE elasticsearch_remote_info_stats_total_scrapes counter
elasticsearch_remote_info_stats_total_scrapes 1
# HELP elasticsearch_remote_info_stats_up Was the last scrape of the ElasticSearch remote info endpoint successful.
# TYPE elasticsearch_remote_info_stats_up gauge
elasticsearch_remote_info_stats_up 1
`,
},
{
name: "8.0.0",
file: "../fixtures/remote_info/8.0.0.json",
want: `
# HELP elasticsearch_remote_info_max_connections_per_cluster Max connections per cluster
# TYPE elasticsearch_remote_info_max_connections_per_cluster gauge
elasticsearch_remote_info_max_connections_per_cluster{remote_cluster="prod_cluster"} 30
# HELP elasticsearch_remote_info_num_nodes_connected Number of nodes connected
# TYPE elasticsearch_remote_info_num_nodes_connected gauge
elasticsearch_remote_info_num_nodes_connected{remote_cluster="prod_cluster"} 15
# HELP elasticsearch_remote_info_num_proxy_sockets_connected Number of proxy sockets connected
# TYPE elasticsearch_remote_info_num_proxy_sockets_connected gauge
elasticsearch_remote_info_num_proxy_sockets_connected{remote_cluster="prod_cluster"} 25
# HELP elasticsearch_remote_info_stats_json_parse_failures Number of errors while parsing JSON.
# TYPE elasticsearch_remote_info_stats_json_parse_failures counter
elasticsearch_remote_info_stats_json_parse_failures 0
# HELP elasticsearch_remote_info_stats_total_scrapes Current total ElasticSearch remote info scrapes.
# TYPE elasticsearch_remote_info_stats_total_scrapes counter
elasticsearch_remote_info_stats_total_scrapes 1
# HELP elasticsearch_remote_info_stats_up Was the last scrape of the ElasticSearch remote info endpoint successful.
# TYPE elasticsearch_remote_info_stats_up gauge
elasticsearch_remote_info_stats_up 1
`,
},
{
name: "empty",
file: "../fixtures/remote_info/empty.json",
want: `
# HELP elasticsearch_remote_info_stats_json_parse_failures Number of errors while parsing JSON.
# TYPE elasticsearch_remote_info_stats_json_parse_failures counter
elasticsearch_remote_info_stats_json_parse_failures 0
# HELP elasticsearch_remote_info_stats_total_scrapes Current total ElasticSearch remote info scrapes.
# TYPE elasticsearch_remote_info_stats_total_scrapes counter
elasticsearch_remote_info_stats_total_scrapes 1
# HELP elasticsearch_remote_info_stats_up Was the last scrape of the ElasticSearch remote info endpoint successful.
# TYPE elasticsearch_remote_info_stats_up gauge
elasticsearch_remote_info_stats_up 1
`,
},
}

for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f, err := os.Open(tt.file)
if err != nil {
t.Fatal(err)
}
defer f.Close()

ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
io.Copy(w, f)
}),
} {
ts := httptest.NewServer(handler)
}))
defer ts.Close()

u, err := url.Parse(ts.URL)
if err != nil {
t.Fatalf("Failed to parse URL: %s", err)
t.Fatal(err)
}

c := NewRemoteInfo(promslog.NewNopLogger(), http.DefaultClient, u)
nsr, err := c.fetchAndDecodeRemoteInfoStats()
if err != nil {
t.Fatalf("Failed to fetch or decode remote info stats: %s", err)
t.Fatal(err)
}

if err := testutil.CollectAndCompare(c, strings.NewReader(tt.want)); err != nil {
t.Fatal(err)
}
t.Logf("[%s/%s] Remote Info Stats Response: %+v", hn, filename, nsr)
})
}
}

// TestRemoteInfoError checks the collector's behavior when the remote info
// endpoint returns an HTTP error: the scrape counter increments, no parse
// failure is recorded, and the "up" gauge reports 0.
func TestRemoteInfoError(t *testing.T) {
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
	}))
	defer srv.Close()

	endpoint, err := url.Parse(srv.URL)
	if err != nil {
		t.Fatal(err)
	}

	collector := NewRemoteInfo(promslog.NewNopLogger(), http.DefaultClient, endpoint)

	want := `
# HELP elasticsearch_remote_info_stats_json_parse_failures Number of errors while parsing JSON.
# TYPE elasticsearch_remote_info_stats_json_parse_failures counter
elasticsearch_remote_info_stats_json_parse_failures 0
# HELP elasticsearch_remote_info_stats_total_scrapes Current total ElasticSearch remote info scrapes.
# TYPE elasticsearch_remote_info_stats_total_scrapes counter
elasticsearch_remote_info_stats_total_scrapes 1
# HELP elasticsearch_remote_info_stats_up Was the last scrape of the ElasticSearch remote info endpoint successful.
# TYPE elasticsearch_remote_info_stats_up gauge
elasticsearch_remote_info_stats_up 0
`

	if err := testutil.CollectAndCompare(collector, strings.NewReader(want)); err != nil {
		t.Fatal(err)
	}
}

func TestRemoteInfoJSONParseError(t *testing.T) {
// Test JSON parse error handling
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte("invalid json"))
}))
defer ts.Close()

u, err := url.Parse(ts.URL)
if err != nil {
t.Fatal(err)
}

c := NewRemoteInfo(promslog.NewNopLogger(), http.DefaultClient, u)

expected := `
# HELP elasticsearch_remote_info_stats_json_parse_failures Number of errors while parsing JSON.
# TYPE elasticsearch_remote_info_stats_json_parse_failures counter
elasticsearch_remote_info_stats_json_parse_failures 1
# HELP elasticsearch_remote_info_stats_total_scrapes Current total ElasticSearch remote info scrapes.
# TYPE elasticsearch_remote_info_stats_total_scrapes counter
elasticsearch_remote_info_stats_total_scrapes 1
# HELP elasticsearch_remote_info_stats_up Was the last scrape of the ElasticSearch remote info endpoint successful.
# TYPE elasticsearch_remote_info_stats_up gauge
elasticsearch_remote_info_stats_up 0
`

}
if err := testutil.CollectAndCompare(c, strings.NewReader(expected)); err != nil {
t.Fatal(err)
}
}
20 changes: 20 additions & 0 deletions fixtures/remote_info/7.15.0.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
{
"cluster_remote_1": {
"seeds": ["192.168.1.100:9300", "192.168.1.101:9300"],
"connected": true,
"num_nodes_connected": 3,
"num_proxy_sockets_connected": 5,
"max_connections_per_cluster": 10,
"initial_connect_timeout": "30s",
"skip_unavailable": false
},
"cluster_remote_2": {
"seeds": ["10.0.0.50:9300"],
"connected": false,
"num_nodes_connected": 0,
"num_proxy_sockets_connected": 0,
"max_connections_per_cluster": 5,
"initial_connect_timeout": "30s",
"skip_unavailable": true
}
}
11 changes: 11 additions & 0 deletions fixtures/remote_info/8.0.0.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
{
"prod_cluster": {
"seeds": ["prod-es-1:9300", "prod-es-2:9300", "prod-es-3:9300"],
"connected": true,
"num_nodes_connected": 15,
"num_proxy_sockets_connected": 25,
"max_connections_per_cluster": 30,
"initial_connect_timeout": "60s",
"skip_unavailable": false
}
}
1 change: 1 addition & 0 deletions fixtures/remote_info/empty.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{}