diff --git a/collector/remote_info.go b/collector/remote_info.go
index eca15deb..44af0ba7 100644
--- a/collector/remote_info.go
+++ b/collector/remote_info.go
@@ -161,7 +161,6 @@ func (ri *RemoteInfo) Collect(ch chan<- prometheus.Metric) {
 		)
 		return
 	}
-	ri.totalScrapes.Inc()
 	ri.up.Set(1)
 
 	// Remote Info
diff --git a/collector/remote_info_test.go b/collector/remote_info_test.go
index 1e9159a0..8940877c 100644
--- a/collector/remote_info_test.go
+++ b/collector/remote_info_test.go
@@ -19,38 +19,179 @@ import (
 	"net/http/httptest"
 	"net/url"
 	"os"
+	"strings"
 	"testing"
 
+	"github.com/prometheus/client_golang/prometheus/testutil"
 	"github.com/prometheus/common/promslog"
 )
 
-func TestRemoteInfoStats(t *testing.T) {
+func TestRemoteInfo(t *testing.T) {
 	// Testcases created using:
 	//  docker run -d -p 9200:9200 elasticsearch:VERSION-alpine
-	//  curl http://localhost:9200/_cluster/settings/?include_defaults=true
-	files := []string{"../fixtures/settings-5.4.2.json", "../fixtures/settings-merge-5.4.2.json"}
-	for _, filename := range files {
-		f, _ := os.Open(filename)
-		defer f.Close()
-		for hn, handler := range map[string]http.Handler{
-			"plain": http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+	//  curl http://localhost:9200/_remote/info
+
+	tests := []struct {
+		name string
+		file string
+		want string
+	}{
+		{
+			name: "7.15.0",
+			file: "../fixtures/remote_info/7.15.0.json",
+			want: `
+				# HELP elasticsearch_remote_info_max_connections_per_cluster Max connections per cluster
+				# TYPE elasticsearch_remote_info_max_connections_per_cluster gauge
+				elasticsearch_remote_info_max_connections_per_cluster{remote_cluster="cluster_remote_1"} 10
+				elasticsearch_remote_info_max_connections_per_cluster{remote_cluster="cluster_remote_2"} 5
+				# HELP elasticsearch_remote_info_num_nodes_connected Number of nodes connected
+				# TYPE elasticsearch_remote_info_num_nodes_connected gauge
+				elasticsearch_remote_info_num_nodes_connected{remote_cluster="cluster_remote_1"} 3
+				elasticsearch_remote_info_num_nodes_connected{remote_cluster="cluster_remote_2"} 0
+				# HELP elasticsearch_remote_info_num_proxy_sockets_connected Number of proxy sockets connected
+				# TYPE elasticsearch_remote_info_num_proxy_sockets_connected gauge
+				elasticsearch_remote_info_num_proxy_sockets_connected{remote_cluster="cluster_remote_1"} 5
+				elasticsearch_remote_info_num_proxy_sockets_connected{remote_cluster="cluster_remote_2"} 0
+				# HELP elasticsearch_remote_info_stats_json_parse_failures Number of errors while parsing JSON.
+				# TYPE elasticsearch_remote_info_stats_json_parse_failures counter
+				elasticsearch_remote_info_stats_json_parse_failures 0
+				# HELP elasticsearch_remote_info_stats_total_scrapes Current total ElasticSearch remote info scrapes.
+				# TYPE elasticsearch_remote_info_stats_total_scrapes counter
+				elasticsearch_remote_info_stats_total_scrapes 1
+				# HELP elasticsearch_remote_info_stats_up Was the last scrape of the ElasticSearch remote info endpoint successful.
+				# TYPE elasticsearch_remote_info_stats_up gauge
+				elasticsearch_remote_info_stats_up 1
+			`,
+		},
+		{
+			name: "8.0.0",
+			file: "../fixtures/remote_info/8.0.0.json",
+			want: `
+				# HELP elasticsearch_remote_info_max_connections_per_cluster Max connections per cluster
+				# TYPE elasticsearch_remote_info_max_connections_per_cluster gauge
+				elasticsearch_remote_info_max_connections_per_cluster{remote_cluster="prod_cluster"} 30
+				# HELP elasticsearch_remote_info_num_nodes_connected Number of nodes connected
+				# TYPE elasticsearch_remote_info_num_nodes_connected gauge
+				elasticsearch_remote_info_num_nodes_connected{remote_cluster="prod_cluster"} 15
+				# HELP elasticsearch_remote_info_num_proxy_sockets_connected Number of proxy sockets connected
+				# TYPE elasticsearch_remote_info_num_proxy_sockets_connected gauge
+				elasticsearch_remote_info_num_proxy_sockets_connected{remote_cluster="prod_cluster"} 25
+				# HELP elasticsearch_remote_info_stats_json_parse_failures Number of errors while parsing JSON.
+				# TYPE elasticsearch_remote_info_stats_json_parse_failures counter
+				elasticsearch_remote_info_stats_json_parse_failures 0
+				# HELP elasticsearch_remote_info_stats_total_scrapes Current total ElasticSearch remote info scrapes.
+				# TYPE elasticsearch_remote_info_stats_total_scrapes counter
+				elasticsearch_remote_info_stats_total_scrapes 1
+				# HELP elasticsearch_remote_info_stats_up Was the last scrape of the ElasticSearch remote info endpoint successful.
+				# TYPE elasticsearch_remote_info_stats_up gauge
+				elasticsearch_remote_info_stats_up 1
+			`,
+		},
+		{
+			name: "empty",
+			file: "../fixtures/remote_info/empty.json",
+			want: `
+				# HELP elasticsearch_remote_info_stats_json_parse_failures Number of errors while parsing JSON.
+				# TYPE elasticsearch_remote_info_stats_json_parse_failures counter
+				elasticsearch_remote_info_stats_json_parse_failures 0
+				# HELP elasticsearch_remote_info_stats_total_scrapes Current total ElasticSearch remote info scrapes.
+				# TYPE elasticsearch_remote_info_stats_total_scrapes counter
+				elasticsearch_remote_info_stats_total_scrapes 1
+				# HELP elasticsearch_remote_info_stats_up Was the last scrape of the ElasticSearch remote info endpoint successful.
+				# TYPE elasticsearch_remote_info_stats_up gauge
+				elasticsearch_remote_info_stats_up 1
+			`,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			f, err := os.Open(tt.file)
+			if err != nil {
+				t.Fatal(err)
+			}
+			defer f.Close()
+
+			ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 				io.Copy(w, f)
-			}),
-		} {
-			ts := httptest.NewServer(handler)
+			}))
 			defer ts.Close()
 
 			u, err := url.Parse(ts.URL)
 			if err != nil {
-				t.Fatalf("Failed to parse URL: %s", err)
+				t.Fatal(err)
 			}
+
 			c := NewRemoteInfo(promslog.NewNopLogger(), http.DefaultClient, u)
-			nsr, err := c.fetchAndDecodeRemoteInfoStats()
 			if err != nil {
-				t.Fatalf("Failed to fetch or decode remote info stats: %s", err)
+				t.Fatal(err)
+			}
+
+			if err := testutil.CollectAndCompare(c, strings.NewReader(tt.want)); err != nil {
+				t.Fatal(err)
 			}
-			t.Logf("[%s/%s] Remote Info Stats Response: %+v", hn, filename, nsr)
+		})
+	}
+}
+
+func TestRemoteInfoError(t *testing.T) {
+	// Test error handling when endpoint is unavailable
+	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
+	}))
+	defer ts.Close()
+
+	u, err := url.Parse(ts.URL)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	c := NewRemoteInfo(promslog.NewNopLogger(), http.DefaultClient, u)
+
+	expected := `
+		# HELP elasticsearch_remote_info_stats_json_parse_failures Number of errors while parsing JSON.
+		# TYPE elasticsearch_remote_info_stats_json_parse_failures counter
+		elasticsearch_remote_info_stats_json_parse_failures 0
+		# HELP elasticsearch_remote_info_stats_total_scrapes Current total ElasticSearch remote info scrapes.
+		# TYPE elasticsearch_remote_info_stats_total_scrapes counter
+		elasticsearch_remote_info_stats_total_scrapes 1
+		# HELP elasticsearch_remote_info_stats_up Was the last scrape of the ElasticSearch remote info endpoint successful.
+		# TYPE elasticsearch_remote_info_stats_up gauge
+		elasticsearch_remote_info_stats_up 0
+	`
+
+	if err := testutil.CollectAndCompare(c, strings.NewReader(expected)); err != nil {
+		t.Fatal(err)
+	}
+}
+
+func TestRemoteInfoJSONParseError(t *testing.T) {
+	// Test JSON parse error handling
+	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		w.Write([]byte("invalid json"))
+	}))
+	defer ts.Close()
+
+	u, err := url.Parse(ts.URL)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	c := NewRemoteInfo(promslog.NewNopLogger(), http.DefaultClient, u)
+
+	expected := `
+		# HELP elasticsearch_remote_info_stats_json_parse_failures Number of errors while parsing JSON.
+		# TYPE elasticsearch_remote_info_stats_json_parse_failures counter
+		elasticsearch_remote_info_stats_json_parse_failures 1
+		# HELP elasticsearch_remote_info_stats_total_scrapes Current total ElasticSearch remote info scrapes.
+		# TYPE elasticsearch_remote_info_stats_total_scrapes counter
+		elasticsearch_remote_info_stats_total_scrapes 1
+		# HELP elasticsearch_remote_info_stats_up Was the last scrape of the ElasticSearch remote info endpoint successful.
+		# TYPE elasticsearch_remote_info_stats_up gauge
+		elasticsearch_remote_info_stats_up 0
+	`
-		}
+	if err := testutil.CollectAndCompare(c, strings.NewReader(expected)); err != nil {
+		t.Fatal(err)
 	}
 }
diff --git a/fixtures/remote_info/7.15.0.json b/fixtures/remote_info/7.15.0.json
new file mode 100644
index 00000000..deb67cc2
--- /dev/null
+++ b/fixtures/remote_info/7.15.0.json
@@ -0,0 +1,20 @@
+{
+  "cluster_remote_1": {
+    "seeds": ["192.168.1.100:9300", "192.168.1.101:9300"],
+    "connected": true,
+    "num_nodes_connected": 3,
+    "num_proxy_sockets_connected": 5,
+    "max_connections_per_cluster": 10,
+    "initial_connect_timeout": "30s",
+    "skip_unavailable": false
+  },
+  "cluster_remote_2": {
+    "seeds": ["10.0.0.50:9300"],
+    "connected": false,
+    "num_nodes_connected": 0,
+    "num_proxy_sockets_connected": 0,
+    "max_connections_per_cluster": 5,
+    "initial_connect_timeout": "30s",
+    "skip_unavailable": true
+  }
+}
\ No newline at end of file
diff --git a/fixtures/remote_info/8.0.0.json b/fixtures/remote_info/8.0.0.json
new file mode 100644
index 00000000..2ab1705b
--- /dev/null
+++ b/fixtures/remote_info/8.0.0.json
@@ -0,0 +1,11 @@
+{
+  "prod_cluster": {
+    "seeds": ["prod-es-1:9300", "prod-es-2:9300", "prod-es-3:9300"],
+    "connected": true,
+    "num_nodes_connected": 15,
+    "num_proxy_sockets_connected": 25,
+    "max_connections_per_cluster": 30,
+    "initial_connect_timeout": "60s",
+    "skip_unavailable": false
+  }
+}
\ No newline at end of file
diff --git a/fixtures/remote_info/empty.json b/fixtures/remote_info/empty.json
new file mode 100644
index 00000000..9e26dfee
--- /dev/null
+++ b/fixtures/remote_info/empty.json
@@ -0,0 +1 @@
+{}
\ No newline at end of file