diff --git a/x-pack/metricbeat/module/azure/azure_test.go b/x-pack/metricbeat/module/azure/azure_test.go
new file mode 100644
index 000000000000..d81f60a103d4
--- /dev/null
+++ b/x-pack/metricbeat/module/azure/azure_test.go
@@ -0,0 +1,34 @@
+package azure
+
+import (
+	"github.com/stretchr/testify/assert"
+	"testing"
+)
+
+func TestGroupMetricsDefinitionsByResourceId(t *testing.T) {
+
+	t.Run("Group metrics definitions by resource ID", func(t *testing.T) {
+		metrics := []Metric{
+			{
+				ResourceId: "test",
+				Namespace:  "test",
+				Names:      []string{"name1"},
+			},
+			{
+				ResourceId: "test",
+				Namespace:  "test",
+				Names:      []string{"name2"},
+			},
+			{
+				ResourceId: "test",
+				Namespace:  "test",
+				Names:      []string{"name3"},
+			},
+		}
+
+		metricsByResourceId := groupMetricsDefinitionsByResourceId(metrics)
+
+		assert.Equal(t, 1, len(metricsByResourceId))
+		assert.Equal(t, 3, len(metricsByResourceId["test"]))
+	})
+}
diff --git a/x-pack/metricbeat/module/azure/client_test.go b/x-pack/metricbeat/module/azure/client_test.go
index 79b1742ded0f..daae1bdbc820
--- a/x-pack/metricbeat/module/azure/client_test.go
+++ b/x-pack/metricbeat/module/azure/client_test.go
@@ -6,6 +6,8 @@ package azure
 
 import (
 	"errors"
+	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
+	"github.com/stretchr/testify/require"
 	"testing"
 	"time"
 
@@ -112,4 +114,154 @@ func TestGetMetricValues(t *testing.T) {
 		assert.Equal(t, len(client.ResourceConfigurations.Metrics[0].Values), 0)
 		m.AssertExpectations(t)
 	})
+
+	t.Run("multiple aggregation types", func(t *testing.T) {
+		client := NewMockClient()
+		referenceTime := time.Now().UTC()
+		client.ResourceConfigurations = ResourceConfiguration{
+			Metrics: []Metric{
+				{
+					Namespace:    "Microsoft.EventHub/Namespaces",
+					Names:        []string{"ActiveConnections"},
+					Aggregations: "Maximum,Minimum,Average",
+					TimeGrain:    "PT1M",
+				},
+			},
+		}
+
+		m := &MockService{}
+		m.On(
+			"GetMetricValues",
+			mock.Anything,
+			mock.Anything,
+			mock.Anything,
+			mock.Anything,
+			mock.Anything,
+			mock.Anything,
+			mock.Anything,
+		).Return(
+			[]armmonitor.Metric{{
+				ID: to.Ptr("test"),
+				Name: &armmonitor.LocalizableString{
+					Value:          to.Ptr("ActiveConnections"),
+					LocalizedValue: to.Ptr("ActiveConnections"),
+				},
+				Timeseries: []*armmonitor.TimeSeriesElement{{
+					Data: []*armmonitor.MetricValue{{
+						Average:   to.Ptr(1.0),
+						Maximum:   to.Ptr(2.0),
+						Minimum:   to.Ptr(3.0),
+						TimeStamp: to.Ptr(time.Now()),
+					}},
+				}},
+				Type:               to.Ptr("Microsoft.Insights/metrics"),
+				Unit:               to.Ptr(armmonitor.MetricUnit("Count")),
+				DisplayDescription: to.Ptr("Total Active Connections for Microsoft.EventHub."),
+				ErrorCode:          to.Ptr("Success"),
+			}},
+			"PT1M",
+			nil,
+		)
+
+		client.AzureMonitorService = m
+		mr := MockReporterV2{}
+
+		metricValues := client.GetMetricValues(referenceTime, client.ResourceConfigurations.Metrics, &mr)
+
+		require.Equal(t, len(metricValues), 1)
+		require.Equal(t, len(metricValues[0].Values), 1)
+
+		assert.Equal(t, *metricValues[0].Values[0].avg, 1.0)
+		assert.Equal(t, *metricValues[0].Values[0].max, 2.0)
+		assert.Equal(t, *metricValues[0].Values[0].min, 3.0)
+
+		require.Equal(t, len(client.ResourceConfigurations.Metrics[0].Values), 1)
+
+		m.AssertExpectations(t)
+	})
+
+	t.Run("single aggregation types", func(t *testing.T) {
+		client := NewMockClient()
+		referenceTime := time.Now().UTC()
+		timestamp := time.Now().UTC()
+		client.ResourceConfigurations = ResourceConfiguration{
+			Metrics: []Metric{
+				{
+					Namespace:    "Microsoft.EventHub/Namespaces",
+					Names:        []string{"ActiveConnections"},
+					Aggregations: "Maximum",
+					TimeGrain:    "PT1M",
+				}, {
+					Namespace:    "Microsoft.EventHub/Namespaces",
+					Names:        []string{"ActiveConnections"},
+					Aggregations: "Minimum",
+					TimeGrain:    "PT1M",
+				}, {
+					Namespace:    "Microsoft.EventHub/Namespaces",
+					Names:        []string{"ActiveConnections"},
+					Aggregations: "Average",
+					TimeGrain:    "PT1M",
+				},
+			},
+		}
+
+		m := &MockService{}
+
+		x := []struct {
+			aggregation string
+			data        []*armmonitor.MetricValue
+		}{
+			{aggregation: "Maximum", data: []*armmonitor.MetricValue{{Maximum: to.Ptr(3.0), TimeStamp: to.Ptr(timestamp)}}},
+			{aggregation: "Minimum", data: []*armmonitor.MetricValue{{Minimum: to.Ptr(1.0), TimeStamp: to.Ptr(timestamp)}}},
+			{aggregation: "Average", data: []*armmonitor.MetricValue{{Average: to.Ptr(2.0), TimeStamp: to.Ptr(timestamp)}}},
+		}
+
+		for _, v := range x {
+			m.On(
+				"GetMetricValues",
+				mock.Anything,
+				mock.Anything,
+				mock.Anything,
+				mock.Anything,
+				mock.Anything,
+				v.aggregation,
+				mock.Anything,
+			).Return(
+				[]armmonitor.Metric{{
+					ID: to.Ptr("test"),
+					Name: &armmonitor.LocalizableString{
+						Value:          to.Ptr("ActiveConnections"),
+						LocalizedValue: to.Ptr("ActiveConnections"),
+					},
+					Timeseries: []*armmonitor.TimeSeriesElement{{
+						Data: v.data,
+					}},
+					Type:               to.Ptr("Microsoft.Insights/metrics"),
+					Unit:               to.Ptr(armmonitor.MetricUnit("Count")),
+					DisplayDescription: to.Ptr("Total Active Connections for Microsoft.EventHub."),
+					ErrorCode:          to.Ptr("Success"),
+				}},
+				"PT1M",
+				nil,
+			).Once()
+		}
+
+		client.AzureMonitorService = m
+		mr := MockReporterV2{}
+
+		metricValues := client.GetMetricValues(referenceTime, client.ResourceConfigurations.Metrics, &mr)
+
+		require.Equal(t, 3, len(metricValues))
+		require.Equal(t, 1, len(metricValues[0].Values))
+
+		require.NotNil(t, metricValues[0].Values[0].max, "max value is nil")
+		require.NotNil(t, metricValues[0].Values[0].min, "min value is nil")
+		require.NotNil(t, metricValues[0].Values[0].avg, "avg value is nil")
+
+		assert.Equal(t, *metricValues[0].Values[0].max, 3.0)
+		assert.Equal(t, *metricValues[2].Values[0].min, 1.0)
+		assert.Equal(t, *metricValues[1].Values[0].avg, 2.0)
+
+		m.AssertExpectations(t)
+	})
 }
diff --git a/x-pack/metricbeat/module/azure/data.go b/x-pack/metricbeat/module/azure/data.go
index c46aee9da246..b2fffb404262
--- a/x-pack/metricbeat/module/azure/data.go
+++ b/x-pack/metricbeat/module/azure/data.go
@@ -133,41 +133,8 @@ func mapToKeyValuePoints(metrics []Metric) []KeyValuePoint {
 	var points []KeyValuePoint
 	for _, metric := range metrics {
 		for _, value := range metric.Values {
-			point := KeyValuePoint{
-				Timestamp:  value.timestamp,
-				Dimensions: mapstr.M{},
-			}
-
 			metricName := managePropertyName(value.name)
-			switch {
-			case value.min != nil:
-				point.Key = fmt.Sprintf("%s.%s", metricName, "min")
-				point.Value = value.min
-			case value.max != nil:
-				point.Key = fmt.Sprintf("%s.%s", metricName, "max")
-				point.Value = value.max
-			case value.avg != nil:
-				point.Key = fmt.Sprintf("%s.%s", metricName, "avg")
-				point.Value = value.avg
-			case value.total != nil:
-				point.Key = fmt.Sprintf("%s.%s", metricName, "total")
-				point.Value = value.total
-			case value.count != nil:
-				point.Key = fmt.Sprintf("%s.%s", metricName, "count")
-				point.Value = value.count
-			}
-
-			point.Namespace = metric.Namespace
-			point.ResourceId = metric.ResourceId
-			point.ResourceSubId = metric.ResourceSubId
-			point.TimeGrain = metric.TimeGrain
-
-			// The number of dimensions in the metric definition and the
-			// number of dimensions in the metric values should be the same.
-			//
-			// But, since definitions and values are retrieved from different
-			// API endpoints, we need to make sure that we don't panic if the
-			// number of dimensions is different.
+			dimensions := mapstr.M{}
 			if len(metric.Dimensions) == len(value.dimensions) {
 				// Take the dimension name from the metric definition and the
 				// dimension value from the metric value.
@@ -180,11 +147,75 @@ func mapToKeyValuePoints(metrics []Metric) []KeyValuePoint {
 					// Dimensions from metric definition and metric value are
 					// not guaranteed to be in the same order, so we need to
 					// find by name the right value for each dimension.
-					_, _ = point.Dimensions.Put(dim.Name, getDimensionValue(dim.Name, value.dimensions))
+					// _, _ = point.Dimensions.Put(dim.Name, getDimensionValue(dim.Name, value.dimensions))
+					_, _ = dimensions.Put(dim.Name, getDimensionValue(dim.Name, value.dimensions))
 				}
 			}
 
-			points = append(points, point)
+			if value.min != nil {
+				points = append(points, KeyValuePoint{
+					Key:           fmt.Sprintf("%s.%s", metricName, "min"),
+					Value:         value.min,
+					Namespace:     metric.Namespace,
+					ResourceId:    metric.ResourceId,
+					ResourceSubId: metric.ResourceSubId,
+					TimeGrain:     metric.TimeGrain,
+					Dimensions:    dimensions,
+					Timestamp:     value.timestamp,
+				})
+			}
+
+			if value.max != nil {
+				points = append(points, KeyValuePoint{
+					Key:           fmt.Sprintf("%s.%s", metricName, "max"),
+					Value:         value.max,
+					Namespace:     metric.Namespace,
+					ResourceId:    metric.ResourceId,
+					ResourceSubId: metric.ResourceSubId,
+					TimeGrain:     metric.TimeGrain,
+					Dimensions:    dimensions,
+					Timestamp:     value.timestamp,
+				})
+			}
+
+			if value.avg != nil {
+				points = append(points, KeyValuePoint{
+					Key:           fmt.Sprintf("%s.%s", metricName, "avg"),
+					Value:         value.avg,
+					Namespace:     metric.Namespace,
+					ResourceId:    metric.ResourceId,
+					ResourceSubId: metric.ResourceSubId,
+					TimeGrain:     metric.TimeGrain,
+					Dimensions:    dimensions,
+					Timestamp:     value.timestamp,
+				})
+			}
+
+			if value.total != nil {
+				points = append(points, KeyValuePoint{
+					Key:           fmt.Sprintf("%s.%s", metricName, "total"),
+					Value:         value.total,
+					Namespace:     metric.Namespace,
+					ResourceId:    metric.ResourceId,
+					ResourceSubId: metric.ResourceSubId,
+					TimeGrain:     metric.TimeGrain,
+					Dimensions:    dimensions,
+					Timestamp:     value.timestamp,
+				})
+			}
+
+			if value.count != nil {
+				points = append(points, KeyValuePoint{
+					Key:           fmt.Sprintf("%s.%s", metricName, "count"),
+					Value:         value.count,
+					Namespace:     metric.Namespace,
+					ResourceId:    metric.ResourceId,
+					ResourceSubId: metric.ResourceSubId,
+					TimeGrain:     metric.TimeGrain,
+					Dimensions:    dimensions,
+					Timestamp:     value.timestamp,
+				})
+			}
 		}
 	}
 
diff --git a/x-pack/metricbeat/module/azure/data_test.go b/x-pack/metricbeat/module/azure/data_test.go
index 85b781ed64ec..b79eb7807132
--- a/x-pack/metricbeat/module/azure/data_test.go
+++ b/x-pack/metricbeat/module/azure/data_test.go
@@ -62,7 +62,60 @@ func TestMapToKeyValuePoints(t *testing.T) {
 	resourceSubId := "test"
 	timeGrain := "PT1M"
 
-	t.Run("test aggregation types", func(t *testing.T) {
+	t.Run("test multiple aggregation types", func(t *testing.T) {
+		metrics := []Metric{{
+			Namespace:    namespace,
+			Names:        []string{"test"},
+			Aggregations: "Minimum,Maximum,Average",
+			Values: []MetricValue{
+				{name: metricName, min: &minValue, timestamp: timestamp},
+				{name: metricName, max: &maxValue, timestamp: timestamp},
+				{name: metricName, avg: &avgValue, timestamp: timestamp},
+			},
+			TimeGrain:     timeGrain,
+			ResourceId:    resourceId,
+			ResourceSubId: resourceSubId,
+		}}
+
+		actual := mapToKeyValuePoints(metrics)
+
+		expected := []KeyValuePoint{
+			{
+				Key:           fmt.Sprintf("%s.%s", metricName, "min"),
+				Value:         &minValue,
+				Namespace:     namespace,
+				TimeGrain:     timeGrain,
+				Timestamp:     timestamp,
+				ResourceId:    resourceId,
+				ResourceSubId: resourceSubId,
+				Dimensions:    map[string]interface{}{},
+			},
+			{
+				Key:           fmt.Sprintf("%s.%s", metricName, "max"),
+				Value:         &maxValue,
+				Namespace:     namespace,
+				TimeGrain:     timeGrain,
+				Timestamp:     timestamp,
+				ResourceId:    resourceId,
+				ResourceSubId: resourceSubId,
+				Dimensions:    map[string]interface{}{},
+			},
+			{
+				Key:           fmt.Sprintf("%s.%s", metricName, "avg"),
+				Value:         &avgValue,
+				Namespace:     namespace,
+				TimeGrain:     timeGrain,
+				Timestamp:     timestamp,
+				ResourceId:    resourceId,
+				ResourceSubId: resourceSubId,
+				Dimensions:    map[string]interface{}{},
+			},
+		}
+
+		assert.Equal(t, expected, actual)
+	})
+
+	t.Run("test single aggregation types", func(t *testing.T) {
 		metrics := []Metric{{
 			Namespace:    namespace,