Skip to content

Commit

Permalink
Instead of supplying metric metadata and labels for the aws calculator, suppl…
Browse files Browse the repository at this point in the history
…y an aws metric key which consists of metric metadata and labels (#17207)

* Change to use metric key instead of metric and labels

* Using GoImport to fix linter

* Using GoImport to fix linter

* Using GoImport to fix linter

* Change description and change name

* Change description and change name
  • Loading branch information
khanhntd committed Feb 2, 2023
1 parent 4b18609 commit 6b07384
Show file tree
Hide file tree
Showing 7 changed files with 50 additions and 62 deletions.
11 changes: 11 additions & 0 deletions .chloggen/use-key-with-aws-calculator.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: enhancement

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: awsemfexporter

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Instead of supplying metric metadata and labels for the aws calculator, supply an aws metric key which consists of metric metadata and labels

# One or more tracking issues related to the change
issues: [17207]
23 changes: 5 additions & 18 deletions exporter/awsemfexporter/datapoint.go
Original file line number Diff line number Diff line change
Expand Up @@ -65,24 +65,11 @@ type dataPoints interface {
// deltaMetricMetadata describes a metric stream for delta adjustment:
// whether cumulative values should be converted to deltas, plus the
// identifying fields (metric name and CloudWatch log destination) that
// distinguish one stream from another.
type deltaMetricMetadata struct {
adjustToDelta bool // true when cumulative values must be converted to deltas
metricName string
timestampMs int64
namespace string
logGroup string
logStream string
}

// mergeLabels combines the delta metric metadata destination fields
// (namespace, logGroup, logStream) with the given label set into a single
// map, used to key the metric calculator's cache. Entries in labels
// overwrite the seeded metadata keys on name collision.
func mergeLabels(m deltaMetricMetadata, labels map[string]string) map[string]string {
result := map[string]string{
"namespace": m.namespace,
"logGroup": m.logGroup,
"logStream": m.logStream,
}
// caller-supplied labels take precedence over the seeded keys
for k, v := range labels {
result[k] = v
}
return result
}

// numberDataPointSlice is a wrapper for pmetric.NumberDataPointSlice
type numberDataPointSlice struct {
instrumentationLibraryName string
Expand Down Expand Up @@ -125,8 +112,8 @@ func (dps numberDataPointSlice) At(i int) (dataPoint, bool) {
retained := true
if dps.adjustToDelta {
var deltaVal interface{}
deltaVal, retained = deltaMetricCalculator.Calculate(dps.metricName, mergeLabels(dps.deltaMetricMetadata, labels),
metricVal, metric.Timestamp().AsTime())
mKey := aws.NewKey(dps.deltaMetricMetadata, labels)
deltaVal, retained = deltaMetricCalculator.Calculate(mKey, metricVal, metric.Timestamp().AsTime())
if !retained {
return dataPoint{}, retained
}
Expand Down Expand Up @@ -173,8 +160,9 @@ func (dps summaryDataPointSlice) At(i int) (dataPoint, bool) {
retained := true
if dps.adjustToDelta {
var delta interface{}
delta, retained = summaryMetricCalculator.Calculate(dps.metricName, mergeLabels(dps.deltaMetricMetadata, labels),
summaryMetricEntry{metric.Sum(), metric.Count()}, metric.Timestamp().AsTime())
mKey := aws.NewKey(dps.deltaMetricMetadata, labels)

delta, retained = summaryMetricCalculator.Calculate(mKey, summaryMetricEntry{sum, count}, metric.Timestamp().AsTime())
if !retained {
return dataPoint{}, retained
}
Expand Down Expand Up @@ -221,7 +209,6 @@ func getDataPoints(pmd pmetric.Metric, metadata cWMetricMetadata, logger *zap.Lo
adjusterMetadata := deltaMetricMetadata{
false,
pmd.Name(),
metadata.timestampMs,
metadata.namespace,
metadata.logGroup,
metadata.logStream,
Expand Down
6 changes: 0 additions & 6 deletions exporter/awsemfexporter/datapoint_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -317,7 +317,6 @@ func TestIntDataPointSliceAt(t *testing.T) {
deltaMetricMetadata{
tc.adjustToDelta,
"foo",
0,
"namespace",
"log-group",
"log-stream",
Expand Down Expand Up @@ -387,7 +386,6 @@ func TestDoubleDataPointSliceAt(t *testing.T) {
deltaMetricMetadata{
tc.adjustToDelta,
"foo",
0,
"namespace",
"log-group",
"log-stream",
Expand Down Expand Up @@ -507,7 +505,6 @@ func TestSummaryDataPointSliceAt(t *testing.T) {
setupDataPointCache()

instrLibName := "cloudwatch-otel"
metadataTimeStamp := time.Now().UnixNano() / int64(time.Millisecond)

testCases := []struct {
testName string
Expand Down Expand Up @@ -552,7 +549,6 @@ func TestSummaryDataPointSliceAt(t *testing.T) {
deltaMetricMetadata{
true,
"foo",
metadataTimeStamp,
"namespace",
"log-group",
"log-stream",
Expand Down Expand Up @@ -625,15 +621,13 @@ func TestGetDataPoints(t *testing.T) {
dmm := deltaMetricMetadata{
false,
"foo",
metadata.timestampMs,
"namespace",
"log-group",
"log-stream",
}
cumulativeDmm := deltaMetricMetadata{
true,
"foo",
metadata.timestampMs,
"namespace",
"log-group",
"log-stream",
Expand Down
6 changes: 1 addition & 5 deletions exporter/awsemfexporter/grouped_metric.go
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ func addToGroupedMetric(pmd pmetric.Metric, groupedMetrics map[interface{}]*grou
}

// Extra params to use when grouping metrics
groupKey := groupedMetricKey(metadata.groupedMetricMetadata, labels)
groupKey := aws.NewKey(metadata, labels)
if _, ok := groupedMetrics[groupKey]; ok {
// if MetricName already exists in metrics map, print warning log
if _, ok := groupedMetrics[groupKey].metrics[metricName]; ok {
Expand Down Expand Up @@ -178,10 +178,6 @@ func mapGetHelper(labels map[string]string, key string) string {
return ""
}

func groupedMetricKey(metadata groupedMetricMetadata, labels map[string]string) aws.Key {
return aws.NewKey(metadata, labels)
}

func translateUnit(metric pmetric.Metric, descriptor map[string]MetricDescriptor) string {
unit := metric.Unit()
if descriptor, exists := descriptor[metric.Name()]; exists {
Expand Down
12 changes: 6 additions & 6 deletions internal/aws/metrics/metric_calculator.go
Original file line number Diff line number Diff line change
Expand Up @@ -61,11 +61,11 @@ func NewMetricCalculator(calculateFunc CalculateFunc) MetricCalculator {
}
}

// Calculate accepts a new metric value identified by matricName and labels, and delegates
// the calculation with value and timestamp back to CalculateFunc for the result. Returns
// Calculate accepts a new metric value identified by a metric key (consisting of metric metadata and labels),
// https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/eacfde3fcbd46ba60a6db0e9a41977390c4883bd/internal/aws/metrics/metric_calculator.go#L88-L91
// and delegates the calculation with the value and timestamp to CalculateFunc for the result. Returns
// true if the calculation is executed successfully.
func (rm *MetricCalculator) Calculate(metricName string, labels map[string]string, value interface{}, timestamp time.Time) (interface{}, bool) {
k := NewKey(metricName, labels)
func (rm *MetricCalculator) Calculate(mKey Key, value interface{}, timestamp time.Time) (interface{}, bool) {
cacheStore := rm.cache

var result interface{}
Expand All @@ -74,10 +74,10 @@ func (rm *MetricCalculator) Calculate(metricName string, labels map[string]strin
rm.lock.Lock()
defer rm.lock.Unlock()

prev, exists := cacheStore.Get(k)
prev, exists := cacheStore.Get(mKey)
result, done = rm.calculateFunc(prev, value, timestamp)
if !exists || done {
cacheStore.Set(k, MetricValue{
cacheStore.Set(mKey, MetricValue{
RawValue: value,
Timestamp: timestamp,
})
Expand Down
50 changes: 25 additions & 25 deletions internal/aws/metrics/metric_calculator_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -24,37 +24,37 @@ import (
)

func TestFloat64RateCalculator(t *testing.T) {
MetricMetadata := "rate"
mKey := NewKey("rate", nil)
initTime := time.Now()
c := newFloat64RateCalculator()
r, ok := c.Calculate(MetricMetadata, nil, float64(50), initTime)
r, ok := c.Calculate(mKey, float64(50), initTime)
assert.False(t, ok)
assert.Equal(t, float64(0), r)

nextTime := initTime.Add(100 * time.Millisecond)
r, ok = c.Calculate(MetricMetadata, nil, float64(100), nextTime)
r, ok = c.Calculate(mKey, float64(100), nextTime)
assert.True(t, ok)
assert.InDelta(t, 0.5, r, 0.1)
}

func TestFloat64RateCalculatorWithTooFrequentUpdate(t *testing.T) {
MetricMetadata := "rate"
mKey := NewKey("rate", nil)
initTime := time.Now()
c := newFloat64RateCalculator()
r, ok := c.Calculate(MetricMetadata, nil, float64(50), initTime)
r, ok := c.Calculate(mKey, float64(50), initTime)
assert.False(t, ok)
assert.Equal(t, float64(0), r)

nextTime := initTime
for i := 0; i < 10; i++ {
nextTime = nextTime.Add(5 * time.Millisecond)
r, ok = c.Calculate(MetricMetadata, nil, float64(105), nextTime)
r, ok = c.Calculate(mKey, float64(105), nextTime)
assert.False(t, ok)
assert.Equal(t, float64(0), r)
}

nextTime = nextTime.Add(5 * time.Millisecond)
r, ok = c.Calculate(MetricMetadata, nil, float64(105), nextTime)
r, ok = c.Calculate(mKey, float64(105), nextTime)
assert.True(t, ok)
assert.InDelta(t, 1, r, 0.1)
}
Expand All @@ -73,13 +73,13 @@ func newFloat64RateCalculator() MetricCalculator {
}

func TestFloat64DeltaCalculator(t *testing.T) {
MetricMetadata := "delta"
mKey := NewKey("delta", nil)
initTime := time.Now()
c := NewFloat64DeltaCalculator()

testCases := []float64{0.1, 0.1, 0.5, 1.3, 1.9, 2.5, 5, 24.2, 103}
for i, f := range testCases {
r, ok := c.Calculate(MetricMetadata, nil, f, initTime)
r, ok := c.Calculate(mKey, f, initTime)
assert.Equal(t, i > 0, ok)
if i == 0 {
assert.Equal(t, float64(0), r)
Expand All @@ -90,13 +90,13 @@ func TestFloat64DeltaCalculator(t *testing.T) {
}

func TestFloat64DeltaCalculatorWithDecreasingValues(t *testing.T) {
MetricMetadata := "delta"
mKey := NewKey("delta", nil)
initTime := time.Now()
c := NewFloat64DeltaCalculator()

testCases := []float64{108, 106, 56.2, 28.8, 10, 10, 3, -1, -100}
for i, f := range testCases {
r, ok := c.Calculate(MetricMetadata, nil, f, initTime)
r, ok := c.Calculate(mKey, f, initTime)
assert.Equal(t, i > 0, ok)
if ok {
assert.Equal(t, testCases[i]-testCases[i-1], r)
Expand Down Expand Up @@ -186,19 +186,19 @@ func TestMapKeyEquals(t *testing.T) {
labelMap2["k2"] = "v2"
labelMap2["k1"] = "v1"

key1 := NewKey("name", labelMap1)
key2 := NewKey("name", labelMap2)
assert.Equal(t, key1, key2)
mKey1 := NewKey("name", labelMap1)
mKey2 := NewKey("name", labelMap2)
assert.Equal(t, mKey1, mKey2)

key1 = NewKey(mockKey{
mKey1 = NewKey(mockKey{
name: "name",
index: 1,
}, labelMap1)
key2 = NewKey(mockKey{
mKey2 = NewKey(mockKey{
name: "name",
index: 1,
}, labelMap2)
assert.Equal(t, key1, key2)
assert.Equal(t, mKey1, mKey2)
}

func TestMapKeyNotEqualOnName(t *testing.T) {
Expand All @@ -210,23 +210,23 @@ func TestMapKeyNotEqualOnName(t *testing.T) {
labelMap2["k2"] = "v2"
labelMap2["k1"] = "v1"

key1 := NewKey("name1", labelMap1)
key2 := NewKey("name2", labelMap2)
assert.NotEqual(t, key1, key2)
mKey1 := NewKey("name1", labelMap1)
mKey2 := NewKey("name2", labelMap2)
assert.NotEqual(t, mKey1, mKey2)

key1 = NewKey(mockKey{
mKey1 = NewKey(mockKey{
name: "name",
index: 1,
}, labelMap1)
key2 = NewKey(mockKey{
mKey2 = NewKey(mockKey{
name: "name",
index: 2,
}, labelMap2)
assert.NotEqual(t, key1, key2)
assert.NotEqual(t, mKey1, mKey2)

key2 = NewKey(mockKey{
mKey2 = NewKey(mockKey{
name: "name0",
index: 1,
}, labelMap2)
assert.NotEqual(t, key1, key2)
assert.NotEqual(t, mKey1, mKey2)
}
Original file line number Diff line number Diff line change
Expand Up @@ -139,8 +139,8 @@ func newFloat64RateCalculator() awsmetrics.MetricCalculator {

func assignRateValueToField(rateCalculator *awsmetrics.MetricCalculator, fields map[string]interface{}, metricName string,
cinfoName string, curVal interface{}, curTime time.Time, multiplier float64) {
key := cinfoName + metricName
if val, ok := rateCalculator.Calculate(key, nil, curVal, curTime); ok {
mKey := awsmetrics.NewKey(cinfoName+metricName, nil)
if val, ok := rateCalculator.Calculate(mKey, curVal, curTime); ok {
fields[metricName] = val.(float64) * multiplier
}
}
Expand Down

0 comments on commit 6b07384

Please sign in to comment.