Skip to content

Commit

Permalink
Merge branch 'main' into fix-3053
Browse files Browse the repository at this point in the history
  • Loading branch information
MrAlias committed Nov 11, 2022
2 parents 84d3e4f + d091ba8 commit 3b041c5
Show file tree
Hide file tree
Showing 8 changed files with 89 additions and 49 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Expand Up @@ -42,6 +42,7 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm
- Cumulative metrics from the OpenCensus bridge (`go.opentelemetry.io/otel/bridge/opencensus`) are defined as monotonic sums, instead of non-monotonic. (#3389)
- Asynchronous counters (`Counter` and `UpDownCounter`) from the metric SDK now produce delta sums when configured with delta temporality. (#3398)
- Exported `Status` codes in the `go.opentelemetry.io/otel/exporters/zipkin` exporter are now exported as all upper case values. (#3340)
- `Aggregation`s from `go.opentelemetry.io/otel/sdk/metric` with no data are not exported. (#3394, #3436)
- Reenabled Attribute Filters in the Metric SDK. (#3396)
- Asynchronous callbacks are only called if they are registered with at least one instrument that does not use drop aggregation. (#3408)
- Do not report empty partial-success responses in the `go.opentelemetry.io/otel/exporters/otlp` exporters. (#3438, #3432)
Expand Down
22 changes: 12 additions & 10 deletions sdk/metric/internal/histogram.go
Expand Up @@ -130,20 +130,21 @@ type deltaHistogram[N int64 | float64] struct {
}

func (s *deltaHistogram[N]) Aggregation() metricdata.Aggregation {
h := metricdata.Histogram{Temporality: metricdata.DeltaTemporality}

s.valuesMu.Lock()
defer s.valuesMu.Unlock()

if len(s.values) == 0 {
return h
return nil
}

t := now()
// Do not allow modification of our copy of bounds.
bounds := make([]float64, len(s.bounds))
copy(bounds, s.bounds)
t := now()
h.DataPoints = make([]metricdata.HistogramDataPoint, 0, len(s.values))
h := metricdata.Histogram{
Temporality: metricdata.DeltaTemporality,
DataPoints: make([]metricdata.HistogramDataPoint, 0, len(s.values)),
}
for a, b := range s.values {
hdp := metricdata.HistogramDataPoint{
Attributes: a,
Expand Down Expand Up @@ -192,20 +193,21 @@ type cumulativeHistogram[N int64 | float64] struct {
}

func (s *cumulativeHistogram[N]) Aggregation() metricdata.Aggregation {
h := metricdata.Histogram{Temporality: metricdata.CumulativeTemporality}

s.valuesMu.Lock()
defer s.valuesMu.Unlock()

if len(s.values) == 0 {
return h
return nil
}

t := now()
// Do not allow modification of our copy of bounds.
bounds := make([]float64, len(s.bounds))
copy(bounds, s.bounds)
t := now()
h.DataPoints = make([]metricdata.HistogramDataPoint, 0, len(s.values))
h := metricdata.Histogram{
Temporality: metricdata.CumulativeTemporality,
DataPoints: make([]metricdata.HistogramDataPoint, 0, len(s.values)),
}
for a, b := range s.values {
// The HistogramDataPoint field values returned need to be copies of
// the buckets value as we will keep updating them.
Expand Down
13 changes: 10 additions & 3 deletions sdk/metric/internal/histogram_test.go
Expand Up @@ -169,24 +169,31 @@ func TestCumulativeHistogramImutableCounts(t *testing.T) {
func TestDeltaHistogramReset(t *testing.T) {
t.Cleanup(mockTime(now))

expect := metricdata.Histogram{Temporality: metricdata.DeltaTemporality}
a := NewDeltaHistogram[int64](histConf)
metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
assert.Nil(t, a.Aggregation())

a.Aggregate(1, alice)
expect := metricdata.Histogram{Temporality: metricdata.DeltaTemporality}
expect.DataPoints = []metricdata.HistogramDataPoint{hPoint(alice, 1, 1)}
metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())

// The attr set should be forgotten once Aggregation is called.
expect.DataPoints = nil
metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
assert.Nil(t, a.Aggregation())

// Aggregating another set should not affect the original (alice).
a.Aggregate(1, bob)
expect.DataPoints = []metricdata.HistogramDataPoint{hPoint(bob, 1, 1)}
metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
}

func TestEmptyHistogramNilAggregation(t *testing.T) {
assert.Nil(t, NewCumulativeHistogram[int64](histConf).Aggregation())
assert.Nil(t, NewCumulativeHistogram[float64](histConf).Aggregation())
assert.Nil(t, NewDeltaHistogram[int64](histConf).Aggregation())
assert.Nil(t, NewDeltaHistogram[float64](histConf).Aggregation())
}

func BenchmarkHistogram(b *testing.B) {
b.Run("Int64", benchmarkHistogram[int64])
b.Run("Float64", benchmarkHistogram[float64])
Expand Down
8 changes: 4 additions & 4 deletions sdk/metric/internal/lastvalue.go
Expand Up @@ -49,16 +49,16 @@ func (s *lastValue[N]) Aggregate(value N, attr attribute.Set) {
}

func (s *lastValue[N]) Aggregation() metricdata.Aggregation {
gauge := metricdata.Gauge[N]{}

s.Lock()
defer s.Unlock()

if len(s.values) == 0 {
return gauge
return nil
}

gauge.DataPoints = make([]metricdata.DataPoint[N], 0, len(s.values))
gauge := metricdata.Gauge[N]{
DataPoints: make([]metricdata.DataPoint[N], 0, len(s.values)),
}
for a, v := range s.values {
gauge.DataPoints = append(gauge.DataPoints, metricdata.DataPoint[N]{
Attributes: a,
Expand Down
24 changes: 16 additions & 8 deletions sdk/metric/internal/lastvalue_test.go
Expand Up @@ -17,6 +17,8 @@ package internal // import "go.opentelemetry.io/otel/sdk/metric/internal"
import (
"testing"

"github.com/stretchr/testify/assert"

"go.opentelemetry.io/otel/sdk/metric/metricdata"
"go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest"
)
Expand Down Expand Up @@ -52,20 +54,21 @@ func testLastValueReset[N int64 | float64](t *testing.T) {
t.Cleanup(mockTime(now))

a := NewLastValue[N]()
expect := metricdata.Gauge[N]{}
metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
assert.Nil(t, a.Aggregation())

a.Aggregate(1, alice)
expect.DataPoints = []metricdata.DataPoint[N]{{
Attributes: alice,
Time: now(),
Value: 1,
}}
expect := metricdata.Gauge[N]{
DataPoints: []metricdata.DataPoint[N]{{
Attributes: alice,
Time: now(),
Value: 1,
}},
}
metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())

// The attr set should be forgotten once Aggregation is called.
expect.DataPoints = nil
metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
assert.Nil(t, a.Aggregation())

// Aggregating another set should not affect the original (alice).
a.Aggregate(1, bob)
Expand All @@ -82,6 +85,11 @@ func TestLastValueReset(t *testing.T) {
t.Run("Float64", testLastValueReset[float64])
}

func TestEmptyLastValueNilAggregation(t *testing.T) {
assert.Nil(t, NewLastValue[int64]().Aggregation())
assert.Nil(t, NewLastValue[float64]().Aggregation())
}

func BenchmarkLastValue(b *testing.B) {
b.Run("Int64", benchmarkAggregator(NewLastValue[int64]))
b.Run("Float64", benchmarkAggregator(NewLastValue[float64]))
Expand Down
39 changes: 18 additions & 21 deletions sdk/metric/internal/sum.go
Expand Up @@ -76,20 +76,19 @@ type deltaSum[N int64 | float64] struct {
}

func (s *deltaSum[N]) Aggregation() metricdata.Aggregation {
out := metricdata.Sum[N]{
Temporality: metricdata.DeltaTemporality,
IsMonotonic: s.monotonic,
}

s.Lock()
defer s.Unlock()

if len(s.values) == 0 {
return out
return nil
}

t := now()
out.DataPoints = make([]metricdata.DataPoint[N], 0, len(s.values))
out := metricdata.Sum[N]{
Temporality: metricdata.DeltaTemporality,
IsMonotonic: s.monotonic,
DataPoints: make([]metricdata.DataPoint[N], 0, len(s.values)),
}
for attr, value := range s.values {
out.DataPoints = append(out.DataPoints, metricdata.DataPoint[N]{
Attributes: attr,
Expand Down Expand Up @@ -137,20 +136,19 @@ type cumulativeSum[N int64 | float64] struct {
}

func (s *cumulativeSum[N]) Aggregation() metricdata.Aggregation {
out := metricdata.Sum[N]{
Temporality: metricdata.CumulativeTemporality,
IsMonotonic: s.monotonic,
}

s.Lock()
defer s.Unlock()

if len(s.values) == 0 {
return out
return nil
}

t := now()
out.DataPoints = make([]metricdata.DataPoint[N], 0, len(s.values))
out := metricdata.Sum[N]{
Temporality: metricdata.CumulativeTemporality,
IsMonotonic: s.monotonic,
DataPoints: make([]metricdata.DataPoint[N], 0, len(s.values)),
}
for attr, value := range s.values {
out.DataPoints = append(out.DataPoints, metricdata.DataPoint[N]{
Attributes: attr,
Expand Down Expand Up @@ -204,20 +202,19 @@ func (s *precomputedDeltaSum[N]) Aggregate(value N, attr attribute.Set) {
}

func (s *precomputedDeltaSum[N]) Aggregation() metricdata.Aggregation {
out := metricdata.Sum[N]{
Temporality: metricdata.DeltaTemporality,
IsMonotonic: s.monotonic,
}

s.Lock()
defer s.Unlock()

if len(s.recorded) == 0 {
return out
return nil
}

t := now()
out.DataPoints = make([]metricdata.DataPoint[N], 0, len(s.recorded))
out := metricdata.Sum[N]{
Temporality: metricdata.DeltaTemporality,
IsMonotonic: s.monotonic,
DataPoints: make([]metricdata.DataPoint[N], 0, len(s.recorded)),
}
for attr, recorded := range s.recorded {
value := recorded - s.reported[attr]
out.DataPoints = append(out.DataPoints, metricdata.DataPoint[N]{
Expand Down
27 changes: 24 additions & 3 deletions sdk/metric/internal/sum_test.go
Expand Up @@ -17,6 +17,8 @@ package internal // import "go.opentelemetry.io/otel/sdk/metric/internal"
import (
"testing"

"github.com/stretchr/testify/assert"

"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/sdk/metric/metricdata"
"go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest"
Expand Down Expand Up @@ -138,17 +140,17 @@ func point[N int64 | float64](a attribute.Set, v N) metricdata.DataPoint[N] {
func testDeltaSumReset[N int64 | float64](t *testing.T) {
t.Cleanup(mockTime(now))

expect := metricdata.Sum[N]{Temporality: metricdata.DeltaTemporality}
a := NewDeltaSum[N](false)
metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
assert.Nil(t, a.Aggregation())

a.Aggregate(1, alice)
expect := metricdata.Sum[N]{Temporality: metricdata.DeltaTemporality}
expect.DataPoints = []metricdata.DataPoint[N]{point[N](alice, 1)}
metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())

// The attr set should be forgotten once Aggregation is called.
expect.DataPoints = nil
metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
assert.Nil(t, a.Aggregation())

// Aggregating another set should not affect the original (alice).
a.Aggregate(1, bob)
Expand All @@ -161,6 +163,25 @@ func TestDeltaSumReset(t *testing.T) {
t.Run("Float64", testDeltaSumReset[float64])
}

// TestEmptySumNilAggregation verifies that every sum aggregator variant
// reports a nil Aggregation when it holds no data, regardless of
// monotonicity, so empty sums are never exported.
func TestEmptySumNilAggregation(t *testing.T) {
	for _, mono := range []bool{true, false} {
		assert.Nil(t, NewCumulativeSum[int64](mono).Aggregation())
		assert.Nil(t, NewCumulativeSum[float64](mono).Aggregation())
		assert.Nil(t, NewDeltaSum[int64](mono).Aggregation())
		assert.Nil(t, NewDeltaSum[float64](mono).Aggregation())
		assert.Nil(t, NewPrecomputedCumulativeSum[int64](mono).Aggregation())
		assert.Nil(t, NewPrecomputedCumulativeSum[float64](mono).Aggregation())
		assert.Nil(t, NewPrecomputedDeltaSum[int64](mono).Aggregation())
		assert.Nil(t, NewPrecomputedDeltaSum[float64](mono).Aggregation())
	}
}

func BenchmarkSum(b *testing.B) {
b.Run("Int64", benchmarkSum[int64])
b.Run("Float64", benchmarkSum[float64])
Expand Down
4 changes: 4 additions & 0 deletions sdk/metric/pipeline_test.go
Expand Up @@ -167,6 +167,9 @@ func testDefaultViewImplicit[N int64 | float64]() func(t *testing.T) {
got, err := i.Instrument(inst)
require.NoError(t, err)
assert.Len(t, got, 1, "default view not applied")
for _, a := range got {
a.Aggregate(1, *attribute.EmptySet())
}

out, err := test.pipe.produce(context.Background())
require.NoError(t, err)
Expand All @@ -180,6 +183,7 @@ func testDefaultViewImplicit[N int64 | float64]() func(t *testing.T) {
Data: metricdata.Sum[N]{
Temporality: metricdata.CumulativeTemporality,
IsMonotonic: true,
DataPoints: []metricdata.DataPoint[N]{{Value: N(1)}},
},
}, sm.Metrics[0], metricdatatest.IgnoreTimestamp())
})
Expand Down

0 comments on commit 3b041c5

Please sign in to comment.