opentelemetry-go/sdk/metric/internal/sum_test.go

// Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package internal // import "go.opentelemetry.io/otel/sdk/metric/internal"

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"go.opentelemetry.io/otel/attribute"
	"go.opentelemetry.io/otel/sdk/metric/metricdata"
	"go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest"
)

func TestSum(t *testing.T) {
	t.Cleanup(mockTime(now))
	t.Run("Int64", testSum[int64])
	t.Run("Float64", testSum[float64])
}
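
// testSum exercises each sum variant (delta, cumulative, and their
// pre-computed forms) through the concurrent aggregatorTester, in both
// monotonic and non-monotonic modes.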
func testSum[N int64 | float64](t *testing.T) {
	tester := &aggregatorTester[N]{
		GoroutineN:   defaultGoroutines,
		MeasurementN: defaultMeasurements,
		CycleN:       defaultCycles,
	}

	t.Run("Delta", func(t *testing.T) {
		incr, mono := monoIncr, true
		eFunc := deltaExpecter[N](incr, mono)
		t.Run("Monotonic", tester.Run(NewDeltaSum[N](mono), incr, eFunc))

		incr, mono = nonMonoIncr, false
		eFunc = deltaExpecter[N](incr, mono)
		t.Run("NonMonotonic", tester.Run(NewDeltaSum[N](mono), incr, eFunc))
	})

	t.Run("Cumulative", func(t *testing.T) {
		incr, mono := monoIncr, true
		eFunc := cumuExpecter[N](incr, mono)
		t.Run("Monotonic", tester.Run(NewCumulativeSum[N](mono), incr, eFunc))

		incr, mono = nonMonoIncr, false
		eFunc = cumuExpecter[N](incr, mono)
		t.Run("NonMonotonic", tester.Run(NewCumulativeSum[N](mono), incr, eFunc))
	})

	t.Run("PreComputedDelta", func(t *testing.T) {
		incr, mono := monoIncr, true
		eFunc := preDeltaExpecter[N](incr, mono)
		t.Run("Monotonic", tester.Run(NewPrecomputedDeltaSum[N](mono), incr, eFunc))

		incr, mono = nonMonoIncr, false
		eFunc = preDeltaExpecter[N](incr, mono)
		t.Run("NonMonotonic", tester.Run(NewPrecomputedDeltaSum[N](mono), incr, eFunc))
	})

	t.Run("PreComputedCumulative", func(t *testing.T) {
		incr, mono := monoIncr, true
		eFunc := preCumuExpecter[N](incr, mono)
		t.Run("Monotonic", tester.Run(NewPrecomputedCumulativeSum[N](mono), incr, eFunc))

		incr, mono = nonMonoIncr, false
		eFunc = preCumuExpecter[N](incr, mono)
		t.Run("NonMonotonic", tester.Run(NewPrecomputedCumulativeSum[N](mono), incr, eFunc))
	})
}
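
// deltaExpecter returns an expectFunc that builds the delta Sum expected from
// a collection cycle: one data point per attribute set in incr, valued at that
// set's increment multiplied by the number of measurements m made in the cycle.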
func deltaExpecter[N int64 | float64](incr setMap, mono bool) expectFunc {
	sum := metricdata.Sum[N]{Temporality: metricdata.DeltaTemporality, IsMonotonic: mono}
	return func(m int) metricdata.Aggregation {
		sum.DataPoints = make([]metricdata.DataPoint[N], 0, len(incr))
		for a, v := range incr {
			sum.DataPoints = append(sum.DataPoints, point(a, N(v*m)))
		}
		return sum
	}
}
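
// cumuExpecter returns an expectFunc that builds the cumulative Sum expected
// after each collection cycle: one data point per attribute set in incr,
// valued at the running total accumulated over every cycle so far.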
func cumuExpecter[N int64 | float64](incr setMap, mono bool) expectFunc {
	var cycle int
	sum := metricdata.Sum[N]{Temporality: metricdata.CumulativeTemporality, IsMonotonic: mono}
	return func(m int) metricdata.Aggregation {
		cycle++
		sum.DataPoints = make([]metricdata.DataPoint[N], 0, len(incr))
		for a, v := range incr {
			sum.DataPoints = append(sum.DataPoints, point(a, N(v*cycle*m)))
		}
		return sum
	}
}
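
// preDeltaExpecter returns an expectFunc for pre-computed delta Sums: each
// expected data point is the change in the recorded value since the last
// collection cycle.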
func preDeltaExpecter[N int64 | float64](incr setMap, mono bool) expectFunc {
	sum := metricdata.Sum[N]{Temporality: metricdata.DeltaTemporality, IsMonotonic: mono}
	last := make(map[attribute.Set]N)
	return func(int) metricdata.Aggregation {
		sum.DataPoints = make([]metricdata.DataPoint[N], 0, len(incr))
		for a, v := range incr {
			l := last[a]
			sum.DataPoints = append(sum.DataPoints, point(a, N(v)-l))
			last[a] = N(v)
		}
		return sum
	}
}
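
// preCumuExpecter returns an expectFunc for pre-computed cumulative Sums: each
// expected data point reports the recorded value as-is.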
func preCumuExpecter[N int64 | float64](incr setMap, mono bool) expectFunc {
	sum := metricdata.Sum[N]{Temporality: metricdata.CumulativeTemporality, IsMonotonic: mono}
	return func(int) metricdata.Aggregation {
		sum.DataPoints = make([]metricdata.DataPoint[N], 0, len(incr))
		for a, v := range incr {
			sum.DataPoints = append(sum.DataPoints, point(a, N(v)))
		}
		return sum
	}
}

// point returns a DataPoint that started and ended now.
func point[N int64 | float64](a attribute.Set, v N) metricdata.DataPoint[N] {
	return metricdata.DataPoint[N]{
		Attributes: a,
		StartTime:  now(),
		Time:       now(),
		Value:      N(v),
	}
}
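
// testDeltaSumReset verifies that a delta sum forgets a recorded attribute set
// once its value has been reported by Aggregation.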
func testDeltaSumReset[N int64 | float64](t *testing.T) {
	t.Cleanup(mockTime(now))

	a := NewDeltaSum[N](false)
	assert.Nil(t, a.Aggregation())

	a.Aggregate(1, alice)
	expect := metricdata.Sum[N]{Temporality: metricdata.DeltaTemporality}
	expect.DataPoints = []metricdata.DataPoint[N]{point[N](alice, 1)}
	metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())

	// The attr set should be forgotten once Aggregation is called.
	expect.DataPoints = nil
	assert.Nil(t, a.Aggregation())

	// Aggregating another set should not affect the original (alice).
	a.Aggregate(1, bob)
	expect.DataPoints = []metricdata.DataPoint[N]{point[N](bob, 1)}
	metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
}

func TestDeltaSumReset(t *testing.T) {
	t.Run("Int64", testDeltaSumReset[int64])
	t.Run("Float64", testDeltaSumReset[float64])
}
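
// TestEmptySumNilAggregation verifies that every sum variant returns a nil
// Aggregation when it has recorded no measurements.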
func TestEmptySumNilAggregation(t *testing.T) {
	assert.Nil(t, NewCumulativeSum[int64](true).Aggregation())
	assert.Nil(t, NewCumulativeSum[int64](false).Aggregation())
	assert.Nil(t, NewCumulativeSum[float64](true).Aggregation())
	assert.Nil(t, NewCumulativeSum[float64](false).Aggregation())
	assert.Nil(t, NewDeltaSum[int64](true).Aggregation())
	assert.Nil(t, NewDeltaSum[int64](false).Aggregation())
	assert.Nil(t, NewDeltaSum[float64](true).Aggregation())
	assert.Nil(t, NewDeltaSum[float64](false).Aggregation())
	assert.Nil(t, NewPrecomputedCumulativeSum[int64](true).Aggregation())
	assert.Nil(t, NewPrecomputedCumulativeSum[int64](false).Aggregation())
	assert.Nil(t, NewPrecomputedCumulativeSum[float64](true).Aggregation())
	assert.Nil(t, NewPrecomputedCumulativeSum[float64](false).Aggregation())
	assert.Nil(t, NewPrecomputedDeltaSum[int64](true).Aggregation())
	assert.Nil(t, NewPrecomputedDeltaSum[int64](false).Aggregation())
	assert.Nil(t, NewPrecomputedDeltaSum[float64](true).Aggregation())
	assert.Nil(t, NewPrecomputedDeltaSum[float64](false).Aggregation())
}

func BenchmarkSum(b *testing.B) {
	b.Run("Int64", benchmarkSum[int64])
	b.Run("Float64", benchmarkSum[float64])
}

func benchmarkSum[N int64 | float64](b *testing.B) {
	// The monotonic argument is only used to annotate the Sum returned from
	// the Aggregation method. It should not have an effect on operational
	// performance; therefore, only monotonic=false is benchmarked here.
	factory := func() Aggregator[N] { return NewDeltaSum[N](false) }
	b.Run("Delta", benchmarkAggregator(factory))

	factory = func() Aggregator[N] { return NewCumulativeSum[N](false) }
	b.Run("Cumulative", benchmarkAggregator(factory))
}