You've already forked opentelemetry-go
mirror of
https://github.com/open-telemetry/opentelemetry-go.git
synced 2025-11-25 22:41:46 +02:00
Topic: #5249
This reverts commit 8041156518 (PR: #5899)
due to the performance degradation found by Benchmarks CI
https://github.com/open-telemetry/opentelemetry-go/actions/runs/11447364022/job/31848519243
Here are the benchmark results on my machine:
```
goos: darwin
goarch: arm64
pkg: go.opentelemetry.io/otel/sdk/metric
│ old.txt │ new.txt │
│ sec/op │ sec/op vs base │
Instrument/instrumentImpl/aggregate-10 3.378µ ± 3% 49.366µ ± 1% +1361.40% (p=0.000 n=10)
Instrument/observable/observe-10 2.288µ ± 2% 37.791µ ± 1% +1551.73% (p=0.000 n=10)
geomean 2.780µ 43.19µ +1453.65%
│ old.txt │ new.txt │
│ B/op │ B/op vs base │
Instrument/instrumentImpl/aggregate-10 1.245Ki ± 1% 22.363Ki ± 0% +1696.08% (p=0.000 n=10)
Instrument/observable/observe-10 823.0 ± 1% 17432.5 ± 0% +2018.17% (p=0.000 n=10)
geomean 1.000Ki 19.51Ki +1850.48%
│ old.txt │ new.txt │
│ allocs/op │ allocs/op vs base │
Instrument/instrumentImpl/aggregate-10 1.000 ± 0% 21.000 ± 0% +2000.00% (p=0.000 n=10)
Instrument/observable/observe-10 1.000 ± 0% 16.000 ± 0% +1500.00% (p=0.000 n=10)
```
587 lines
13 KiB
Go
587 lines
13 KiB
Go
// Copyright The OpenTelemetry Authors
|
|
// SPDX-License-Identifier: Apache-2.0
|
|
|
|
package aggregate // import "go.opentelemetry.io/otel/sdk/metric/internal/aggregate"
|
|
|
|
import (
|
|
"context"
|
|
"testing"
|
|
|
|
"go.opentelemetry.io/otel/sdk/metric/metricdata"
|
|
)
|
|
|
|
// TestSum runs the sum-aggregator test suite for every combination of
// number type (int64/float64), temporality (delta/cumulative), and plain
// vs. precomputed sums.
//
// A single test clock is registered for the whole test (unregistered via
// t.Cleanup) and explicitly Reset between subtests.
// NOTE(review): the Reset placement is assumed to rewind the clock's tick
// counter so each subtest sees the same deterministic y2kPlus timestamps —
// confirm against the clock helper before reordering these calls.
func TestSum(t *testing.T) {
	c := new(clock)
	t.Cleanup(c.Register())

	t.Run("Int64/DeltaSum", testDeltaSum[int64]())
	c.Reset()

	t.Run("Float64/DeltaSum", testDeltaSum[float64]())
	c.Reset()

	t.Run("Int64/CumulativeSum", testCumulativeSum[int64]())
	c.Reset()

	t.Run("Float64/CumulativeSum", testCumulativeSum[float64]())
	c.Reset()

	t.Run("Int64/DeltaPrecomputedSum", testDeltaPrecomputedSum[int64]())
	c.Reset()

	t.Run("Float64/DeltaPrecomputedSum", testDeltaPrecomputedSum[float64]())
	c.Reset()

	t.Run("Int64/CumulativePrecomputedSum", testCumulativePrecomputedSum[int64]())
	c.Reset()

	t.Run("Float64/CumulativePrecomputedSum", testCumulativePrecomputedSum[float64]())
}
|
|
|
|
// testDeltaSum returns a subtest exercising a non-monotonic,
// delta-temporality Sum built with the attrFltr attribute filter and an
// aggregation limit of 3. Each teststep feeds the listed measurements into
// the Measure and compares the collected aggregation (data-point count n
// and contents) against the expectation. Timestamps in the expectations
// come from y2kPlus, driven by the test clock registered in TestSum.
func testDeltaSum[N int64 | float64]() func(t *testing.T) {
	mono := false
	in, out := Builder[N]{
		Temporality:      metricdata.DeltaTemporality,
		Filter:           attrFltr,
		AggregationLimit: 3,
	}.Sum(mono)
	ctx := context.Background()
	return test[N](in, out, []teststep[N]{
		{
			// No measurements: collection yields zero data points.
			input: []arg[N]{},
			expect: output{
				n: 0,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.DeltaTemporality,
					DataPoints:  []metricdata.DataPoint[N]{},
				},
			},
		},
		{
			// Measurements for the same (filtered) attribute set are summed
			// per set: alice = 1+1+2, bob = -1-10.
			input: []arg[N]{
				{ctx, 1, alice},
				{ctx, -1, bob},
				{ctx, 1, alice},
				{ctx, 2, alice},
				{ctx, -10, bob},
			},
			expect: output{
				n: 2,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.DeltaTemporality,
					DataPoints: []metricdata.DataPoint[N]{
						{
							Attributes: fltrAlice,
							StartTime:  y2kPlus(1),
							Time:       y2kPlus(2),
							Value:      4,
						},
						{
							Attributes: fltrBob,
							StartTime:  y2kPlus(1),
							Time:       y2kPlus(2),
							Value:      -11,
						},
					},
				},
			},
		},
		{
			// Delta temporality: each cycle reports only the new values
			// (not 4+10 or -11+3) and the start time advances to the
			// previous collection time.
			input: []arg[N]{
				{ctx, 10, alice},
				{ctx, 3, bob},
			},
			expect: output{
				n: 2,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.DeltaTemporality,
					DataPoints: []metricdata.DataPoint[N]{
						{
							Attributes: fltrAlice,
							StartTime:  y2kPlus(2),
							Time:       y2kPlus(3),
							Value:      10,
						},
						{
							Attributes: fltrBob,
							StartTime:  y2kPlus(2),
							Time:       y2kPlus(3),
							Value:      3,
						},
					},
				},
			},
		},
		{
			input: []arg[N]{},
			// Delta sums are expected to reset.
			expect: output{
				n: 0,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.DeltaTemporality,
					DataPoints:  []metricdata.DataPoint[N]{},
				},
			},
		},
		{
			// With AggregationLimit 3, the third and fourth distinct
			// attribute sets (carol, dave) are folded into the single
			// overflowSet data point (value 1+1 = 2).
			input: []arg[N]{
				{ctx, 1, alice},
				{ctx, 1, bob},
				// These will exceed cardinality limit.
				{ctx, 1, carol},
				{ctx, 1, dave},
			},
			expect: output{
				n: 3,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.DeltaTemporality,
					DataPoints: []metricdata.DataPoint[N]{
						{
							Attributes: fltrAlice,
							StartTime:  y2kPlus(4),
							Time:       y2kPlus(5),
							Value:      1,
						},
						{
							Attributes: fltrBob,
							StartTime:  y2kPlus(4),
							Time:       y2kPlus(5),
							Value:      1,
						},
						{
							Attributes: overflowSet,
							StartTime:  y2kPlus(4),
							Time:       y2kPlus(5),
							Value:      2,
						},
					},
				},
			},
		},
	})
}
|
|
|
|
// testCumulativeSum returns a subtest exercising a non-monotonic,
// cumulative-temporality Sum built with the attrFltr attribute filter and
// an aggregation limit of 3. Unlike the delta variant, values accumulate
// across collection cycles and every data point keeps the same start time
// (y2kPlus(0)).
func testCumulativeSum[N int64 | float64]() func(t *testing.T) {
	mono := false
	in, out := Builder[N]{
		Temporality:      metricdata.CumulativeTemporality,
		Filter:           attrFltr,
		AggregationLimit: 3,
	}.Sum(mono)
	ctx := context.Background()
	return test[N](in, out, []teststep[N]{
		{
			// No measurements: collection yields zero data points.
			input: []arg[N]{},
			expect: output{
				n: 0,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.CumulativeTemporality,
					DataPoints:  []metricdata.DataPoint[N]{},
				},
			},
		},
		{
			// Per-attribute-set sums: alice = 1+1+2, bob = -1-10.
			input: []arg[N]{
				{ctx, 1, alice},
				{ctx, -1, bob},
				{ctx, 1, alice},
				{ctx, 2, alice},
				{ctx, -10, bob},
			},
			expect: output{
				n: 2,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.CumulativeTemporality,
					DataPoints: []metricdata.DataPoint[N]{
						{
							Attributes: fltrAlice,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(2),
							Value:      4,
						},
						{
							Attributes: fltrBob,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(2),
							Value:      -11,
						},
					},
				},
			},
		},
		{
			// Cumulative temporality: new measurements add onto the prior
			// totals (alice 4+10, bob -11+3); start time stays fixed.
			input: []arg[N]{
				{ctx, 10, alice},
				{ctx, 3, bob},
			},
			expect: output{
				n: 2,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.CumulativeTemporality,
					DataPoints: []metricdata.DataPoint[N]{
						{
							Attributes: fltrAlice,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(3),
							Value:      14,
						},
						{
							Attributes: fltrBob,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(3),
							Value:      -8,
						},
					},
				},
			},
		},
		{
			// carol and dave exceed the aggregation limit of 3 and are
			// folded into the overflowSet point; alice and bob keep their
			// previous cumulative totals even with no new input.
			input: []arg[N]{
				// These will exceed cardinality limit.
				{ctx, 1, carol},
				{ctx, 1, dave},
			},
			expect: output{
				n: 3,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.CumulativeTemporality,
					DataPoints: []metricdata.DataPoint[N]{
						{
							Attributes: fltrAlice,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(4),
							Value:      14,
						},
						{
							Attributes: fltrBob,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(4),
							Value:      -8,
						},
						{
							Attributes: overflowSet,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(4),
							Value:      2,
						},
					},
				},
			},
		},
	})
}
|
|
|
|
// testDeltaPrecomputedSum returns a subtest exercising a non-monotonic,
// delta-temporality PrecomputedSum built with the attrFltr attribute
// filter and an aggregation limit of 3. Precomputed sums treat incoming
// values as already-accumulated totals; with delta temporality the
// reported value for each cycle is the change from the previous cycle's
// reported total.
func testDeltaPrecomputedSum[N int64 | float64]() func(t *testing.T) {
	mono := false
	in, out := Builder[N]{
		Temporality:      metricdata.DeltaTemporality,
		Filter:           attrFltr,
		AggregationLimit: 3,
	}.PrecomputedSum(mono)
	ctx := context.Background()
	return test[N](in, out, []teststep[N]{
		{
			// No measurements: collection yields zero data points.
			input: []arg[N]{},
			expect: output{
				n: 0,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.DeltaTemporality,
					DataPoints:  []metricdata.DataPoint[N]{},
				},
			},
		},
		{
			// Measurements on alice and on the pre-filtered fltrAlice set
			// land on the same filtered data point: alice = 1+1+2 = 4.
			input: []arg[N]{
				{ctx, 1, alice},
				{ctx, -1, bob},
				{ctx, 1, fltrAlice},
				{ctx, 2, alice},
				{ctx, -10, bob},
			},
			expect: output{
				n: 2,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.DeltaTemporality,
					DataPoints: []metricdata.DataPoint[N]{
						{
							Attributes: fltrAlice,
							StartTime:  y2kPlus(1),
							Time:       y2kPlus(2),
							Value:      4,
						},
						{
							Attributes: fltrBob,
							StartTime:  y2kPlus(1),
							Time:       y2kPlus(2),
							Value:      -11,
						},
					},
				},
			},
		},
		{
			// Delta of the precomputed totals vs. the previous cycle:
			// alice (1+10) - 4 = 7, bob 3 - (-11) = 14.
			input: []arg[N]{
				{ctx, 1, fltrAlice},
				{ctx, 10, alice},
				{ctx, 3, bob},
			},
			expect: output{
				n: 2,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.DeltaTemporality,
					DataPoints: []metricdata.DataPoint[N]{
						{
							Attributes: fltrAlice,
							StartTime:  y2kPlus(2),
							Time:       y2kPlus(3),
							Value:      7,
						},
						{
							Attributes: fltrBob,
							StartTime:  y2kPlus(2),
							Time:       y2kPlus(3),
							Value:      14,
						},
					},
				},
			},
		},
		{
			input: []arg[N]{},
			// Precomputed sums are expected to reset.
			expect: output{
				n: 0,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.DeltaTemporality,
					DataPoints:  []metricdata.DataPoint[N]{},
				},
			},
		},
		{
			// With AggregationLimit 3, carol and dave are folded into the
			// single overflowSet data point (value 1+1 = 2).
			input: []arg[N]{
				{ctx, 1, alice},
				{ctx, 1, bob},
				// These will exceed cardinality limit.
				{ctx, 1, carol},
				{ctx, 1, dave},
			},
			expect: output{
				n: 3,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.DeltaTemporality,
					DataPoints: []metricdata.DataPoint[N]{
						{
							Attributes: fltrAlice,
							StartTime:  y2kPlus(4),
							Time:       y2kPlus(5),
							Value:      1,
						},
						{
							Attributes: fltrBob,
							StartTime:  y2kPlus(4),
							Time:       y2kPlus(5),
							Value:      1,
						},
						{
							Attributes: overflowSet,
							StartTime:  y2kPlus(4),
							Time:       y2kPlus(5),
							Value:      2,
						},
					},
				},
			},
		},
	})
}
|
|
|
|
// testCumulativePrecomputedSum returns a subtest exercising a
// non-monotonic, cumulative-temporality PrecomputedSum built with the
// attrFltr attribute filter and an aggregation limit of 3. With
// cumulative temporality the most recent precomputed totals are reported
// as-is each cycle (no differencing), with a fixed start time of
// y2kPlus(0).
func testCumulativePrecomputedSum[N int64 | float64]() func(t *testing.T) {
	mono := false
	in, out := Builder[N]{
		Temporality:      metricdata.CumulativeTemporality,
		Filter:           attrFltr,
		AggregationLimit: 3,
	}.PrecomputedSum(mono)
	ctx := context.Background()
	return test[N](in, out, []teststep[N]{
		{
			// No measurements: collection yields zero data points.
			input: []arg[N]{},
			expect: output{
				n: 0,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.CumulativeTemporality,
					DataPoints:  []metricdata.DataPoint[N]{},
				},
			},
		},
		{
			// alice and the pre-filtered fltrAlice set land on the same
			// filtered data point: alice = 1+1+2 = 4, bob = -1-10 = -11.
			input: []arg[N]{
				{ctx, 1, alice},
				{ctx, -1, bob},
				{ctx, 1, fltrAlice},
				{ctx, 2, alice},
				{ctx, -10, bob},
			},
			expect: output{
				n: 2,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.CumulativeTemporality,
					DataPoints: []metricdata.DataPoint[N]{
						{
							Attributes: fltrAlice,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(2),
							Value:      4,
						},
						{
							Attributes: fltrBob,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(2),
							Value:      -11,
						},
					},
				},
			},
		},
		{
			// New precomputed totals replace the old ones (no summation
			// with the prior cycle): alice = 1+10 = 11, bob = 3.
			input: []arg[N]{
				{ctx, 1, fltrAlice},
				{ctx, 10, alice},
				{ctx, 3, bob},
			},
			expect: output{
				n: 2,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.CumulativeTemporality,
					DataPoints: []metricdata.DataPoint[N]{
						{
							Attributes: fltrAlice,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(3),
							Value:      11,
						},
						{
							Attributes: fltrBob,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(3),
							Value:      3,
						},
					},
				},
			},
		},
		{
			input: []arg[N]{},
			// Precomputed sums are expected to reset.
			expect: output{
				n: 0,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.CumulativeTemporality,
					DataPoints:  []metricdata.DataPoint[N]{},
				},
			},
		},
		{
			// With AggregationLimit 3, carol and dave are folded into the
			// single overflowSet data point (value 1+1 = 2).
			input: []arg[N]{
				{ctx, 1, alice},
				{ctx, 1, bob},
				// These will exceed cardinality limit.
				{ctx, 1, carol},
				{ctx, 1, dave},
			},
			expect: output{
				n: 3,
				agg: metricdata.Sum[N]{
					IsMonotonic: mono,
					Temporality: metricdata.CumulativeTemporality,
					DataPoints: []metricdata.DataPoint[N]{
						{
							Attributes: fltrAlice,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(5),
							Value:      1,
						},
						{
							Attributes: fltrBob,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(5),
							Value:      1,
						},
						{
							Attributes: overflowSet,
							StartTime:  y2kPlus(0),
							Time:       y2kPlus(5),
							Value:      2,
						},
					},
				},
			},
		},
	})
}
|
|
|
|
func BenchmarkSum(b *testing.B) {
|
|
// The monotonic argument is only used to annotate the Sum returned from
|
|
// the Aggregation method. It should not have an effect on operational
|
|
// performance, therefore, only monotonic=false is benchmarked here.
|
|
b.Run("Int64/Cumulative", benchmarkAggregate(func() (Measure[int64], ComputeAggregation) {
|
|
return Builder[int64]{
|
|
Temporality: metricdata.CumulativeTemporality,
|
|
}.Sum(false)
|
|
}))
|
|
b.Run("Int64/Delta", benchmarkAggregate(func() (Measure[int64], ComputeAggregation) {
|
|
return Builder[int64]{
|
|
Temporality: metricdata.DeltaTemporality,
|
|
}.Sum(false)
|
|
}))
|
|
b.Run("Float64/Cumulative", benchmarkAggregate(func() (Measure[float64], ComputeAggregation) {
|
|
return Builder[float64]{
|
|
Temporality: metricdata.CumulativeTemporality,
|
|
}.Sum(false)
|
|
}))
|
|
b.Run("Float64/Delta", benchmarkAggregate(func() (Measure[float64], ComputeAggregation) {
|
|
return Builder[float64]{
|
|
Temporality: metricdata.DeltaTemporality,
|
|
}.Sum(false)
|
|
}))
|
|
|
|
b.Run("Precomputed/Int64/Cumulative", benchmarkAggregate(func() (Measure[int64], ComputeAggregation) {
|
|
return Builder[int64]{
|
|
Temporality: metricdata.CumulativeTemporality,
|
|
}.PrecomputedSum(false)
|
|
}))
|
|
b.Run("Precomputed/Int64/Delta", benchmarkAggregate(func() (Measure[int64], ComputeAggregation) {
|
|
return Builder[int64]{
|
|
Temporality: metricdata.DeltaTemporality,
|
|
}.PrecomputedSum(false)
|
|
}))
|
|
b.Run("Precomputed/Float64/Cumulative", benchmarkAggregate(func() (Measure[float64], ComputeAggregation) {
|
|
return Builder[float64]{
|
|
Temporality: metricdata.CumulativeTemporality,
|
|
}.PrecomputedSum(false)
|
|
}))
|
|
b.Run("Precomputed/Float64/Delta", benchmarkAggregate(func() (Measure[float64], ComputeAggregation) {
|
|
return Builder[float64]{
|
|
Temporality: metricdata.DeltaTemporality,
|
|
}.PrecomputedSum(false)
|
|
}))
|
|
}
|